diff --git a/data-prepper-plugins/saas-source-plugins/jira-source/src/main/java/org/opensearch/dataprepper/plugins/source/jira/JiraSourceConfig.java b/data-prepper-plugins/saas-source-plugins/jira-source/src/main/java/org/opensearch/dataprepper/plugins/source/jira/JiraSourceConfig.java
index 1144191e6b..3cb7b9501c 100644
--- a/data-prepper-plugins/saas-source-plugins/jira-source/src/main/java/org/opensearch/dataprepper/plugins/source/jira/JiraSourceConfig.java
+++ b/data-prepper-plugins/saas-source-plugins/jira-source/src/main/java/org/opensearch/dataprepper/plugins/source/jira/JiraSourceConfig.java
@@ -17,13 +17,12 @@
 import org.opensearch.dataprepper.plugins.source.jira.configuration.FilterConfig;
 import org.opensearch.dataprepper.plugins.source.source_crawler.base.CrawlerSourceConfig;
 
-import java.time.Duration;
 import java.util.List;
 
 @Getter
 public class JiraSourceConfig implements CrawlerSourceConfig {
 
-    private static final Duration DEFAULT_BACKOFF_MILLIS = Duration.ofMinutes(2);
+    private static final int DEFAULT_BATCH_SIZE = 50;
 
     /**
      * Jira account url
@@ -42,7 +41,7 @@ public class JiraSourceConfig implements CrawlerSourceConfig {
      * Batch size for fetching tickets
      */
     @JsonProperty("batch_size")
-    private int batchSize = 50;
+    private int batchSize = DEFAULT_BATCH_SIZE;
 
     /**
diff --git a/data-prepper-plugins/saas-source-plugins/source-crawler/src/test/java/org/opensearch/dataprepper/plugins/source/source_crawler/base/CrawlerTest.java b/data-prepper-plugins/saas-source-plugins/source-crawler/src/test/java/org/opensearch/dataprepper/plugins/source/source_crawler/base/CrawlerTest.java
index d90f794cbf..afac9f3e54 100644
--- a/data-prepper-plugins/saas-source-plugins/source-crawler/src/test/java/org/opensearch/dataprepper/plugins/source/source_crawler/base/CrawlerTest.java
+++ b/data-prepper-plugins/saas-source-plugins/source-crawler/src/test/java/org/opensearch/dataprepper/plugins/source/source_crawler/base/CrawlerTest.java
@@ -48,6 +48,8 @@ public class CrawlerTest {
 
     private Crawler crawler;
 
+    private static final int DEFAULT_BATCH_SIZE = 50;
+
     @BeforeEach
     public void setup() {
         crawler = new Crawler(client);
@@ -68,35 +70,32 @@ public void executePartitionTest() {
     void testCrawlWithEmptyList() {
         Instant lastPollTime = Instant.ofEpochMilli(0);
         when(client.listItems()).thenReturn(Collections.emptyIterator());
-        int maxItemsPerPage = 50;
-        crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         verify(coordinator, never()).createPartition(any(SaasSourcePartition.class));
     }
 
     @Test
-    void testCrawlWithNonEmptyList() throws NoSuchFieldException, IllegalAccessException {
+    void testCrawlWithNonEmptyList(){
         Instant lastPollTime = Instant.ofEpochMilli(0);
         List itemInfoList = new ArrayList<>();
-        int maxItemsPerPage = 50;
-        for (int i = 0; i < maxItemsPerPage; i++) {
+        for (int i = 0; i < DEFAULT_BATCH_SIZE; i++) {
             itemInfoList.add(new TestItemInfo("itemId"));
         }
         when(client.listItems()).thenReturn(itemInfoList.iterator());
-        crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         verify(coordinator, times(1)).createPartition(any(SaasSourcePartition.class));
     }
 
     @Test
-    void testCrawlWithMultiplePartitions() throws NoSuchFieldException, IllegalAccessException {
+    void testCrawlWithMultiplePartitions(){
         Instant lastPollTime = Instant.ofEpochMilli(0);
         List itemInfoList = new ArrayList<>();
-        int maxItemsPerPage = 50;
-        for (int i = 0; i < maxItemsPerPage + 1; i++) {
+        for (int i = 0; i < DEFAULT_BATCH_SIZE + 1; i++) {
             itemInfoList.add(new TestItemInfo("testId"));
         }
         when(client.listItems()).thenReturn(itemInfoList.iterator());
-        crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         verify(coordinator, times(2)).createPartition(any(SaasSourcePartition.class));
     }
 
@@ -128,13 +127,12 @@ void testBatchSize() {
     void testCrawlWithNullItemsInList() throws NoSuchFieldException, IllegalAccessException {
         Instant lastPollTime = Instant.ofEpochMilli(0);
         List itemInfoList = new ArrayList<>();
-        int maxItemsPerPage = 50;
         itemInfoList.add(null);
-        for (int i = 0; i < maxItemsPerPage - 1; i++) {
+        for (int i = 0; i < DEFAULT_BATCH_SIZE - 1; i++) {
             itemInfoList.add(new TestItemInfo("testId"));
         }
         when(client.listItems()).thenReturn(itemInfoList.iterator());
-        crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         verify(coordinator, times(1)).createPartition(any(SaasSourcePartition.class));
     }
 
@@ -145,8 +143,7 @@ void testUpdatingPollTimeNullMetaData() {
         ItemInfo testItem = createTestItemInfo("1");
         itemInfoList.add(testItem);
         when(client.listItems()).thenReturn(itemInfoList.iterator());
-        int maxItemsPerPage = 50;
-        Instant updatedPollTime = crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        Instant updatedPollTime = crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         assertNotEquals(Instant.ofEpochMilli(0), updatedPollTime);
     }
 
@@ -157,8 +154,7 @@ void testUpdatedPollTimeNiCreatedLarger() {
         ItemInfo testItem = createTestItemInfo("1");
         itemInfoList.add(testItem);
         when(client.listItems()).thenReturn(itemInfoList.iterator());
-        int maxItemsPerPage = 50;
-        Instant updatedPollTime = crawler.crawl(lastPollTime, coordinator, maxItemsPerPage);
+        Instant updatedPollTime = crawler.crawl(lastPollTime, coordinator, DEFAULT_BATCH_SIZE);
         assertNotEquals(lastPollTime, updatedPollTime);
     }