offsets) {
+ for (final TopicPartition tp: offsets.keySet()) {
+ final OutputStream stream = this.outputStreams.get(tp);
if (stream != null) {
- this.logger.info("Flush records for " + tp.topic() + "-" + tp.partition());
+ LOGGER.info("Flush records for " + tp.topic() + "-" + tp.partition());
try {
stream.close();
- } catch (IOException e) {
+ } catch (final IOException e) {
throw new ConnectException(e);
}
- this.output_streams.remove(tp);
+ this.outputStreams.remove(tp);
}
}
}
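
A minimal sketch, not part of the patch, of the class-level declarations that the renamed references above presuppose: an upper-case static final logger and a camelCase per-partition stream map. The SLF4J logger, the sketch's class name, and the map's exact type parameters are assumptions.

import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;

import org.apache.kafka.common.TopicPartition;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class SinkTaskFieldsSketch {
    // Static final upper-case logger constant, as the LOGGER.info(...) call above implies.
    private static final Logger LOGGER = LoggerFactory.getLogger(SinkTaskFieldsSketch.class);

    // One open output stream per assigned partition; flush() closes and evicts the entry.
    private final Map<TopicPartition, OutputStream> outputStreams = new HashMap<>();
}
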
diff --git a/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java b/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java
index 7059f1e8..3787fdfa 100644
--- a/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java
+++ b/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java
@@ -1,3 +1,20 @@
+/*
+ * Copyright (C) 2020 Aiven Oy
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
package io.aiven.kafka.connect.s3.templating;
import java.util.HashMap;
@@ -8,11 +25,8 @@
/**
* A simple templating engine that allows to bind variables to supplier functions.
- *
- * Variable syntax: {@code {{ variable_name }}} (arbitrary number of space inside the braces).
- *
+ * Variable syntax: {@code {{ variable_name }}} (arbitrary number of spaces inside the braces).
* Non-bound variables are left as is.
- *
*/
public final class TemplatingEngine {
private static Pattern variablePattern = Pattern.compile("\\{\\{\\s*(\\w+)\\s*}}"); // {{ var }}
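
For reference, a self-contained sketch of the substitution behaviour described in the javadoc above, built around the same {{ var }} pattern: bound variables are resolved through supplier functions, unbound ones are kept verbatim. The class and method names below are illustrative, not the engine's actual API.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public final class TemplatingSketch {
    // Same pattern as above: double braces, optional whitespace, a word-character variable name.
    private static final Pattern VARIABLE_PATTERN = Pattern.compile("\\{\\{\\s*(\\w+)\\s*}}");

    static String render(final String template, final Map<String, Supplier<String>> bindings) {
        final Matcher matcher = VARIABLE_PATTERN.matcher(template);
        final StringBuffer result = new StringBuffer();
        while (matcher.find()) {
            final Supplier<String> binding = bindings.get(matcher.group(1));
            // A bound variable is replaced by its supplier's value; an unbound one keeps the whole match.
            final String replacement = binding != null ? binding.get() : matcher.group(0);
            matcher.appendReplacement(result, Matcher.quoteReplacement(replacement));
        }
        matcher.appendTail(result);
        return result.toString();
    }

    public static void main(final String[] args) {
        final Map<String, Supplier<String>> bindings = new HashMap<>();
        bindings.put("utc_date", () -> "2020-01-01");
        // Prints "prefix-2020-01-01--{{ unknown }}": the unbound variable is left as is.
        System.out.println(render("prefix-{{ utc_date }}--{{ unknown }}", bindings));
    }
}
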
diff --git a/src/main/resources/aiven-kafka-connect-s3-version.properties b/src/main/resources/aiven-kafka-connect-s3-version.properties
new file mode 100644
index 00000000..c5771996
--- /dev/null
+++ b/src/main/resources/aiven-kafka-connect-s3-version.properties
@@ -0,0 +1,17 @@
+##
+# Copyright (C) 2020 Aiven Oy
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+##
+version=${version ?: 'unknown'}
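
The new resource carries a single version key; its ${version ?: 'unknown'} placeholder is presumably substituted at build time (Gradle-style resource expansion is an assumption here). A hypothetical sketch of reading the value at runtime, with invented class and constant names:

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public final class VersionSketch {
    // Matches the resource file added in this diff.
    private static final String PROPERTIES_FILE = "aiven-kafka-connect-s3-version.properties";

    static String version() {
        final Properties props = new Properties();
        try (InputStream resource = VersionSketch.class.getClassLoader().getResourceAsStream(PROPERTIES_FILE)) {
            if (resource != null) {
                props.load(resource);
            }
        } catch (final IOException e) {
            // Ignore and fall through to the default below.
        }
        // Default to "unknown" when the resource is missing or unreadable.
        return props.getProperty("version", "unknown");
    }
}
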
diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java
index 5650cd8d..c46681c4 100644
--- a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java
+++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java
@@ -1,61 +1,80 @@
-import com.amazonaws.auth.BasicAWSCredentials;
+/*
+ * Copyright (C) 2020 Aiven Oy
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Random;
+
+import io.aiven.kafka.connect.s3.AivenKafkaConnectS3MultipartUpload;
+
import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
-import io.aiven.kafka.connect.s3.AivenKafkaConnectS3MultipartUpload;
import io.findify.s3mock.S3Mock;
-import java.io.ByteArrayInputStream;
-import java.io.InputStream;
-import java.io.IOException;
-import java.util.Random;
import org.junit.Test;
-import static org.junit.Assert.assertEquals;
+
import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
public class AivenKafkaConnectS3MultipartUploadTest {
@Test
public void testAivenKafkaConnectS3MultipartUploadTest() throws IOException {
- Random generator = new Random();
- int port = generator.nextInt(10000) + 10000;
+ final Random generator = new Random();
+ final int port = generator.nextInt(10000) + 10000;
- S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
+ final S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
api.start();
- BasicAWSCredentials awsCreds = new BasicAWSCredentials(
+ final BasicAWSCredentials awsCreds = new BasicAWSCredentials(
"test_key_id",
"test_secret_key"
);
- AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
+ final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
builder.withCredentials(new AWSStaticCredentialsProvider(awsCreds));
builder.withEndpointConfiguration(new EndpointConfiguration("http://localhost:" + port, "us-west-2"));
builder.withPathStyleAccessEnabled(true);
- AmazonS3 s3Client = builder.build();
-
+ final AmazonS3 s3Client = builder.build();
s3Client.createBucket("test-bucket");
- AivenKafkaConnectS3MultipartUpload mp = new AivenKafkaConnectS3MultipartUpload(
+ final AivenKafkaConnectS3MultipartUpload mp = new AivenKafkaConnectS3MultipartUpload(
s3Client,
"test-bucket",
"test-object"
);
- byte[] data = "foobar".getBytes();
- InputStream stream = new ByteArrayInputStream(data, 0, data.length);
- mp.upload_part(stream, data.length);
+ final byte[] data = "foobar".getBytes();
+ final InputStream stream = new ByteArrayInputStream(data, 0, data.length);
+ mp.uploadPart(stream, data.length);
mp.commit();
- S3Object object = s3Client.getObject(new GetObjectRequest("test-bucket", "test-object"));
- InputStream objectData = object.getObjectContent();
+ final S3Object object = s3Client.getObject(new GetObjectRequest("test-bucket", "test-object"));
+ final InputStream objectData = object.getObjectContent();
assertEquals(objectData.available(), 6);
- byte[] stored_data = new byte[data.length];
- objectData.read(stored_data, 0, data.length);
- assertArrayEquals(data, stored_data);
+ final byte[] storedData = new byte[data.length];
+ objectData.read(storedData, 0, data.length);
+ assertArrayEquals(data, storedData);
objectData.close();
api.stop();
diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java
index e49d40f4..f5171bbb 100644
--- a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java
+++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java
@@ -1,12 +1,31 @@
-import com.amazonaws.auth.BasicAWSCredentials;
+/*
+ * Copyright (C) 2020 Aiven Oy
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
+import java.io.IOException;
+import java.util.Random;
+
+import io.aiven.kafka.connect.s3.AivenKafkaConnectS3OutputStream;
+
import com.amazonaws.auth.AWSStaticCredentialsProvider;
+import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
-import io.aiven.kafka.connect.s3.AivenKafkaConnectS3OutputStream;
import io.findify.s3mock.S3Mock;
-import java.io.IOException;
-import java.util.Random;
import org.junit.Test;
import static org.junit.Assert.assertFalse;
@@ -16,29 +35,29 @@ public class AivenKafkaConnectS3OutputStreamTest {
@Test
public void testAivenKafkaConnectS3OutputStreamTest() throws IOException {
- Random generator = new Random();
- int port = generator.nextInt(10000) + 10000;
+ final Random generator = new Random();
+ final int port = generator.nextInt(10000) + 10000;
- S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
+ final S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build();
api.start();
- BasicAWSCredentials awsCreds = new BasicAWSCredentials(
+ final BasicAWSCredentials awsCreds = new BasicAWSCredentials(
"test_key_id",
"test_secret_key"
);
- AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
+ final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
builder.withCredentials(new AWSStaticCredentialsProvider(awsCreds));
builder.withEndpointConfiguration(new EndpointConfiguration("http://localhost:" + port, "us-west-2"));
builder.withPathStyleAccessEnabled(true);
- AmazonS3 s3Client = builder.build();
-
+ final AmazonS3 s3Client = builder.build();
s3Client.createBucket("test-bucket");
- AivenKafkaConnectS3OutputStream storageSmall = new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-small");
+ final AivenKafkaConnectS3OutputStream storageSmall =
+ new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-small");
- byte[] inputSmall = "small".getBytes();
+ final byte[] inputSmall = "small".getBytes();
storageSmall.write(inputSmall);
assertFalse(s3Client.doesObjectExist("test-bucket", "test-key-small"));
storageSmall.flush();
@@ -46,8 +65,9 @@ public void testAivenKafkaConnectS3OutputStreamTest() throws IOException {
storageSmall.close();
assertTrue(s3Client.doesObjectExist("test-bucket", "test-key-small"));
- AivenKafkaConnectS3OutputStream storageLarge = new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-large");
- byte[] inputLarge = new byte[1024*1024*10];
+ final AivenKafkaConnectS3OutputStream storageLarge =
+ new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-large");
+ final byte[] inputLarge = new byte[1024 * 1024 * 10];
storageLarge.write(inputLarge);
assertFalse(s3Client.doesObjectExist("test-bucket", "test-key-large"));
storageLarge.flush();
diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java
index 1ddb0c38..94f6dd0b 100644
--- a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java
+++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java
@@ -1,29 +1,56 @@
+/*
+ * Copyright (C) 2020 Aiven Oy
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
+
package io.aiven.kafka.connect.s3;
+import java.time.LocalDateTime;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.kafka.clients.consumer.OffsetAndMetadata;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.connect.data.Schema;
+import org.apache.kafka.connect.sink.SinkRecord;
+
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import io.findify.s3mock.S3Mock;
-import org.apache.kafka.clients.consumer.OffsetAndMetadata;
-import org.apache.kafka.common.TopicPartition;
-import org.apache.kafka.connect.data.Schema;
-import org.apache.kafka.connect.sink.SinkRecord;
-import org.junit.*;
-
-import java.time.LocalDateTime;
-import java.time.ZoneId;
-import java.time.ZonedDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.*;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class AivenKafkaConnectS3SinkTaskTest {
- private static String TEST_BUCKET = "test-bucket";
+ private static final String TEST_BUCKET = "test-bucket";
private static S3Mock s3Api;
private static AmazonS3 s3Client;
@@ -34,13 +61,13 @@ public class AivenKafkaConnectS3SinkTaskTest {
@BeforeClass
public static void setUpClass() {
- Random generator = new Random();
- int s3Port = generator.nextInt(10000) + 10000;
+ final Random generator = new Random();
+ final int s3Port = generator.nextInt(10000) + 10000;
s3Api = new S3Mock.Builder().withPort(s3Port).withInMemoryBackend().build();
s3Api.start();
- Map<String, String> commonPropertiesMutable = new HashMap<>();
+ final Map<String, String> commonPropertiesMutable = new HashMap<>();
commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID, "test_key_id");
commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY, "test_secret_key");
commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_S3_BUCKET, TEST_BUCKET);
@@ -48,8 +75,8 @@ public static void setUpClass() {
commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_S3_REGION, "us-west-2");
commonProperties = Collections.unmodifiableMap(commonPropertiesMutable);
- AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
- BasicAWSCredentials awsCreds = new BasicAWSCredentials(
+ final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard();
+ final BasicAWSCredentials awsCreds = new BasicAWSCredentials(
commonProperties.get(AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID),
commonProperties.get(AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY)
);
@@ -83,14 +110,14 @@ public void tearDown() {
@Test
public void testAivenKafkaConnectS3SinkTaskTest() {
// Create SinkTask
- AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
+ final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip");
properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset");
task.start(properties);
- TopicPartition tp = new TopicPartition("test-topic", 0);
- Collection<TopicPartition> tps = Collections.singletonList(tp);
+ final TopicPartition tp = new TopicPartition("test-topic", 0);
+ final Collection<TopicPartition> tps = Collections.singletonList(tp);
task.open(tps);
// * Simulate periodical flush() cycle - ensure that data files are written
@@ -101,14 +128,13 @@ public void testAivenKafkaConnectS3SinkTaskTest() {
assertFalse(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000000.gz"));
// Flush data - this is called by Connect on offset.flush.interval
- Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
+ final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(tp, new OffsetAndMetadata(100));
task.flush(offsets);
assertTrue(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000000.gz"));
// * Verify that we store data on partition unassignment
-
task.put(createBatchOfRecord(100, 200));
assertFalse(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000100.gz"));
@@ -129,20 +155,20 @@ public void testAivenKafkaConnectS3SinkTaskTest() {
@Test
public void testS3ConstantPrefix() {
- AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
+ final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip");
properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset");
properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix--");
task.start(properties);
- TopicPartition tp = new TopicPartition("test-topic", 0);
- Collection<TopicPartition> tps = Collections.singletonList(tp);
+ final TopicPartition tp = new TopicPartition("test-topic", 0);
+ final Collection<TopicPartition> tps = Collections.singletonList(tp);
task.open(tps);
task.put(createBatchOfRecord(0, 100));
- Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
+ final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(tp, new OffsetAndMetadata(100));
task.flush(offsets);
@@ -150,25 +176,25 @@ public void testS3ConstantPrefix() {
}
@Test
- public void testS3UTCDatePrefix() {
- AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
+ public void testS3UtcDatePrefix() {
+ final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip");
properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset");
properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix-{{ utc_date }}--");
task.start(properties);
- TopicPartition tp = new TopicPartition("test-topic", 0);
- Collection<TopicPartition> tps = Collections.singletonList(tp);
+ final TopicPartition tp = new TopicPartition("test-topic", 0);
+ final Collection<TopicPartition> tps = Collections.singletonList(tp);
task.open(tps);
task.put(createBatchOfRecord(0, 100));
- Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
+ final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(tp, new OffsetAndMetadata(100));
task.flush(offsets);
- String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz",
+ final String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz",
ZonedDateTime.now(ZoneId.of("UTC")).format(DateTimeFormatter.ISO_LOCAL_DATE));
assertTrue(s3Client.doesObjectExist(TEST_BUCKET, expectedFileName));
@@ -177,34 +203,34 @@ public void testS3UTCDatePrefix() {
@Test
public void testS3LocalDatePrefix() {
- AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
+ final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask();
properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip");
properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset");
properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix-{{ local_date }}--");
task.start(properties);
- TopicPartition tp = new TopicPartition("test-topic", 0);
- Collection<TopicPartition> tps = Collections.singletonList(tp);
+ final TopicPartition tp = new TopicPartition("test-topic", 0);
+ final Collection<TopicPartition> tps = Collections.singletonList(tp);
task.open(tps);
task.put(createBatchOfRecord(0, 100));
- Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
+ final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>();
offsets.put(tp, new OffsetAndMetadata(100));
task.flush(offsets);
- String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz",
+ final String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz",
LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE));
assertTrue(s3Client.doesObjectExist(TEST_BUCKET, expectedFileName));
task.stop();
}
- private Collection<SinkRecord> createBatchOfRecord(int offsetFrom, int offsetTo) {
- ArrayList<SinkRecord> records = new ArrayList<>();
+ private Collection<SinkRecord> createBatchOfRecord(final int offsetFrom, final int offsetTo) {
+ final ArrayList<SinkRecord> records = new ArrayList<>();
for (int offset = offsetFrom; offset < offsetTo; offset++) {
- SinkRecord record = new SinkRecord(
+ final SinkRecord record = new SinkRecord(
"test-topic",
0,
Schema.BYTES_SCHEMA, "test-key".getBytes(),
diff --git a/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java b/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java
index 206c73b8..2222dea4 100644
--- a/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java
+++ b/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java
@@ -1,6 +1,21 @@
-package io.aiven.kafka.connect.s3.templating;
+/*
+ * Copyright (C) 2020 Aiven Oy
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU Affero General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Affero General Public License for more details.
+ *
+ * You should have received a copy of the GNU Affero General Public License
+ * along with this program. If not, see <https://www.gnu.org/licenses/>.
+ */
-import org.junit.Test;
+package io.aiven.kafka.connect.s3.templating;
import java.io.BufferedReader;
import java.io.IOException;
@@ -10,6 +25,8 @@
import java.util.Collection;
import java.util.stream.Collectors;
+import org.junit.Test;
+
import static org.junit.Assert.assertEquals;
public class TemplatingEngineTest {