diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 00000000..00a51aff --- /dev/null +++ b/.gitattributes @@ -0,0 +1,6 @@ +# +# https://help.github.com/articles/dealing-with-line-endings/ +# +# These are explicitly windows files and should use crlf +*.bat text eol=crlf + diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..b0f0496f --- /dev/null +++ b/.gitignore @@ -0,0 +1,10 @@ +target +docs/_build +.idea +*.iml +*.DS_Store +.gradle +.gradletasknamecache +build/ +rpm/ +rpmbuild/ diff --git a/Makefile b/Makefile index 5b6f9f86..37a84be4 100644 --- a/Makefile +++ b/Makefile @@ -1,3 +1,20 @@ +## +# Copyright (C) 2020 Aiven Oy +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see . +## + short_ver = $(shell git describe --abbrev=0 2>/dev/null || echo 0.0.1) long_ver = $(shell git describe --long 2>/dev/null || echo $(short_ver)-0-unknown-g`git describe --always`) @@ -8,15 +25,16 @@ SOURCES := \ src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStream.java \ src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java \ src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTask.java \ - pom.xml \ + build.gradle \ + gradle/ \ + gradlew \ aiven-kafka-connect-s3.spec all: rpm build-dep: sudo dnf install -y --allowerasing --best \ - java-1.8.0-openjdk-devel \ - maven + rpm-build java-1.8.0-openjdk-devel clean: $(RM) -r rpm/ rpmbuild/ @@ -33,4 +51,4 @@ rpm: $(SOURCES) cp "$(CURDIR)/rpmbuild/RPMS/noarch"/*.rpm "$@/" test: - mvn -Dmodule_version=0.0.1 test + ./gradlew -Pmodule_version=0.0.1 test diff --git a/aiven-kafka-connect-s3.spec b/aiven-kafka-connect-s3.spec index e00fcbd8..c6996848 100644 --- a/aiven-kafka-connect-s3.spec +++ b/aiven-kafka-connect-s3.spec @@ -7,7 +7,7 @@ License: Apache (v2) URL: https://aiven.io/ Source0: aiven-kafka-connect-s3-src.tar BuildArch: noarch -BuildRequires: java, maven +BuildRequires: java Requires: java Packager: Heikki Nousiainen @@ -18,11 +18,11 @@ Aiven Kafka Connect S3 Connector %setup %build -mvn -Dmodule_version=%{major_version} package +./gradlew -Pmodule_version=%{major_version} clean build %install %{__mkdir_p} %{buildroot}/opt/aiven-kafka/libs -install target/aiven-kafka-connect-s3-%{version}.jar %{buildroot}/opt/aiven-kafka/libs/aiven-kafka-connect-s3-%{version}.jar +install build/libs/aiven-kafka-connect-s3-%{version}.jar %{buildroot}/opt/aiven-kafka/libs/aiven-kafka-connect-s3-%{version}.jar %files /opt/aiven-kafka/libs/aiven-kafka-connect-s3-%{version}.jar diff --git a/build.gradle b/build.gradle new file mode 100644 index 00000000..2ba8ef43 --- /dev/null +++ b/build.gradle @@ -0,0 +1,137 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + +plugins { + + // https://docs.gradle.org/current/userguide/java_library_plugin.html + id "java-library" + + // https://docs.gradle.org/current/userguide/checkstyle_plugin.html + id "checkstyle" + + // https://docs.gradle.org/current/userguide/jacoco_plugin.html + id "jacoco" + + // https://docs.gradle.org/current/userguide/distribution_plugin.html + id "distribution" + + // https://docs.gradle.org/current/userguide/publishing_maven.html + id "maven-publish" + +} + +group = "io.aiven" +version = hasProperty("module_version") ? "$project.module_version" : 'unknown' + +repositories { + jcenter() +} + +java { + sourceCompatibility = JavaVersion.VERSION_1_8 + targetCompatibility = JavaVersion.VERSION_1_8 +} + +checkstyle { + toolVersion "8.29" + configDir rootProject.file("checkstyle/") +} + +jacoco { + toolVersion = "0.8.5" +} + +ext { + kafkaVersion = "0.11.0.1" + amazonS3Version = "1.11.718" + slf4jVersion = "1.7.25" +} + +distributions { + main { + contents { + from jar + from configurations.runtimeClasspath + + into("/") { + from projectDir + include "version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/" + include "config/" + } + } + } +} + +publishing { + publications { + maven(MavenPublication) { + // Defaults, for clarity + groupId = getGroup() + artifactId = getName() + version = getVersion() + + pom { + name = "Aiven Kafka S3 connector" + description = "A Kafka S3 sink connector for copying data from Kafka to S3." 
+ url = "https://aiven.io" + organization { + name = "Aiven Oy" + url = "https://aiven.io" + } + licenses { + license { + name = "GNU Affero General Public License 3.0" + url = "https://www.gnu.org/licenses/agpl-3.0.en.html" + distribution = "repo" + } + } + scm { + connection = "scm:git:git://github.com/aiven/aiven-kafka-connect-s3.git" + developerConnection = "scm:git:git@github.com:aiven/aiven-kafka-connect-s3.git" + url = "https://github.com/aiven/aiven-kafka-connect-s3.git" + tag = "HEAD" + } + } + } + } +} + +processResources { + filesMatching('aiven-kafka-connect-s3-version.properties') { + expand(version: version) + } +} + +dependencies { + compileOnly "org.apache.kafka:connect-api:$kafkaVersion" + compileOnly "org.apache.kafka:connect-runtime:$kafkaVersion" + + implementation "org.slf4j:slf4j-api:$slf4jVersion" + implementation "com.amazonaws:aws-java-sdk-s3:$amazonS3Version" + + runtimeOnly "org.slf4j:slf4j-log4j12:$slf4jVersion" + + compileOnly "org.apache.kafka:connect-api:$kafkaVersion" + compileOnly "org.apache.kafka:connect-runtime:$kafkaVersion" + compileOnly "org.slf4j:slf4j-api:$slf4jVersion" + + testImplementation "org.apache.kafka:connect-api:$kafkaVersion" + testImplementation "org.apache.kafka:connect-runtime:$kafkaVersion" + testImplementation "org.slf4j:slf4j-simple:$slf4jVersion" + testImplementation 'junit:junit:4.12' + testImplementation 'io.findify:s3mock_2.11:0.2.3' +} diff --git a/checkstyle/checkstyle.xml b/checkstyle/checkstyle.xml new file mode 100644 index 00000000..1e3bdc62 --- /dev/null +++ b/checkstyle/checkstyle.xml @@ -0,0 +1,335 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/checkstyle/java.header b/checkstyle/java.header new file mode 100644 index 00000000..3e509b73 --- /dev/null +++ b/checkstyle/java.header @@ -0,0 +1,16 @@ +/\* + \* Copyright \(C\) 20(19|2[0-9]) Aiven Oy + \* + \* This program is free software: you can redistribute it and/or modify + \* it under the terms of the GNU Affero General Public License as published by + \* the Free Software Foundation, either version 3 of the License, or + \* \(at your option\) any later version. + \* + \* This program is distributed in the hope that it will be useful, + \* but WITHOUT ANY WARRANTY; without even the implied warranty of + \* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + \* GNU Affero General Public License for more details. + \* + \* You should have received a copy of the GNU Affero General Public License + \* along with this program. If not, see . 
+ \*/ diff --git a/checkstyle/suppressions.xml b/checkstyle/suppressions.xml new file mode 100644 index 00000000..6c3f2a9f --- /dev/null +++ b/checkstyle/suppressions.xml @@ -0,0 +1,19 @@ diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar new file mode 100644 index 00000000..f3d88b1c Binary files /dev/null and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties new file mode 100644 index 00000000..353223db --- /dev/null +++ b/gradle/wrapper/gradle-wrapper.properties @@ -0,0 +1,6 @@ +#Thu Mar 05 10:38:18 CET 2020 +distributionUrl=https\://services.gradle.org/distributions/gradle-6.2.2-all.zip +distributionBase=GRADLE_USER_HOME +distributionPath=wrapper/dists +zipStorePath=wrapper/dists +zipStoreBase=GRADLE_USER_HOME diff --git a/gradlew b/gradlew new file mode 100755 index 00000000..2fe81a7d --- /dev/null +++ b/gradlew @@ -0,0 +1,183 @@ +#!/usr/bin/env sh + +# +# Copyright 2015 the original author or authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +############################################################################## +## +## Gradle start up script for UN*X +## +############################################################################## + +# Attempt to set APP_HOME +# Resolve links: $0 may be a link +PRG="$0" +# Need this for relative symlinks. +while [ -h "$PRG" ] ; do + ls=`ls -ld "$PRG"` + link=`expr "$ls" : '.*-> \(.*\)$'` + if expr "$link" : '/.*' > /dev/null; then + PRG="$link" + else + PRG=`dirname "$PRG"`"/$link" + fi +done +SAVED="`pwd`" +cd "`dirname \"$PRG\"`/" >/dev/null +APP_HOME="`pwd -P`" +cd "$SAVED" >/dev/null + +APP_NAME="Gradle" +APP_BASE_NAME=`basename "$0"` + +# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' + +# Use the maximum available, or set MAX_FD != -1 to use that value. +MAX_FD="maximum" + +warn () { + echo "$*" +} + +die () { + echo + echo "$*" + echo + exit 1 +} + +# OS specific support (must be 'true' or 'false'). +cygwin=false +msys=false +darwin=false +nonstop=false +case "`uname`" in + CYGWIN* ) + cygwin=true + ;; + Darwin* ) + darwin=true + ;; + MINGW* ) + msys=true + ;; + NONSTOP* ) + nonstop=true + ;; +esac + +CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar + +# Determine the Java command to use to start the JVM. +if [ -n "$JAVA_HOME" ] ; then + if [ -x "$JAVA_HOME/jre/sh/java" ] ; then + # IBM's JDK on AIX uses strange locations for the executables + JAVACMD="$JAVA_HOME/jre/sh/java" + else + JAVACMD="$JAVA_HOME/bin/java" + fi + if [ ! -x "$JAVACMD" ] ; then + die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME + +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." + fi +else + JAVACMD="java" + which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
+ +Please set the JAVA_HOME variable in your environment to match the +location of your Java installation." +fi + +# Increase the maximum file descriptors if we can. +if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then + MAX_FD_LIMIT=`ulimit -H -n` + if [ $? -eq 0 ] ; then + if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then + MAX_FD="$MAX_FD_LIMIT" + fi + ulimit -n $MAX_FD + if [ $? -ne 0 ] ; then + warn "Could not set maximum file descriptor limit: $MAX_FD" + fi + else + warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" + fi +fi + +# For Darwin, add options to specify how the application appears in the dock +if $darwin; then + GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" +fi + +# For Cygwin or MSYS, switch paths to Windows format before running java +if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then + APP_HOME=`cygpath --path --mixed "$APP_HOME"` + CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` + JAVACMD=`cygpath --unix "$JAVACMD"` + + # We build the pattern for arguments to be converted via cygpath + ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` + SEP="" + for dir in $ROOTDIRSRAW ; do + ROOTDIRS="$ROOTDIRS$SEP$dir" + SEP="|" + done + OURCYGPATTERN="(^($ROOTDIRS))" + # Add a user-defined pattern to the cygpath arguments + if [ "$GRADLE_CYGPATTERN" != "" ] ; then + OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" + fi + # Now convert the arguments - kludge to limit ourselves to /bin/sh + i=0 + for arg in "$@" ; do + CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` + CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option + + if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition + eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` + else + eval `echo args$i`="\"$arg\"" + fi + i=`expr $i + 1` + done + case $i in + 0) set -- ;; + 1) set -- "$args0" ;; + 2) set -- "$args0" "$args1" ;; + 3) set -- "$args0" "$args1" "$args2" ;; + 4) set -- "$args0" "$args1" "$args2" "$args3" ;; + 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; + 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; + 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; + 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; + 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; + esac +fi + +# Escape application args +save () { + for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done + echo " " +} +APP_ARGS=`save "$@"` + +# Collect all arguments for the java command, following the shell quoting and substitution rules +eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" + +exec "$JAVACMD" "$@" diff --git a/gradlew.bat b/gradlew.bat new file mode 100644 index 00000000..9618d8d9 --- /dev/null +++ b/gradlew.bat @@ -0,0 +1,100 @@ +@rem +@rem Copyright 2015 the original author or authors. +@rem +@rem Licensed under the Apache License, Version 2.0 (the "License"); +@rem you may not use this file except in compliance with the License. 
+@rem You may obtain a copy of the License at +@rem +@rem https://www.apache.org/licenses/LICENSE-2.0 +@rem +@rem Unless required by applicable law or agreed to in writing, software +@rem distributed under the License is distributed on an "AS IS" BASIS, +@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +@rem See the License for the specific language governing permissions and +@rem limitations under the License. +@rem + +@if "%DEBUG%" == "" @echo off +@rem ########################################################################## +@rem +@rem Gradle startup script for Windows +@rem +@rem ########################################################################## + +@rem Set local scope for the variables with windows NT shell +if "%OS%"=="Windows_NT" setlocal + +set DIRNAME=%~dp0 +if "%DIRNAME%" == "" set DIRNAME=. +set APP_BASE_NAME=%~n0 +set APP_HOME=%DIRNAME% + +@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. +set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" + +@rem Find java.exe +if defined JAVA_HOME goto findJavaFromJavaHome + +set JAVA_EXE=java.exe +%JAVA_EXE% -version >NUL 2>&1 +if "%ERRORLEVEL%" == "0" goto init + +echo. +echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:findJavaFromJavaHome +set JAVA_HOME=%JAVA_HOME:"=% +set JAVA_EXE=%JAVA_HOME%/bin/java.exe + +if exist "%JAVA_EXE%" goto init + +echo. +echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% +echo. +echo Please set the JAVA_HOME variable in your environment to match the +echo location of your Java installation. + +goto fail + +:init +@rem Get command-line arguments, handling Windows variants + +if not "%OS%" == "Windows_NT" goto win9xME_args + +:win9xME_args +@rem Slurp the command line arguments. +set CMD_LINE_ARGS= +set _SKIP=2 + +:win9xME_args_slurp +if "x%~1" == "x" goto execute + +set CMD_LINE_ARGS=%* + +:execute +@rem Setup the command line + +set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar + +@rem Execute Gradle +"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% + +:end +@rem End local scope for the variables with windows NT shell +if "%ERRORLEVEL%"=="0" goto mainEnd + +:fail +rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of +rem the _cmd.exe /c_ return code! +if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 +exit /b 1 + +:mainEnd +if "%OS%"=="Windows_NT" endlocal + +:omega diff --git a/licenses/LICENSE-aws.txt b/licenses/LICENSE-aws.txt new file mode 100644 index 00000000..aeea9995 --- /dev/null +++ b/licenses/LICENSE-aws.txt @@ -0,0 +1,63 @@ +Apache License +Version 2.0, January 2004 + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + + 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and + 2. You must cause any modified files to carry prominent notices stating that You changed the files; and + 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +Note: Other license terms may apply to certain, identified software files contained within or distributed with the accompanying software if such terms are included in the directory containing the accompanying software. Such other license terms will then apply in lieu of the terms of the software license above. + +JSON processing code subject to the JSON License from JSON.org: + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +The Software shall be used for Good, not Evil. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/notices/NOTICE-aws.txt b/notices/NOTICE-aws.txt new file mode 100644 index 00000000..979460ec --- /dev/null +++ b/notices/NOTICE-aws.txt @@ -0,0 +1,13 @@ +AWS IoT Device SDK for Java +Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. + +This product includes software developed by +Amazon Technologies, Inc (http://www.amazon.com/). 
+ +********************** +THIRD PARTY COMPONENTS +********************** +This software includes third party software subject to the following copyrights: +- PKCS#1 and PKCS#8 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc. + +The licenses for these third party components are included in LICENSE.txt \ No newline at end of file diff --git a/pom.xml b/pom.xml deleted file mode 100644 index e0f5fcb3..00000000 --- a/pom.xml +++ /dev/null @@ -1,82 +0,0 @@ - - 4.0.0 - io.aiven - aiven-kafka-connect-s3 - ${module_version} - - 1.8 - 1.8 - - - - - maven-assembly-plugin - - - jar-with-dependencies - - false - - - - make-assembly - - package - - single - - - - - - - - - com.amazonaws - aws-java-sdk-s3 - 1.11.207 - - - org.apache.kafka - connect-api - 0.11.0.1 - provided - - - org.apache.kafka - connect-runtime - 0.11.0.1 - provided - - - org.slf4j - slf4j-api - 1.7.25 - provided - - - org.slf4j - slf4j-simple - 1.7.25 - test - - - org.slf4j - slf4j-log4j12 - 1.7.25 - runtime - - - junit - junit - 4.12 - test - - - io.findify - s3mock_2.11 - 0.2.3 - test - - - diff --git a/settings.gradle b/settings.gradle new file mode 100644 index 00000000..07975ce4 --- /dev/null +++ b/settings.gradle @@ -0,0 +1,10 @@ +/* + * This file was generated by the Gradle 'init' task. + * + * The settings file is used to specify which projects to include in your build. + * + * Detailed information about configuring a multi-project build in Gradle can be found + * in the user manual at https://docs.gradle.org/6.1.1/userguide/multi_project_builds.html + */ + +rootProject.name = 'aiven-kafka-connect-s3' diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Config.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Config.java index 55e4fd94..a5858e34 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Config.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Config.java @@ -1,3 +1,20 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + package io.aiven.kafka.connect.s3; import org.apache.kafka.common.config.ConfigDef; @@ -6,7 +23,8 @@ public class AivenKafkaConnectS3Config { public static ConfigDef newConfigDef() { - ConfigDef configDef = new ConfigDef(); + + final ConfigDef configDef = new ConfigDef(); configDef.define( AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID, @@ -57,9 +75,9 @@ public static ConfigDef newConfigDef() { Type.STRING, AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_GZIP, Importance.MEDIUM, - "Output compression. Valid values are: " + - AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_GZIP + " and " + - AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_NONE + "Output compression. 
Valid values are: " + + AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_GZIP + " and " + + AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_NONE ); configDef.define( @@ -67,11 +85,11 @@ public static ConfigDef newConfigDef() { Type.STRING, AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE, Importance.MEDIUM, - "Output fields. A comma separated list of one or more: " + - AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_KEY + ", " + - AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_OFFSET + ", " + - AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_TIMESTAMP + ", " + - AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE + "Output fields. A comma separated list of one or more: " + + AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_KEY + ", " + + AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_OFFSET + ", " + + AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_TIMESTAMP + ", " + + AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE ); return configDef; diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Constants.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Constants.java index 9a774ed4..d367177b 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Constants.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3Constants.java @@ -1,3 +1,20 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + package io.aiven.kafka.connect.s3; public class AivenKafkaConnectS3Constants { diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUpload.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUpload.java index dd156b83..d91e9e60 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUpload.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUpload.java @@ -1,22 +1,36 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + package io.aiven.kafka.connect.s3; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; + import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.PartETag; import com.amazonaws.services.s3.model.AbortMultipartUploadRequest; import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest; import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest; import com.amazonaws.services.s3.model.InitiateMultipartUploadResult; +import com.amazonaws.services.s3.model.PartETag; import com.amazonaws.services.s3.model.UploadPartRequest; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.util.ArrayList; -import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - public class AivenKafkaConnectS3MultipartUpload { - private static final Logger logger = LoggerFactory.getLogger(AivenKafkaConnectS3MultipartUpload.class); + private AmazonS3 s3; private String bucketName; private String keyName; @@ -24,23 +38,25 @@ public class AivenKafkaConnectS3MultipartUpload { private List partETags; private int partIndex; - public AivenKafkaConnectS3MultipartUpload(AmazonS3 s3, String bucketName, String keyName) { - InitiateMultipartUploadRequest initRequest; - InitiateMultipartUploadResult initResponse; + public AivenKafkaConnectS3MultipartUpload( + final AmazonS3 s3, + final String bucketName, + final String keyName) { this.s3 = s3; this.bucketName = bucketName; this.keyName = keyName; - this.partETags = new ArrayList(); + this.partETags = new ArrayList<>(); this.partIndex = 1; - initRequest = new InitiateMultipartUploadRequest(bucketName, keyName); - initResponse = s3.initiateMultipartUpload(initRequest); + final InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest(bucketName, keyName); + final InitiateMultipartUploadResult initResponse = s3.initiateMultipartUpload(initRequest); this.uploadId = initResponse.getUploadId(); } - public void upload_part(InputStream data, int len) { - UploadPartRequest uploadRequest = new UploadPartRequest() + public void uploadPart(final InputStream data, + final int len) { + final UploadPartRequest uploadRequest = new UploadPartRequest() .withBucketName(this.bucketName) .withKey(this.keyName) .withUploadId(this.uploadId) @@ -54,7 +70,7 @@ public void upload_part(InputStream data, int len) { } public void commit() { - CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest( + final CompleteMultipartUploadRequest completeRequest = new CompleteMultipartUploadRequest( this.bucketName, this.keyName, this.uploadId, @@ -65,7 +81,7 @@ public void commit() { } public void abort() { - AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest( + final AbortMultipartUploadRequest abortRequest = new AbortMultipartUploadRequest( this.bucketName, this.keyName, this.uploadId diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStream.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStream.java index 088a9fc0..ec93b746 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStream.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStream.java @@ -1,101 +1,119 @@ -package io.aiven.kafka.connect.s3; +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software 
Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.model.ObjectMetadata; -import com.amazonaws.services.s3.model.PutObjectRequest; +package io.aiven.kafka.connect.s3; import java.io.ByteArrayInputStream; import java.io.InputStream; import java.io.OutputStream; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; + +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.model.ObjectMetadata; public class AivenKafkaConnectS3OutputStream extends OutputStream { - private static final Logger logger = LoggerFactory.getLogger(AivenKafkaConnectS3OutputStream.class); + private AmazonS3 s3Client; private String bucketName; private String keyName; private byte[] buffer; - private int buffer_len; - private int buffer_size; - AivenKafkaConnectS3MultipartUpload multipart_upload; + private int bufferLen; + private int bufferSize; + AivenKafkaConnectS3MultipartUpload multipartUpload; - public AivenKafkaConnectS3OutputStream(AmazonS3 s3Client, String bucketName, String keyName) { + public AivenKafkaConnectS3OutputStream( + final AmazonS3 s3Client, + final String bucketName, + final String keyName) { this.s3Client = s3Client; this.bucketName = bucketName; this.keyName = keyName; - this.buffer_size = 32 * 1024; - this.buffer = new byte[this.buffer_size]; - this.buffer_len = 0; - this.multipart_upload = null; + this.bufferSize = 32 * 1024; + this.buffer = new byte[this.bufferSize]; + this.bufferLen = 0; + this.multipartUpload = null; } - private void expand_buffer(int data_length) { - if (this.buffer_size - this.buffer_len < data_length) { - byte[] new_buffer; - int new_buffer_size = this.buffer_size; - while (new_buffer_size - this.buffer_len < data_length) { - new_buffer_size = new_buffer_size * 2; + private void expandBuffer(final int dataLength) { + if (this.bufferSize - this.bufferLen < dataLength) { + int newBufferSize = this.bufferSize; + while (newBufferSize - this.bufferLen < dataLength) { + newBufferSize = newBufferSize * 2; } - new_buffer = new byte[new_buffer_size]; - System.arraycopy(this.buffer, 0, new_buffer, 0, this.buffer_len); - this.buffer = new_buffer; - this.buffer_size = new_buffer_size; + final byte[] newBuffer = new byte[newBufferSize]; + System.arraycopy(this.buffer, 0, newBuffer, 0, this.bufferLen); + this.buffer = newBuffer; + this.bufferSize = newBufferSize; } } @Override - public void write(byte[] data, int offset, int len) { - this.expand_buffer(len); - System.arraycopy(data, offset, this.buffer, this.buffer_len, len); - this.buffer_len += len; + public void write(final byte[] data, + final int offset, + final int len) { + this.expandBuffer(len); + System.arraycopy(data, offset, this.buffer, this.bufferLen, len); + this.bufferLen += len; } @Override - public void write(byte[] data) { + public void write(final byte[] data) { this.write(data, 0, data.length); } @Override - public void write(int data_byte) { - this.expand_buffer(1); - this.buffer[this.buffer_len] = (byte)data_byte; - this.buffer_len += 
1; + public void write(final int dataByte) { + this.expandBuffer(1); + this.buffer[this.bufferLen] = (byte) dataByte; + this.bufferLen += 1; } @Override public void flush() { // flush buffered data to S3, if we have at least the minimum required 5MB for multipart request - if (this.buffer_len > 5 * 1024 * 1024) { - if (this.multipart_upload == null) { - this.multipart_upload = new AivenKafkaConnectS3MultipartUpload(this.s3Client, this.bucketName, this.keyName); + if (this.bufferLen > 5 * 1024 * 1024) { + if (this.multipartUpload == null) { + this.multipartUpload = + new AivenKafkaConnectS3MultipartUpload(this.s3Client, this.bucketName, this.keyName); } - InputStream stream = new ByteArrayInputStream(this.buffer, 0, this.buffer_len); - this.multipart_upload.upload_part(stream, this.buffer_len); - this.buffer_len = 0; + //FIXME use try-resources here + final InputStream stream = new ByteArrayInputStream(this.buffer, 0, this.bufferLen); + this.multipartUpload.uploadPart(stream, this.bufferLen); + this.bufferLen = 0; } } @Override public void close() { - if (this.buffer_len > 0) { - InputStream stream = new ByteArrayInputStream(this.buffer, 0, this.buffer_len); + if (this.bufferLen > 0) { + final InputStream stream = new ByteArrayInputStream(this.buffer, 0, this.bufferLen); - if (this.multipart_upload != null) { - this.multipart_upload.upload_part(stream, this.buffer_len); + if (this.multipartUpload != null) { + this.multipartUpload.uploadPart(stream, this.bufferLen); } else { - ObjectMetadata metadata = new ObjectMetadata(); - metadata.setContentLength(this.buffer_len); + final ObjectMetadata metadata = new ObjectMetadata(); + metadata.setContentLength(this.bufferLen); this.s3Client.putObject(this.bucketName, this.keyName, stream, metadata); } - this.buffer_len = 0; + this.bufferLen = 0; } - if (this.multipart_upload != null) { - this.multipart_upload.commit(); - this.multipart_upload = null; + if (this.multipartUpload != null) { + this.multipartUpload.commit(); + this.multipartUpload = null; } } } diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java index c3b5e9ee..6533604e 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java @@ -1,19 +1,35 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . 
+ */ + package io.aiven.kafka.connect.s3; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; + import org.apache.kafka.common.config.ConfigDef; import org.apache.kafka.common.utils.AppInfoParser; import org.apache.kafka.connect.connector.Connector; import org.apache.kafka.connect.connector.Task; import org.apache.kafka.connect.errors.ConnectException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; public class AivenKafkaConnectS3SinkConnector extends Connector { - private static final Logger logger = LoggerFactory.getLogger(AivenKafkaConnectS3SinkConnector.class); + private Map configProperties; @Override @@ -27,25 +43,26 @@ public Class taskClass() { } @Override - public void start(Map properties) { - String[] mandatory_keys = new String[] { + public void start(final Map properties) { + final String[] mandatoryKeys = new String[] { AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID, AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY, AivenKafkaConnectS3Constants.AWS_S3_BUCKET }; - for (String property_key: mandatory_keys) { - if (properties.get(property_key) == null) { - throw new ConnectException("Mandatory parameter '" + property_key + "' is missing."); + for (final String pk: mandatoryKeys) { + if (properties.get(pk) == null) { + throw new ConnectException("Mandatory parameter '" + pk + "' is missing."); } } - String fieldConfig = properties.get(AivenKafkaConnectS3Constants.OUTPUT_FIELDS); + final String fieldConfig = properties.get(AivenKafkaConnectS3Constants.OUTPUT_FIELDS); if (fieldConfig != null) { - String[] fieldNames = fieldConfig.split("\\s*,\\s*"); + final String[] fieldNames = fieldConfig.split("\\s*,\\s*"); for (int i = 0; i < fieldNames.length; i++) { - if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_KEY) || - fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_OFFSET) || - fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_TIMESTAMP) || - fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE)) { + //FIXME simplify if/else statements + if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_KEY) + || fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_OFFSET) + || fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_TIMESTAMP) + || fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE)) { // pass } else { throw new ConnectException("Unknown output field name '" + fieldNames[i] + "'."); @@ -60,9 +77,9 @@ public void stop() { } @Override - public List> taskConfigs(int maxTasks) { - List> taskConfigs = new ArrayList<>(); - Map taskProperties = new HashMap<>(); + public List> taskConfigs(final int maxTasks) { + final List> taskConfigs = new ArrayList<>(); + final Map taskProperties = new HashMap<>(); taskProperties.putAll(configProperties); for (int i = 0; i < maxTasks; i++) { taskConfigs.add(taskProperties); diff --git a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTask.java b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTask.java index 3b8bb234..e7db5afb 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTask.java +++ b/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTask.java @@ -1,11 +1,22 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the 
terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see . + */ + package io.aiven.kafka.connect.s3; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; -import com.amazonaws.regions.Regions; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; import java.io.IOException; import java.io.OutputStream; import java.time.LocalDateTime; @@ -18,7 +29,6 @@ import java.util.Map; import java.util.zip.GZIPOutputStream; -import io.aiven.kafka.connect.s3.templating.TemplatingEngine; import org.apache.kafka.clients.consumer.OffsetAndMetadata; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.connect.converters.ByteArrayConverter; @@ -26,11 +36,21 @@ import org.apache.kafka.connect.sink.SinkRecord; import org.apache.kafka.connect.sink.SinkTask; import org.apache.kafka.connect.storage.Converter; + +import io.aiven.kafka.connect.s3.templating.TemplatingEngine; + +import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; +import com.amazonaws.regions.Regions; +import com.amazonaws.services.s3.AmazonS3; +import com.amazonaws.services.s3.AmazonS3ClientBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class AivenKafkaConnectS3SinkTask extends SinkTask { - private static final Logger logger = LoggerFactory.getLogger(AivenKafkaConnectS3SinkConnector.class); + + private static final Logger LOGGER = LoggerFactory.getLogger(AivenKafkaConnectS3SinkConnector.class); private Map taskConfig; @@ -40,7 +60,7 @@ public class AivenKafkaConnectS3SinkTask extends SinkTask { private AmazonS3 s3Client; - private Map output_streams = new HashMap(); + private Map outputStreams = new HashMap<>(); private enum OutputFieldType { KEY, @@ -49,7 +69,7 @@ private enum OutputFieldType { VALUE } - OutputFieldType[] output_fields; + OutputFieldType[] outputFields; private enum CompressionType { GZIP, @@ -59,12 +79,17 @@ private enum CompressionType { CompressionType outputCompression = CompressionType.GZIP; private final TemplatingEngine templatingEngine = new TemplatingEngine(); + { templatingEngine.bindVariable("utc_date", - () -> ZonedDateTime.now(ZoneId.of("UTC")).format(DateTimeFormatter.ISO_LOCAL_DATE) + () -> { + return ZonedDateTime.now(ZoneId.of("UTC")).format(DateTimeFormatter.ISO_LOCAL_DATE); + } ); templatingEngine.bindVariable("local_date", - () -> LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE) + () -> { + return LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE); + } ); } @@ -74,13 +99,13 @@ public String version() { } @Override - public void start(Map props) { + public void start(final Map props) { this.taskConfig = new HashMap<>(props); - this.logger.info("AivenKafkaConnectS3SinkTask starting"); + LOGGER.info("AivenKafkaConnectS3SinkTask starting"); - 
AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); + final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); - BasicAWSCredentials awsCreds = new BasicAWSCredentials( + final BasicAWSCredentials awsCreds = new BasicAWSCredentials( props.get(AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID), props.get(AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY) ); @@ -91,12 +116,12 @@ public void start(Map props) { region = Regions.US_EAST_1.getName(); } - String endpoint_url = props.get(AivenKafkaConnectS3Constants.AWS_S3_ENDPOINT); + final String endpointUrl = props.get(AivenKafkaConnectS3Constants.AWS_S3_ENDPOINT); - if (endpoint_url == null || endpoint_url.equals("")) { + if (endpointUrl == null || endpointUrl.equals("")) { builder.withRegion(Regions.fromName(region)); } else { - builder.withEndpointConfiguration(new EndpointConfiguration(endpoint_url, region)); + builder.withEndpointConfiguration(new EndpointConfiguration(endpointUrl, region)); builder.withPathStyleAccessEnabled(true); } @@ -109,24 +134,25 @@ public void start(Map props) { if (fieldConfig == null) { fieldConfig = AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE; } - String[] fieldNames = fieldConfig.split("\\s*,\\s*"); - this.output_fields = new OutputFieldType[fieldNames.length]; + final String[] fieldNames = fieldConfig.split("\\s*,\\s*"); + this.outputFields = new OutputFieldType[fieldNames.length]; for (int i = 0; i < fieldNames.length; i++) { if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_KEY)) { - this.output_fields[i] = OutputFieldType.KEY; + this.outputFields[i] = OutputFieldType.KEY; } else if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_OFFSET)) { - this.output_fields[i] = OutputFieldType.OFFSET; + this.outputFields[i] = OutputFieldType.OFFSET; } else if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_TIMESTAMP)) { - this.output_fields[i] = OutputFieldType.TIMESTAMP; + this.outputFields[i] = OutputFieldType.TIMESTAMP; } else if (fieldNames[i].equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_FIELD_NAME_VALUE)) { - this.output_fields[i] = OutputFieldType.VALUE; + this.outputFields[i] = OutputFieldType.VALUE; } else { throw new ConnectException("Unknown output field name '" + fieldNames[i] + "'."); } } - String compression = props.get(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION); + final String compression = props.get(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION); if (compression != null) { + //FIXME simplify if/else statements if (compression.equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_GZIP)) { // default } else if (compression.equalsIgnoreCase(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION_TYPE_NONE)) { @@ -139,96 +165,103 @@ public void start(Map props) { @Override public void stop() { - logger.info("AivenKafkaConnectS3SinkTask stopping"); - for (TopicPartition tp: this.output_streams.keySet()) { - OutputStream stream = this.output_streams.get(tp); + LOGGER.info("AivenKafkaConnectS3SinkTask stopping"); + for (final TopicPartition tp: this.outputStreams.keySet()) { + final OutputStream stream = this.outputStreams.get(tp); if (stream != null) { try { stream.close(); - } catch (IOException e) { - this.logger.error("Error closing stream " + tp.topic() + "-" + tp.partition() + ": " + e); + } catch (final IOException e) { + LOGGER.error("Error closing stream " + tp.topic() + "-" + tp.partition() + ": " + e); } - this.output_streams.remove(tp); + 
this.outputStreams.remove(tp); } } } @Override - public void open(Collection partitions) { + public void open(final Collection partitions) { // We don't need to do anything here; we'll create the streams on first message on a partition - for (TopicPartition tp: partitions) { - this.logger.info("New assignment " + tp.topic() + "#" + tp.partition()); + for (final TopicPartition tp: partitions) { + LOGGER.info("New assignment " + tp.topic() + "#" + tp.partition()); } } @Override - public void close(Collection partitions) throws ConnectException { - for (TopicPartition tp: partitions) { - this.logger.info("Unassigned " + tp.topic() + "#" + tp.partition()); - OutputStream stream = this.output_streams.get(tp); + public void close(final Collection partitions) throws ConnectException { + for (final TopicPartition tp: partitions) { + LOGGER.info("Unassigned " + tp.topic() + "#" + tp.partition()); + final OutputStream stream = this.outputStreams.get(tp); if (stream != null) { try { stream.close(); - } catch (IOException e) { + } catch (final IOException e) { throw new ConnectException(e); } - this.output_streams.remove(tp); + this.outputStreams.remove(tp); } } } @Override - public void put(Collection records) throws ConnectException { - this.logger.info("Processing " + records.size() + " records"); - for (SinkRecord record: records) { - String topic = record.topic(); - Integer partition = record.kafkaPartition(); - TopicPartition tp = new TopicPartition(topic, partition); + public void put(final Collection records) throws ConnectException { + LOGGER.info("Processing " + records.size() + " records"); + for (final SinkRecord record: records) { + final String topic = record.topic(); + final Integer partition = record.kafkaPartition(); + final TopicPartition tp = new TopicPartition(topic, partition); // identify or allocate a new output stream for topic/partition combination - OutputStream stream = this.output_streams.get(tp); + OutputStream stream = this.outputStreams.get(tp); if (stream == null) { - String keyName = getS3Prefix() + topic + "-" + partition + "-" + String.format("%010d", record.kafkaOffset()); + String keyName = getS3Prefix() + topic + + "-" + partition + + "-" + String.format("%010d", record.kafkaOffset()); if (this.outputCompression == CompressionType.GZIP) { keyName = keyName + ".gz"; } - stream = new AivenKafkaConnectS3OutputStream(this.s3Client, this.taskConfig.get(AivenKafkaConnectS3Constants.AWS_S3_BUCKET), keyName); + stream = + new AivenKafkaConnectS3OutputStream( + this.s3Client, + this.taskConfig.get(AivenKafkaConnectS3Constants.AWS_S3_BUCKET), + keyName + ); if (this.outputCompression == CompressionType.GZIP) { try { stream = new GZIPOutputStream(stream); - } catch (IOException e) { + } catch (final IOException e) { throw new ConnectException(e); } } - this.output_streams.put(tp, stream); + this.outputStreams.put(tp, stream); } // Create output with the requested fields - StringBuilder outputRecordBuilder = new StringBuilder(4096); - for (int i = 0; i < this.output_fields.length; i++) { + final StringBuilder outputRecordBuilder = new StringBuilder(4096); + for (int i = 0; i < this.outputFields.length; i++) { if (i > 0) { outputRecordBuilder.append(","); } - switch (this.output_fields[i]) { + switch (this.outputFields[i]) { case KEY: - Object key_raw = record.key(); - if (key_raw != null) { - byte[] key = this.keyConverter.fromConnectData( + final Object keyRaw = record.key(); + if (keyRaw != null) { + final byte[] key = this.keyConverter.fromConnectData( record.topic(), 
- record.valueSchema(), + record.keySchema(), - key_raw + keyRaw ); outputRecordBuilder.append(this.b64Encoder.encodeToString(key)); } break; case VALUE: - Object value_raw = record.value(); - if (value_raw != null) { - byte[] value = this.valueConverter.fromConnectData( + final Object valueRaw = record.value(); + if (valueRaw != null) { + final byte[] value = this.valueConverter.fromConnectData( record.topic(), record.valueSchema(), - value_raw + valueRaw ); outputRecordBuilder.append(this.b64Encoder.encodeToString(value)); } @@ -239,6 +272,7 @@ public void put(Collection<SinkRecord> records) throws ConnectException { case OFFSET: outputRecordBuilder.append(record.kafkaOffset()); break; + default: } } outputRecordBuilder.append("\n"); @@ -246,33 +280,33 @@ public void put(Collection<SinkRecord> records) throws ConnectException { // write output to the topic/partition specific stream try { stream.write(outputRecordBuilder.toString().getBytes()); - } catch (IOException e) { + } catch (final IOException e) { throw new ConnectException(e); } } // Send flush signal down the streams, and give opportunity for part uploads - for (OutputStream stream: this.output_streams.values()) { + for (final OutputStream stream: this.outputStreams.values()) { try { stream.flush(); - } catch (IOException e) { + } catch (final IOException e) { throw new ConnectException(e); } } } @Override - public void flush(Map<TopicPartition, OffsetAndMetadata> offsets) { - for (TopicPartition tp: offsets.keySet()) { - OutputStream stream = this.output_streams.get(tp); + public void flush(final Map<TopicPartition, OffsetAndMetadata> offsets) { + for (final TopicPartition tp: offsets.keySet()) { + final OutputStream stream = this.outputStreams.get(tp); if (stream != null) { - this.logger.info("Flush records for " + tp.topic() + "-" + tp.partition()); + LOGGER.info("Flush records for " + tp.topic() + "-" + tp.partition()); try { stream.close(); - } catch (IOException e) { + } catch (final IOException e) { throw new ConnectException(e); } - this.output_streams.remove(tp); + this.outputStreams.remove(tp); } } }
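To make the field handling concrete, here is a sketch of the line put() builds for output_fields = "value,key,timestamp,offset": key and value bytes are base64-encoded, offset (and, by the same pattern, timestamp) is appended as a plain number, fields are comma-separated, and every record ends with a newline; a null key or value simply leaves its field empty. The sample bytes are assumptions, only the format follows the code above.

import java.util.Base64;

// Assumed record contents; the format mirrors the switch in put() above.
final Base64.Encoder b64 = Base64.getEncoder();
final String line = b64.encodeToString("some-value".getBytes()) + ","
        + b64.encodeToString("some-key".getBytes()) + ","
        + 1577836800000L + "," // record timestamp in epoch millis (that branch sits outside the visible hunk)
        + 42L + "\n";          // record.kafkaOffset()
// line == "c29tZS12YWx1ZQ==,c29tZS1rZXk=,1577836800000,42\n"

diff --git a/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java b/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java index 7059f1e8..3787fdfa 100644 --- a/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java +++ b/src/main/java/io/aiven/kafka/connect/s3/templating/TemplatingEngine.java @@ -1,3 +1,20 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + package io.aiven.kafka.connect.s3.templating; import java.util.HashMap; @@ -8,11 +25,8 @@ /** * A simple templating engine that allows to bind variables to supplier functions. - * - *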
<p>Variable syntax: {@code {{ variable_name }}} (arbitrary number of space inside the braces). - * + * Variable syntax: {@code {{ variable_name }}} (arbitrary number of space inside the braces). * Non-bound variables are left as is. - * */ public final class TemplatingEngine { private static Pattern variablePattern = Pattern.compile("\\{\\{\\s*(\\w+)\\s*}}"); // {{ var }}
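A hypothetical usage sketch for the engine: the bindVariable() and render() method names are assumptions made for illustration (this hunk only shows the variable pattern and the class declaration), but the substitution behaviour follows the javadoc above.

// Hypothetical API sketch; method names are not taken from this diff.
final TemplatingEngine engine = new TemplatingEngine();
engine.bindVariable("utc_date", () -> "2020-01-31");              // bind a variable to a supplier
final String expanded = engine.render("prefix-{{ utc_date }}--"); // -> "prefix-2020-01-31--"
final String untouched = engine.render("{{ unbound }}");          // non-bound variables are left as is

diff --git a/src/main/resources/aiven-kafka-connect-s3-version.properties b/src/main/resources/aiven-kafka-connect-s3-version.properties new file mode 100644 index 00000000..c5771996 --- /dev/null +++ b/src/main/resources/aiven-kafka-connect-s3-version.properties @@ -0,0 +1,17 @@ +## +# Copyright (C) 2020 Aiven Oy +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU Affero General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU Affero General Public License for more details. +# +# You should have received a copy of the GNU Affero General Public License +# along with this program. If not, see <https://www.gnu.org/licenses/>. +## +version=${version ?: 'unknown'} diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java index 5650cd8d..c46681c4 100644 --- a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java +++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3MultipartUploadTest.java @@ -1,61 +1,80 @@ -import com.amazonaws.auth.BasicAWSCredentials; +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. 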
+ */ + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Random; + +import io.aiven.kafka.connect.s3.AivenKafkaConnectS3MultipartUpload; + import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import com.amazonaws.services.s3.model.GetObjectRequest; import com.amazonaws.services.s3.model.S3Object; -import io.aiven.kafka.connect.s3.AivenKafkaConnectS3MultipartUpload; import io.findify.s3mock.S3Mock; -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.io.IOException; -import java.util.Random; import org.junit.Test; -import static org.junit.Assert.assertEquals; + import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; public class AivenKafkaConnectS3MultipartUploadTest { @Test public void testAivenKafkaConnectS3MultipartUploadTest() throws IOException { - Random generator = new Random(); - int port = generator.nextInt(10000) + 10000; + final Random generator = new Random(); + final int port = generator.nextInt(10000) + 10000; - S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build(); + final S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build(); api.start(); - BasicAWSCredentials awsCreds = new BasicAWSCredentials( + final BasicAWSCredentials awsCreds = new BasicAWSCredentials( "test_key_id", "test_secret_key" ); - AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); + final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); builder.withCredentials(new AWSStaticCredentialsProvider(awsCreds)); builder.withEndpointConfiguration(new EndpointConfiguration("http://localhost:" + port, "us-west-2")); builder.withPathStyleAccessEnabled(true); - AmazonS3 s3Client = builder.build(); - + final AmazonS3 s3Client = builder.build(); s3Client.createBucket("test-bucket"); - AivenKafkaConnectS3MultipartUpload mp = new AivenKafkaConnectS3MultipartUpload( + final AivenKafkaConnectS3MultipartUpload mp = new AivenKafkaConnectS3MultipartUpload( s3Client, "test-bucket", "test-object" ); - byte[] data = "foobar".getBytes(); - InputStream stream = new ByteArrayInputStream(data, 0, data.length); - mp.upload_part(stream, data.length); + final byte[] data = "foobar".getBytes(); + final InputStream stream = new ByteArrayInputStream(data, 0, data.length); + mp.uploadPart(stream, data.length); mp.commit(); - S3Object object = s3Client.getObject(new GetObjectRequest("test-bucket", "test-object")); - InputStream objectData = object.getObjectContent(); + final S3Object object = s3Client.getObject(new GetObjectRequest("test-bucket", "test-object")); + final InputStream objectData = object.getObjectContent(); assertEquals(objectData.available(), 6); - byte[] stored_data = new byte[data.length]; - objectData.read(stored_data, 0, data.length); - assertArrayEquals(data, stored_data); + final byte[] storedData = new byte[data.length]; + objectData.read(storedData, 0, data.length); + assertArrayEquals(data, storedData); objectData.close(); api.stop(); diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java index e49d40f4..f5171bbb 100644 --- 
a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java +++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3OutputStreamTest.java @@ -1,12 +1,31 @@ -import com.amazonaws.auth.BasicAWSCredentials; +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + +import java.io.IOException; +import java.util.Random; + +import io.aiven.kafka.connect.s3.AivenKafkaConnectS3OutputStream; + import com.amazonaws.auth.AWSStaticCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import io.aiven.kafka.connect.s3.AivenKafkaConnectS3OutputStream; import io.findify.s3mock.S3Mock; -import java.io.IOException; -import java.util.Random; import org.junit.Test; import static org.junit.Assert.assertFalse; @@ -16,29 +35,29 @@ public class AivenKafkaConnectS3OutputStreamTest { @Test public void testAivenKafkaConnectS3OutputStreamTest() throws IOException { - Random generator = new Random(); - int port = generator.nextInt(10000) + 10000; + final Random generator = new Random(); + final int port = generator.nextInt(10000) + 10000; - S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build(); + final S3Mock api = new S3Mock.Builder().withPort(port).withInMemoryBackend().build(); api.start(); - BasicAWSCredentials awsCreds = new BasicAWSCredentials( + final BasicAWSCredentials awsCreds = new BasicAWSCredentials( "test_key_id", "test_secret_key" ); - AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); + final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); builder.withCredentials(new AWSStaticCredentialsProvider(awsCreds)); builder.withEndpointConfiguration(new EndpointConfiguration("http://localhost:" + port, "us-west-2")); builder.withPathStyleAccessEnabled(true); - AmazonS3 s3Client = builder.build(); - + final AmazonS3 s3Client = builder.build(); s3Client.createBucket("test-bucket"); - AivenKafkaConnectS3OutputStream storageSmall = new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-small"); + final AivenKafkaConnectS3OutputStream storageSmall = + new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-small"); - byte[] inputSmall = "small".getBytes(); + final byte[] inputSmall = "small".getBytes(); storageSmall.write(inputSmall); assertFalse(s3Client.doesObjectExist("test-bucket", "test-key-small")); storageSmall.flush(); @@ -46,8 +65,9 @@ public void testAivenKafkaConnectS3OutputStreamTest() throws IOException { storageSmall.close(); assertTrue(s3Client.doesObjectExist("test-bucket", "test-key-small")); - AivenKafkaConnectS3OutputStream storageLarge = new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-large"); - byte[] inputLarge = new byte[1024*1024*10]; + 
final AivenKafkaConnectS3OutputStream storageLarge = + new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "test-key-large"); + final byte[] inputLarge = new byte[1024 * 1024 * 10]; storageLarge.write(inputLarge); assertFalse(s3Client.doesObjectExist("test-bucket", "test-key-large")); storageLarge.flush();
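The assertions in this test pin down the stream's observable contract: written bytes are not yet visible as an S3 object, and only closing the stream completes the upload. A sketch of that contract, assuming the class is used as a plain OutputStream just as the sink task uses it; the key and payload are made up:

import java.io.IOException;
import com.amazonaws.services.s3.AmazonS3;

// Behavioural sketch; s3Client would be a configured client such as the mock above.
static void writeExample(final AmazonS3 s3Client) throws IOException {
    final AivenKafkaConnectS3OutputStream out =
            new AivenKafkaConnectS3OutputStream(s3Client, "test-bucket", "example-key");
    out.write("hello".getBytes()); // buffered; no object visible yet
    out.flush();                   // lets the stream upload buffered data as parts
    out.close();                   // completes the upload; the object becomes visible
}

diff --git a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java index 1ddb0c38..94f6dd0b 100644 --- a/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java +++ b/src/test/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkTaskTest.java @@ -1,29 +1,56 @@ +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ + package io.aiven.kafka.connect.s3; +import java.time.LocalDateTime; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Random; + +import org.apache.kafka.clients.consumer.OffsetAndMetadata; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.connect.data.Schema; +import org.apache.kafka.connect.sink.SinkRecord; + import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.client.builder.AwsClientBuilder.EndpointConfiguration; import com.amazonaws.services.s3.AmazonS3; import com.amazonaws.services.s3.AmazonS3ClientBuilder; import io.findify.s3mock.S3Mock; -import org.apache.kafka.clients.consumer.OffsetAndMetadata; -import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.connect.data.Schema; -import org.apache.kafka.connect.sink.SinkRecord; -import org.junit.*; - -import java.time.LocalDateTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.time.format.DateTimeFormatter; -import java.util.*; +import org.junit.After; +import org.junit.AfterClass; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; public class AivenKafkaConnectS3SinkTaskTest { - private static String TEST_BUCKET = "test-bucket"; + private static final String TEST_BUCKET = "test-bucket"; private static S3Mock s3Api; private static AmazonS3 s3Client; @@ -34,13 +61,13 @@ public class AivenKafkaConnectS3SinkTaskTest { @BeforeClass public static void setUpClass() { - Random generator = new Random(); - int s3Port = generator.nextInt(10000) + 10000; + final Random generator = new Random(); + final int s3Port = generator.nextInt(10000) + 10000; s3Api = new S3Mock.Builder().withPort(s3Port).withInMemoryBackend().build(); s3Api.start(); - Map<String, String> commonPropertiesMutable = new HashMap<>(); + final Map<String, String> commonPropertiesMutable 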
= new HashMap<>(); commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID, "test_key_id"); commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY, "test_secret_key"); commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_S3_BUCKET, TEST_BUCKET); @@ -48,8 +75,8 @@ public static void setUpClass() { commonPropertiesMutable.put(AivenKafkaConnectS3Constants.AWS_S3_REGION, "us-west-2"); commonProperties = Collections.unmodifiableMap(commonPropertiesMutable); - AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); - BasicAWSCredentials awsCreds = new BasicAWSCredentials( + final AmazonS3ClientBuilder builder = AmazonS3ClientBuilder.standard(); + final BasicAWSCredentials awsCreds = new BasicAWSCredentials( commonProperties.get(AivenKafkaConnectS3Constants.AWS_ACCESS_KEY_ID), commonProperties.get(AivenKafkaConnectS3Constants.AWS_SECRET_ACCESS_KEY) ); @@ -83,14 +110,14 @@ public void tearDown() { @Test public void testAivenKafkaConnectS3SinkTaskTest() { // Create SinkTask - AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); + final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip"); properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset"); task.start(properties); - TopicPartition tp = new TopicPartition("test-topic", 0); - Collection<TopicPartition> tps = Collections.singletonList(tp); + final TopicPartition tp = new TopicPartition("test-topic", 0); + final Collection<TopicPartition> tps = Collections.singletonList(tp); task.open(tps); // * Simulate periodical flush() cycle - ensure that data files are written @@ -101,14 +128,13 @@ public void testAivenKafkaConnectS3SinkTaskTest() { assertFalse(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000000.gz")); // Flush data - this is called by Connect on offset.flush.interval - Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); + final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(tp, new OffsetAndMetadata(100)); task.flush(offsets); assertTrue(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000000.gz")); // * Verify that we store data on partition unassignment - task.put(createBatchOfRecord(100, 200)); assertFalse(s3Client.doesObjectExist(TEST_BUCKET, "test-topic-0-0000000100.gz")); @@ -129,20 +155,20 @@ public void testAivenKafkaConnectS3SinkTaskTest() { @Test public void testS3ConstantPrefix() { - AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); + final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip"); properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset"); properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix--"); task.start(properties); - TopicPartition tp = new TopicPartition("test-topic", 0); - Collection<TopicPartition> tps = Collections.singletonList(tp); + final TopicPartition tp = new TopicPartition("test-topic", 0); + final Collection<TopicPartition> tps = Collections.singletonList(tp); task.open(tps); task.put(createBatchOfRecord(0, 100)); - Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); + final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(tp, new OffsetAndMetadata(100)); task.flush(offsets); @@ -150,25 +176,25 @@ public void testS3ConstantPrefix() { } @Test - public void testS3UTCDatePrefix() { - AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); + public void testS3UtcDatePrefix() { + final AivenKafkaConnectS3SinkTask task = new 
AivenKafkaConnectS3SinkTask(); properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip"); properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset"); properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix-{{ utc_date }}--"); task.start(properties); - TopicPartition tp = new TopicPartition("test-topic", 0); - Collection<TopicPartition> tps = Collections.singletonList(tp); + final TopicPartition tp = new TopicPartition("test-topic", 0); + final Collection<TopicPartition> tps = Collections.singletonList(tp); task.open(tps); task.put(createBatchOfRecord(0, 100)); - Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); + final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(tp, new OffsetAndMetadata(100)); task.flush(offsets); - String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz", + final String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz", ZonedDateTime.now(ZoneId.of("UTC")).format(DateTimeFormatter.ISO_LOCAL_DATE)); assertTrue(s3Client.doesObjectExist(TEST_BUCKET, expectedFileName)); @@ -177,34 +203,34 @@ @Test public void testS3LocalDatePrefix() { - AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); + final AivenKafkaConnectS3SinkTask task = new AivenKafkaConnectS3SinkTask(); properties.put(AivenKafkaConnectS3Constants.OUTPUT_COMPRESSION, "gzip"); properties.put(AivenKafkaConnectS3Constants.OUTPUT_FIELDS, "value,key,timestamp,offset"); properties.put(AivenKafkaConnectS3Constants.AWS_S3_PREFIX, "prefix-{{ local_date }}--"); task.start(properties); - TopicPartition tp = new TopicPartition("test-topic", 0); - Collection<TopicPartition> tps = Collections.singletonList(tp); + final TopicPartition tp = new TopicPartition("test-topic", 0); + final Collection<TopicPartition> tps = Collections.singletonList(tp); task.open(tps); task.put(createBatchOfRecord(0, 100)); - Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); + final Map<TopicPartition, OffsetAndMetadata> offsets = new HashMap<>(); offsets.put(tp, new OffsetAndMetadata(100)); task.flush(offsets); - String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz", + final String expectedFileName = String.format("prefix-%s--test-topic-0-0000000000.gz", LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE)); assertTrue(s3Client.doesObjectExist(TEST_BUCKET, expectedFileName)); task.stop(); } - private Collection<SinkRecord> createBatchOfRecord(int offsetFrom, int offsetTo) { - ArrayList<SinkRecord> records = new ArrayList<>(); + private Collection<SinkRecord> createBatchOfRecord(final int offsetFrom, final int offsetTo) { + final ArrayList<SinkRecord> records = new ArrayList<>(); for (int offset = offsetFrom; offset < offsetTo; offset++) { - SinkRecord record = new SinkRecord( + final SinkRecord record = new SinkRecord( "test-topic", 0, Schema.BYTES_SCHEMA, "test-key".getBytes(),
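Taken together, the prefix tests fix the object-key layout as prefix, topic, partition, and a ten-digit zero-padded starting offset, with ".gz" appended for gzip output and {{ utc_date }} / {{ local_date }} expanded inside the prefix. A sketch mirroring the expectation built in testS3UtcDatePrefix() above:

import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

// Mirrors the expected key from the UTC prefix test; the date is whatever "today" is.
final String expectedKey = "prefix-"
        + ZonedDateTime.now(ZoneId.of("UTC")).format(DateTimeFormatter.ISO_LOCAL_DATE)
        + "--test-topic-0-" + String.format("%010d", 0L)
        + ".gz"; // e.g. "prefix-2020-01-31--test-topic-0-0000000000.gz"

diff --git a/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java b/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java index 206c73b8..2222dea4 100644 --- a/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java +++ b/src/test/java/io/aiven/kafka/connect/s3/templating/TemplatingEngineTest.java @@ -1,6 +1,21 @@ -package io.aiven.kafka.connect.s3.templating; +/* + * Copyright (C) 2020 Aiven Oy + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU Affero General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. 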
+ * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Affero General Public License for more details. + * + * You should have received a copy of the GNU Affero General Public License + * along with this program. If not, see <https://www.gnu.org/licenses/>. + */ -import org.junit.Test; +package io.aiven.kafka.connect.s3.templating; import java.io.BufferedReader; import java.io.IOException; @@ -10,6 +25,8 @@ import java.util.Collection; import java.util.stream.Collectors; +import org.junit.Test; + import static org.junit.Assert.assertEquals; public class TemplatingEngineTest {