Skip to content

Commit

Permalink
Update commons-io to version from Flink 1.19 (#6937)
Browse files Browse the repository at this point in the history
  • Loading branch information
piotrp authored Sep 24, 2024
1 parent 40fc17d commit df4cb52
Show file tree
Hide file tree
Showing 5 changed files with 7 additions and 6 deletions.
2 changes: 1 addition & 1 deletion build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,7 @@ val flinkV = "1.19.1"
// Dependency versions aligned with the Flink 1.19 distribution.
val flinkConnectorKafkaV = "3.2.0-1.19"
val flinkCommonsLang3V = "3.12.0"
val flinkCommonsTextV = "1.10.0"
// NOTE(review): the next two lines are old/new diff residue for the same val;
// only the 2.15.1 value (the commons-io version bundled with Flink 1.19) is current.
val flinkCommonsIOV = "2.11.0"
val flinkCommonsIOV = "2.15.1"
val avroV = "1.11.3"
// we should use max(version used by confluent, version acceptable by flink), https://docs.confluent.io/platform/current/installation/versions-interoperability.html - confluent version reference
val kafkaV = "3.6.2"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ public static void addDefaultSerializers(ExecutionConfig config) {
addSerializer(config, Period.class, new PeriodSerializer());
}

@SuppressWarnings("unchecked")
@SuppressWarnings({"unchecked", "deprecation"})
// Registers `serializer`'s class as the default Kryo serializer for `klass` on the given config.
// The unchecked cast is safe because Y is declared to extend Serializer<T>.
// NOTE(review): addDefaultKryoSerializer appears to be deprecated in this Flink version —
// presumably why the enclosing annotation suppresses "deprecation"; confirm against Flink 1.19 API.
private static <T, Y extends Serializer<T>> void addSerializer(ExecutionConfig config, Class<T> klass, Y serializer) {
config.addDefaultKryoSerializer(klass, (Class<? extends Serializer<?>>) serializer.getClass());
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ public final class ScalaCaseClassSerializerSnapshot<T extends scala.Product>
private Class<T> type;

/** Used via reflection. */
@SuppressWarnings("unused")
@SuppressWarnings({"unused", "deprecation"})
// No-arg constructor required so Flink can instantiate the snapshot reflectively during restore;
// delegates the serializer class to the CompositeTypeSerializerSnapshot super constructor.
public ScalaCaseClassSerializerSnapshot() {
super(ScalaCaseClassSerializer.class);
}
Expand Down Expand Up @@ -85,6 +85,7 @@ protected void readOuterSnapshot(
}

@Override
@SuppressWarnings("deprecation")
protected CompositeTypeSerializerSnapshot.OuterSchemaCompatibility
resolveOuterSchemaCompatibility(ScalaCaseClassSerializer<T> newSerializer) {
return (Objects.equals(type, newSerializer.getTupleClass()))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ public final class ScalaOptionSerializerSnapshot<E>

private static final int VERSION = 2;

@SuppressWarnings("WeakerAccess")
@SuppressWarnings({"WeakerAccess", "deprecation"})
// No-arg constructor used by Flink when restoring a serializer snapshot from savepoint state;
// passes the concrete serializer class up to the snapshot base class.
public ScalaOptionSerializerSnapshot() {
super(OptionSerializer.class);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ object EmbeddedKafkaServer {
private def prepareRaftStorage(logDir: File, kafkaConfig: server.KafkaConfig) = {
val uuid = Uuid.randomUuid()
StorageTool.formatCommand(
new PrintStream(NullOutputStream.NULL_OUTPUT_STREAM),
new PrintStream(NullOutputStream.INSTANCE),
Seq(logDir.getAbsolutePath),
StorageTool.buildMetadataProperties(uuid.toString, kafkaConfig),
MetadataVersion.IBP_3_3_IV3,
Expand Down Expand Up @@ -165,7 +165,7 @@ object KafkaTestUtils {
new KafkaProducer(props)
}

private def createCommonProducerProps[K, T](kafkaAddress: String, id: String) = {
private def createCommonProducerProps[K, T](kafkaAddress: String, id: String): Properties = {
val props = new Properties()
props.put("bootstrap.servers", kafkaAddress)
props.put("batch.size", "100000")
Expand Down

0 comments on commit df4cb52

Please sign in to comment.