Skip to content

Commit ec402eb

Browse files
committed
Modify config default values and resolve merge conflict.
1 parent 70fe118 commit ec402eb

File tree

3 files changed

+8
-6
lines changed

3 files changed

+8
-6
lines changed

spark-doris-connector/src/main/java/org/apache/doris/spark/cfg/ConfigurationOptions.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -94,10 +94,10 @@ public interface ConfigurationOptions {
9494
int SINK_MAX_BLOCKING_TIMES_DEFAULT = 1;
9595

9696
String DORIS_SINK_MAX_BLOCKING_INTERVAL_MS = "doris.sink.max.blocking.interval.ms";
97-
int SINK_MAX_BLOCKING_INTERVAL_MS_DEFAULT = 1000;
97+
int SINK_MAX_BLOCKING_INTERVAL_MS_DEFAULT = 300000;
9898

9999
String DORIS_SINK_BLOCKING_TRIGGER_KEYS = "doris.sink.block.trigger.keys";
100-
String SINK_BLOCKING_TRIGGER_KEYS_DEFAULT = "-235";
100+
String SINK_BLOCKING_TRIGGER_KEYS_DEFAULT = "";
101101

102102

103103
/**

spark-doris-connector/src/main/scala/org/apache/doris/spark/sql/Utils.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -182,7 +182,7 @@ private[spark] object Utils {
182182
}
183183

184184
def shouldBlock(exception: String): Boolean = {
185-
blockTriggerKeysArray.exists(exception.contains)
185+
blockTriggerKeysArray.nonEmpty && blockTriggerKeysArray.exists(exception.contains)
186186
}
187187

188188
val result = Try(f)

spark-doris-connector/src/main/scala/org/apache/doris/spark/writer/DorisWriter.scala

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -87,8 +87,9 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
8787
*
8888
*/
8989
def flush(batch: Seq[util.List[Object]], dfColumns: Array[String]): Unit = {
90-
Utils.retry[util.List[Integer], Exception](maxRetryTimes, maxSinkBlocks, Duration.ofMillis(batchInterValMs.toLong), Duration.ofMillis(maxBlockInterValMs.toLong), blockTriggerKeysArray, logger) {
91-
dorisStreamLoader.loadV2(batch.toList.asJava, dfColumns, enable2PC)
90+
Utils.retry[util.List[Integer], Exception](maxRetryTimes, maxSinkBlocks, Duration.ofMillis(batchInterValMs.toLong),
91+
Duration.ofMillis(maxBlockInterValMs.toLong), blockTriggerKeysArray, logger) {
92+
dorisStreamLoader.loadV2(batch.asJava, dfColumns, enable2PC)
9293
} match {
9394
case Success(txnIds) => if (enable2PC) handleLoadSuccess(txnIds.asScala, preCommittedTxnAcc)
9495
case Failure(e) =>
@@ -127,7 +128,8 @@ class DorisWriter(settings: SparkSettings) extends Serializable {
127128
*
128129
*/
129130
def flush(batch: Seq[InternalRow], dfColumns: Array[String]): Unit = {
130-
Utils.retry[util.List[Integer], Exception](maxRetryTimes, Duration.ofMillis(batchInterValMs.toLong), logger) {
131+
Utils.retry[util.List[Integer], Exception](maxRetryTimes, maxSinkBlocks, Duration.ofMillis(batchInterValMs.toLong),
132+
Duration.ofMillis(maxBlockInterValMs.toLong), blockTriggerKeysArray, logger) {
131133
dorisStreamLoader.loadStream(convertToObjectList(batch, schema), dfColumns, enable2PC)
132134
} match {
133135
case Success(txnIds) => if (enable2PC) handleLoadSuccess(txnIds.asScala, preCommittedTxnAcc)

0 commit comments

Comments (0)