From 88bbda35f8e5d49925e4ddd29c41c2e64624bf40 Mon Sep 17 00:00:00 2001
From: Amanda Liu
Date: Tue, 24 Jun 2025 21:24:01 -0700
Subject: [PATCH] arrowMaxBytesPerBatch to 64mb

---
 .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index a322ec8a7e215..ef6e3984ea39d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -3648,7 +3648,7 @@ object SQLConf {
       errorMsg = "The value of " +
         "spark.sql.execution.arrow.maxBytesPerBatch should be greater " +
         "than zero and less than INT_MAX.")
-      .createWithDefaultString("256MB")
+      .createWithDefaultString("64MB")
 
   val ARROW_TRANSFORM_WITH_STATE_IN_PYSPARK_MAX_STATE_RECORDS_PER_BATCH =
     buildConf("spark.sql.execution.arrow.transformWithStateInPySpark.maxStateRecordsPerBatch")
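
Note (not part of the patch): a minimal sketch of how an application could override the lowered 64MB default at the session level. The config key and the value bounds come from the diff above; the object name, app name, local master, and the 256MB override value are illustrative assumptions.

import org.apache.spark.sql.SparkSession

object ArrowMaxBytesPerBatchExample {
  def main(args: Array[String]): Unit = {
    // Build a session that restores the previous 256MB ceiling; per the checkValue
    // above, any value must be greater than zero and less than INT_MAX.
    val spark = SparkSession.builder()
      .appName("arrow-max-bytes-per-batch-example") // illustrative name
      .master("local[*]") // local master only so the sketch runs outside spark-submit
      .config("spark.sql.execution.arrow.maxBytesPerBatch", "256MB")
      .getOrCreate()

    // Inspect the effective value at runtime; with no override this would
    // report the new "64MB" default from this patch.
    println(spark.conf.get("spark.sql.execution.arrow.maxBytesPerBatch"))

    spark.stop()
  }
}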