Skip to content

Commit 4da10ee

Browse files
authored
Merge branch-25.08 into main (NVIDIA#13284)
Merge branch-25.08 into main Note: merge this PR with **Create a merge commit to merge**
2 parents e33d09e + 1bb07c2 commit 4da10ee

File tree

4 files changed

+36
-22
lines changed

4 files changed

+36
-22
lines changed

CHANGELOG.md

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
# Change log
2-
Generated on 2025-08-08
2+
Generated on 2025-08-09
33

44
## Release 25.08
55

@@ -37,6 +37,7 @@ Generated on 2025-08-08
3737
### Bugs Fixed
3838
|||
3939
|:---|:---|
40+
|[#13279](https://github.com/NVIDIA/spark-rapids/issues/13279)|[BUG] Integration test failures with Delta|
4041
|[#13265](https://github.com/NVIDIA/spark-rapids/issues/13265)|[BUG] test failed Expected error IllegalArgumentException/ArrayIndexOutOfBoundsException|
4142
|[#13254](https://github.com/NVIDIA/spark-rapids/issues/13254)|[BUG] delta_lake_time_travel_test.py::test_time_travel_on_non_existing_table failed Expected error 'AnalysisException' did not appear|
4243
|[#13049](https://github.com/NVIDIA/spark-rapids/issues/13049)|[BUG] hash_aggregate_test tests FAILED on [DATABRICKS]|
@@ -128,6 +129,8 @@ Generated on 2025-08-08
128129
### PRs
129130
|||
130131
|:---|:---|
132+
|[#13280](https://github.com/NVIDIA/spark-rapids/pull/13280)|Fix fallback test params for Delta MergeCommand, UpdateCommand, and DeleteCommand|
133+
|[#13258](https://github.com/NVIDIA/spark-rapids/pull/13258)|Update changelog for the v25.08 release [skip ci]|
131134
|[#13257](https://github.com/NVIDIA/spark-rapids/pull/13257)|Update dependency version JNI, private, hybrid to 25.08.0|
132135
|[#13123](https://github.com/NVIDIA/spark-rapids/pull/13123)|Enable MERGE, UPDATE, DELETE by default for Delta 3.3.0|
133136
|[#13267](https://github.com/NVIDIA/spark-rapids/pull/13267)|Use pytest.ExceptionInfo to create the same exception text as pytest raises|

integration_tests/src/main/python/delta_lake_delete_test.py

Lines changed: 11 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@
1919
from delta_lake_utils import *
2020
from marks import *
2121
from spark_session import is_before_spark_320, is_databricks_runtime, supports_delta_lake_deletion_vectors, \
22-
with_cpu_session, with_gpu_session, is_databricks143_or_later
22+
with_cpu_session, with_gpu_session, is_before_spark_353
2323

2424
delta_delete_enabled_conf = copy_and_update(delta_writes_enabled_conf,
2525
{"spark.rapids.sql.command.DeleteCommand": "true",
@@ -63,16 +63,19 @@ def checker(data_path, do_delete):
6363
delta_sql_delete_test(spark_tmp_path, use_cdf, dest_table_func, delete_sql, checker, enable_deletion_vectors,
6464
partition_columns)
6565

66+
fallback_test_params = [{"spark.rapids.sql.format.delta.write.enabled": "false"},
67+
{"spark.rapids.sql.format.parquet.enabled": "false"},
68+
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
69+
{"spark.rapids.sql.command.DeleteCommand": "false"},
70+
]
71+
if is_before_spark_353():
72+
# DeleteCommand is disabled by default before Spark 3.5.3
73+
fallback_test_params.append(delta_writes_enabled_conf)
74+
6675
@allow_non_gpu("ExecutedCommandExec", *delta_meta_allow)
6776
@delta_lake
6877
@ignore_order
69-
@pytest.mark.parametrize("disable_conf",
70-
[{"spark.rapids.sql.format.delta.write.enabled": "false"},
71-
{"spark.rapids.sql.format.parquet.enabled": "false"},
72-
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
73-
{"spark.rapids.sql.command.DeleteCommand": "false"},
74-
delta_writes_enabled_conf # Test disabled by default
75-
], ids=idfn)
78+
@pytest.mark.parametrize("disable_conf", fallback_test_params, ids=idfn)
7679
@pytest.mark.skipif(is_before_spark_320(), reason="Delta Lake writes are not supported before Spark 3.2.x")
7780
@pytest.mark.parametrize("enable_deletion_vectors", deletion_vector_values, ids=idfn)
7881
def test_delta_delete_disabled_fallback(spark_tmp_path, disable_conf, enable_deletion_vectors):

integration_tests/src/main/python/delta_lake_merge_test.py

Lines changed: 12 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -24,17 +24,22 @@
2424
{"spark.rapids.sql.command.MergeIntoCommand": "true",
2525
"spark.rapids.sql.command.MergeIntoCommandEdge": "true"})
2626

27+
fallback_test_params = [{"spark.rapids.sql.format.delta.write.enabled": "false"},
28+
{"spark.rapids.sql.format.parquet.enabled": "false"},
29+
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
30+
{"spark.rapids.sql.command.MergeIntoCommand": "false"},
31+
]
32+
if is_before_spark_353():
33+
# MergeCommand is disabled by default before Spark 3.5.3.
34+
# In Spark 3.5.3 and later, MergeCommand is enabled by default, but may not run on GPU yet
35+
# because of https://github.com/NVIDIA/spark-rapids/issues/8042.
36+
# See https://github.com/NVIDIA/spark-rapids/issues/13021#issuecomment-3166724473 for details.
37+
fallback_test_params.append(delta_writes_enabled_conf)
2738

2839
@allow_non_gpu(delta_write_fallback_allow, *delta_meta_allow)
2940
@delta_lake
3041
@ignore_order
31-
@pytest.mark.parametrize("disable_conf",
32-
[{"spark.rapids.sql.format.delta.write.enabled": "false"},
33-
{"spark.rapids.sql.format.parquet.enabled": "false"},
34-
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
35-
{"spark.rapids.sql.command.MergeIntoCommand": "false"},
36-
delta_writes_enabled_conf # Test disabled by default
37-
], ids=idfn)
42+
@pytest.mark.parametrize("disable_conf", fallback_test_params, ids=idfn)
3843
@pytest.mark.skipif(is_before_spark_320(), reason="Delta Lake writes are not supported before Spark 3.2.x")
3944
@pytest.mark.parametrize("enable_deletion_vectors", deletion_vector_values_with_350DB143_xfail_reasons(
4045
enabled_xfail_reason='https://github.com/NVIDIA/spark-rapids/issues/12042'), ids=idfn)

integration_tests/src/main/python/delta_lake_update_test.py

Lines changed: 9 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -82,15 +82,18 @@ def write_func(spark, path):
8282
assert_gpu_fallback_write(write_func, read_delta_path, data_path,
8383
"ExecutedCommandExec", delta_update_enabled_conf)
8484

85+
fallback_test_params = [{"spark.rapids.sql.format.delta.write.enabled": "false"},
86+
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
87+
{"spark.rapids.sql.command.UpdateCommand": "false"},
88+
]
89+
if is_before_spark_353():
90+
# UpdateCommand is disabled by default before Spark 3.5.3
91+
fallback_test_params.append(delta_writes_enabled_conf)
92+
8593
@allow_non_gpu(delta_write_fallback_allow, *delta_meta_allow)
8694
@delta_lake
8795
@ignore_order
88-
@pytest.mark.parametrize("disable_conf",
89-
[{"spark.rapids.sql.format.delta.write.enabled": "false"},
90-
{"spark.rapids.sql.format.parquet.write.enabled": "false"},
91-
{"spark.rapids.sql.command.UpdateCommand": "false"},
92-
delta_writes_enabled_conf # Test disabled by default
93-
], ids=idfn)
96+
@pytest.mark.parametrize("disable_conf", fallback_test_params, ids=idfn)
9497
@pytest.mark.skipif(is_before_spark_320(), reason="Delta Lake writes are not supported before Spark 3.2.x")
9598
@pytest.mark.parametrize("enable_deletion_vector", deletion_vector_values_with_350DB143_xfail_reasons(
9699
enabled_xfail_reason='https://github.com/NVIDIA/spark-rapids/issues/12042'), ids=idfn)

0 commit comments

Comments (0)