@@ -210,7 +210,9 @@ def test_sagemaker_pyspark_sse_s3(role, image_uri, sagemaker_session, region, sa
     assert len(output_contents) != 0
 
 
-def test_sagemaker_pyspark_sse_kms_s3(role, image_uri, sagemaker_session, region, sagemaker_client, account_id):
+def test_sagemaker_pyspark_sse_kms_s3(
+    role, image_uri, sagemaker_session, region, sagemaker_client, account_id, partition
+):
     spark = PySparkProcessor(
         base_job_name="sm-spark-py",
         image_uri=image_uri,
@@ -253,7 +255,7 @@ def test_sagemaker_pyspark_sse_kms_s3(role, image_uri, sagemaker_session, region
         "Classification": "core-site",
         "Properties": {
             "fs.s3a.server-side-encryption-algorithm": "SSE-KMS",
-            "fs.s3a.server-side-encryption.key": f"arn:aws:kms:{region}:{account_id}:key/{kms_key_id}",
+            "fs.s3a.server-side-encryption.key": f"arn:{partition}:kms:{region}:{account_id}:key/{kms_key_id}",
         },
     },
 )
@@ -270,7 +272,7 @@ def test_sagemaker_pyspark_sse_kms_s3(role, image_uri, sagemaker_session, region
     for s3_object in s3_objects:
         object_metadata = s3_client.get_object(Bucket=bucket, Key=s3_object["Key"])
         assert object_metadata["ServerSideEncryption"] == "aws:kms"
-        assert object_metadata["SSEKMSKeyId"] == f"arn:aws:kms:{region}:{account_id}:key/{kms_key_id}"
+        assert object_metadata["SSEKMSKeyId"] == f"arn:{partition}:kms:{region}:{account_id}:key/{kms_key_id}"
 
 
 def test_sagemaker_scala_jar_multinode(role, image_uri, configuration, sagemaker_session, sagemaker_client):
0 commit comments