diff --git a/pyiceberg/catalog/s3tables.py b/pyiceberg/catalog/s3tables.py
index 0485bb3072..1408322ea7 100644
--- a/pyiceberg/catalog/s3tables.py
+++ b/pyiceberg/catalog/s3tables.py
@@ -141,7 +141,7 @@ def create_table(
         schema: Schema = self._convert_schema_if_needed(schema)  # type: ignore
 
         # creating a new table with S3 Tables is a two step process. We first have to create an S3 Table with the
-        # S3 Tables API and then write the new metadata.json to the warehouseLocaiton associated with the newly
+        # S3 Tables API and then write the new metadata.json to the warehouseLocation associated with the newly
         # created S3 Table.
         try:
             self.s3tables.create_table(
diff --git a/tests/catalog/test_s3tables.py b/tests/catalog/test_s3tables.py
index 930b11ec9a..d9f48e363a 100644
--- a/tests/catalog/test_s3tables.py
+++ b/tests/catalog/test_s3tables.py
@@ -26,11 +26,15 @@ def table_bucket_arn() -> str:
     # in one of the supported regions.
     return os.environ["ARN"]
 
 
+@pytest.fixture
+def aws_region() -> str:
+    import os
+
+    return os.environ["AWS_REGION"]
+
+
 @pytest.fixture
-def catalog(table_bucket_arn: str) -> S3TableCatalog:
-    properties = {"s3tables.table-bucket-arn": table_bucket_arn, "s3tables.region": "us-east-1", "s3.region": "us-east-1"}
+def catalog(table_bucket_arn: str, aws_region: str) -> S3TableCatalog:
+    properties = {"s3tables.table-bucket-arn": table_bucket_arn, "s3tables.region": aws_region}
     return S3TableCatalog(name="test_s3tables_catalog", **properties)
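
Not part of the diff, but for context: a minimal sketch of how the updated fixtures would be consumed, assuming both the `ARN` and `AWS_REGION` environment variables point at a real S3 Tables bucket (the diff notes these tests run against a real AWS endpoint rather than moto). The test name `test_catalog_lists_namespaces` is hypothetical and only meant to illustrate that the `catalog` fixture now picks its region from `AWS_REGION` instead of a hardcoded `us-east-1`.

    # Illustrative sketch only; not part of the change set.
    # Assumes ARN and AWS_REGION are exported in the environment before running pytest.
    from pyiceberg.catalog.s3tables import S3TableCatalog


    def test_catalog_lists_namespaces(catalog: S3TableCatalog) -> None:
        # The catalog fixture already resolved the bucket ARN and region from the
        # environment, so a read-only call is enough to verify the wiring.
        assert isinstance(catalog.list_namespaces(), list)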