
Commit 4226713

Fixed reg test for cross-region S3 calls (#995)

1 parent 42af733

File tree

3 files changed: +7 -10 lines changed


regtests/docker-compose.yml (+1)

@@ -66,6 +66,7 @@ services:
       AWS_CROSS_REGION_TEST_ENABLED: $AWS_CROSS_REGION_TEST_ENABLED
       AWS_CROSS_REGION_BUCKET: $AWS_CROSS_REGION_BUCKET
       AWS_ROLE_FOR_CROSS_REGION_BUCKET: $AWS_ROLE_FOR_CROSS_REGION_BUCKET
+      AWS_REGION_FOR_CROSS_REGION_TEST: $AWS_REGION_FOR_CROSS_REGION_TEST
     volumes:
       - ./output:/tmp/polaris-regtests/
       - ./credentials:/tmp/credentials/
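
The compose file only forwards the new variable; it still has to be set on the host when the suite is launched. A minimal sketch of enabling the cross-region test (every value below is an assumed placeholder, not taken from the commit):

# Hypothetical invocation; the variable names match docker-compose.yml,
# but all values are assumed examples.
export AWS_CROSS_REGION_TEST_ENABLED=true
export AWS_CROSS_REGION_BUCKET=my-cross-region-bucket
export AWS_ROLE_FOR_CROSS_REGION_BUCKET=arn:aws:iam::123456789012:role/my-test-role
export AWS_REGION_FOR_CROSS_REGION_TEST=us-west-2
docker compose -f regtests/docker-compose.yml up
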
Reference output for the spark_sql_s3_cross_region test (+5 -6; the paired -/+ prompt lines apparently differ only in trailing whitespace)

@@ -1,4 +1,3 @@
-{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
 Catalog created
 spark-sql (default)> use polaris;
 spark-sql ()> show namespaces;
@@ -7,23 +6,23 @@ spark-sql ()> create namespace db2;
 spark-sql ()> show namespaces;
 db1
 db2
-spark-sql ()>
+spark-sql ()>
 > create namespace db1.schema1;
 spark-sql ()> show namespaces;
 db1
 db2
 spark-sql ()> show namespaces in db1;
 db1.schema1
-spark-sql ()>
+spark-sql ()>
 > create table db1.schema1.tbl1 (col1 int);
 spark-sql ()> show tables in db1;
 spark-sql ()> use db1.schema1;
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>
 > insert into tbl1 values (123), (234);
 spark-sql (db1.schema1)> select * from tbl1;
 123
 234
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>
 > drop table tbl1 purge;
 spark-sql (db1.schema1)> show tables;
 spark-sql (db1.schema1)> drop namespace db1.schema1;
@@ -32,4 +31,4 @@ spark-sql (db1.schema1)> show namespaces;
 db2
 spark-sql (db1.schema1)> drop namespace db2;
 spark-sql (db1.schema1)> show namespaces;
-spark-sql (db1.schema1)>
+spark-sql (db1.schema1)>

regtests/t_spark_sql/src/spark_sql_s3_cross_region.sh (mode 100644 → 100755, +1 -4)
@@ -30,7 +30,7 @@ ROLE_ARN="${AWS_ROLE_FOR_CROSS_REGION_BUCKET}"

 curl -i -X POST -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/catalogs \
-  -d '{"name": "spark_sql_s3_cross_region_catalog", "id": 100, "type": "INTERNAL", "readOnly": false, "properties": {"default-base-location": "s3://${BUCKET}/polaris_test/spark_sql_s3_cross_region_catalog/"}, "storageConfigInfo": {"storageType": "S3", "allowedLocations": ["s3://${BUCKET}/polaris_test/"], "roleArn": "${ROLE_ARN}"}}' > /dev/stderr
+  -d "{\"name\": \"spark_sql_s3_cross_region_catalog\", \"id\": 100, \"type\": \"INTERNAL\", \"readOnly\": false, \"properties\": {\"client.region\": \"${AWS_REGION_FOR_CROSS_REGION_TEST}\", \"default-base-location\": \"s3://${BUCKET}/polaris_test/spark_sql_s3_cross_region_catalog/\"}, \"storageConfigInfo\": {\"storageType\": \"S3\", \"allowedLocations\": [\"s3://${BUCKET}/polaris_test/\"], \"roleArn\": \"${ROLE_ARN}\", \"region\": \"${AWS_REGION_FOR_CROSS_REGION_TEST}\"}}" > /dev/stderr

 # Add TABLE_WRITE_DATA to the catalog's catalog_admin role since by default it can only manage access and metadata
 curl -i -X PUT -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
@@ -42,9 +42,6 @@ curl -i -X PUT -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: appl
   http://${POLARIS_HOST:-localhost}:8181/api/management/v1/principal-roles/service_admin/catalog-roles/spark_sql_s3_cross_region_catalog \
   -d '{"name": "catalog_admin"}' > /dev/stderr

-curl -H "Authorization: Bearer ${SPARK_BEARER_TOKEN}" -H 'Accept: application/json' -H 'Content-Type: application/json' \
-  "http://${POLARIS_HOST:-localhost}:8181/api/catalog/v1/config?warehouse=spark_sql_s3_cross_region_catalog"
-echo
 echo "Catalog created"
 cat << EOF | ${SPARK_HOME}/bin/spark-sql -S --conf spark.sql.catalog.polaris.token="${SPARK_BEARER_TOKEN}" --conf spark.sql.catalog.polaris.warehouse=spark_sql_s3_cross_region_catalog
 use polaris;
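
Two details of this change do the actual work. First, the catalog payload now carries the test region both as a catalog property (client.region) and in storageConfigInfo (region), both taken from the new AWS_REGION_FOR_CROSS_REGION_TEST variable. Second, the -d argument moved from single quotes to a double-quoted string with escaped inner quotes: in the old version the shell never expanded ${BUCKET} and ${ROLE_ARN}, so Polaris received the literal placeholder text. Dropping the trailing debug curl against /api/catalog/v1/config is also what removes the JSON config dump from the first line of the reference output above. A minimal sketch of the quoting pitfall (the bucket value is an assumed example, not taken from the commit):

# BUCKET here is an assumed example value, used only for illustration.
BUCKET=my-bucket

# Single quotes suppress shell expansion: the literal text ${BUCKET}
# would be sent inside the JSON payload.
echo '{"location": "s3://${BUCKET}/polaris_test/"}'
# -> {"location": "s3://${BUCKET}/polaris_test/"}

# Double quotes (with the inner JSON quotes escaped) expand the variable
# before the payload leaves the shell.
echo "{\"location\": \"s3://${BUCKET}/polaris_test/\"}"
# -> {"location": "s3://my-bucket/polaris_test/"}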
