diff --git a/LICENSE b/LICENSE
index c094f20819..2879d70d5f 100644
--- a/LICENSE
+++ b/LICENSE
@@ -277,6 +277,7 @@ commons-collections:commons-collections
commons-io:commons-io
commons-logging:commons-logging
commons-net:commons-net
+dev.failsafe:failsafe
io.airlift:aircompressor
io.dropwizard.logback:logback-throttling-appender
io.dropwizard.metrics:metrics-annotation
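
Note: `dev.failsafe:failsafe` enters the LICENSE list because Iceberg 1.7 pulls it in for retry handling. A minimal sketch of the Failsafe 3.x retry API this dependency provides (the `readOnce` operation is a hypothetical stand-in):

```java
import dev.failsafe.Failsafe;
import dev.failsafe.RetryPolicy;
import java.io.IOException;
import java.time.Duration;

public class FailsafeRetrySketch {
  public static void main(String[] args) {
    // Retry transient I/O failures with capped exponential backoff.
    RetryPolicy<String> retryPolicy =
        RetryPolicy.<String>builder()
            .handle(IOException.class)
            .withBackoff(Duration.ofMillis(100), Duration.ofSeconds(2))
            .withMaxRetries(3)
            .build();

    // readOnce() is a hypothetical operation that may fail transiently.
    String result = Failsafe.with(retryPolicy).get(FailsafeRetrySketch::readOnce);
    System.out.println(result);
  }

  private static String readOnce() throws IOException {
    return "ok";
  }
}
```
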
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 793958deee..5fd5fe49ad 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -19,7 +19,7 @@
[versions]
hadoop = "3.4.0"
-iceberg = "1.6.1"
+iceberg = "1.7.1"
dropwizard = "4.0.8"
slf4j = "2.0.13"
swagger = "1.6.14"
diff --git a/polaris-core/src/main/java/org/apache/polaris/core/storage/aws/PolarisS3FileIOClientFactory.java b/polaris-core/src/main/java/org/apache/polaris/core/storage/aws/PolarisS3FileIOClientFactory.java
deleted file mode 100644
index 4434741bab..0000000000
--- a/polaris-core/src/main/java/org/apache/polaris/core/storage/aws/PolarisS3FileIOClientFactory.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.polaris.core.storage.aws;
-
-import java.util.Map;
-import org.apache.iceberg.aws.AwsClientProperties;
-import org.apache.iceberg.aws.HttpClientProperties;
-import org.apache.iceberg.aws.s3.S3FileIOAwsClientFactory;
-import org.apache.iceberg.aws.s3.S3FileIOProperties;
-import software.amazon.awssdk.services.s3.S3Client;
-
-/**
- * An S3FileIOAwsClientFactory used by S3FileIO to initialize the S3 client. Unlike
- * DefaultS3FileIOAwsClientFactory, this factory enables cross-region access, which S3FileIO
- * does not support by default due to a known Iceberg issue.
- */
-public class PolarisS3FileIOClientFactory implements S3FileIOAwsClientFactory {
- private S3FileIOProperties s3FileIOProperties;
- private HttpClientProperties httpClientProperties;
- private AwsClientProperties awsClientProperties;
-
- PolarisS3FileIOClientFactory() {
- this.s3FileIOProperties = new S3FileIOProperties();
- this.httpClientProperties = new HttpClientProperties();
- this.awsClientProperties = new AwsClientProperties();
- }
-
- @Override
- public void initialize(Map<String, String> properties) {
- this.s3FileIOProperties = new S3FileIOProperties(properties);
- this.awsClientProperties = new AwsClientProperties(properties);
- this.httpClientProperties = new HttpClientProperties(properties);
- }
-
- @Override
- public S3Client s3() {
- return S3Client.builder()
- .applyMutation(awsClientProperties::applyClientRegionConfiguration)
- .applyMutation(httpClientProperties::applyHttpClientConfigurations)
- .applyMutation(s3FileIOProperties::applyEndpointConfigurations)
- .applyMutation(s3FileIOProperties::applyServiceConfigurations)
- .applyMutation(
- s3ClientBuilder ->
- s3FileIOProperties.applyCredentialConfigurations(
- awsClientProperties, s3ClientBuilder))
- .applyMutation(s3FileIOProperties::applySignerConfiguration)
- .applyMutation(s3FileIOProperties::applyS3AccessGrantsConfigurations)
- .applyMutation(s3ClientBuilder -> s3ClientBuilder.crossRegionAccessEnabled(true))
- .build();
- }
-}
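
The custom factory is no longer needed: Iceberg 1.7 supports cross-region access in S3FileIO itself, toggled by a catalog property rather than a replacement client factory. A minimal sketch, assuming the `s3.cross-region-access-enabled` property key from Iceberg 1.7's `S3FileIOProperties`:

```java
import java.util.Map;
import org.apache.iceberg.aws.s3.S3FileIO;

public class CrossRegionSketch {
  public static void main(String[] args) {
    // Property key assumed from S3FileIOProperties.CROSS_REGION_ACCESS_ENABLED
    // (added in Iceberg 1.7); no custom S3FileIOAwsClientFactory required.
    try (S3FileIO io = new S3FileIO()) {
      io.initialize(Map.of("s3.cross-region-access-enabled", "true"));
      // io.newInputFile("s3://bucket-in-another-region/key") now resolves
      // buckets outside the client's configured region.
    }
  }
}
```
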
diff --git a/polaris-service/src/main/java/org/apache/polaris/service/catalog/BasePolarisCatalog.java b/polaris-service/src/main/java/org/apache/polaris/service/catalog/BasePolarisCatalog.java
index 42009d3347..89ea700a22 100644
--- a/polaris-service/src/main/java/org/apache/polaris/service/catalog/BasePolarisCatalog.java
+++ b/polaris-service/src/main/java/org/apache/polaris/service/catalog/BasePolarisCatalog.java
@@ -47,7 +47,6 @@
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableMetadataParser;
import org.apache.iceberg.TableOperations;
-import org.apache.iceberg.aws.s3.S3FileIOProperties;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.SupportsNamespaces;
import org.apache.iceberg.catalog.TableIdentifier;
@@ -99,7 +98,6 @@
import org.apache.polaris.core.storage.PolarisStorageConfigurationInfo;
import org.apache.polaris.core.storage.PolarisStorageIntegration;
import org.apache.polaris.core.storage.StorageLocation;
-import org.apache.polaris.core.storage.aws.PolarisS3FileIOClientFactory;
import org.apache.polaris.service.catalog.io.FileIOFactory;
import org.apache.polaris.service.exception.IcebergExceptionMapper;
import org.apache.polaris.service.task.TaskExecutor;
@@ -2055,8 +2053,6 @@ private List<TableIdentifier> listTableLike(PolarisEntitySubType subType, Namesp
*/
private FileIO loadFileIO(String ioImpl, Map<String, String> properties) {
Map<String, String> propertiesWithS3CustomizedClientFactory = new HashMap<>(properties);
- propertiesWithS3CustomizedClientFactory.put(
- S3FileIOProperties.CLIENT_FACTORY, PolarisS3FileIOClientFactory.class.getName());
return fileIOFactory.loadFileIO(ioImpl, propertiesWithS3CustomizedClientFactory);
}
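
With the factory gone, `loadFileIO` forwards the caller's properties unchanged (the `HashMap` copy is now purely defensive). A minimal sketch of the resolution step a `FileIOFactory` implementation presumably performs, using Iceberg's `CatalogUtil.loadFileIO` (the `load` helper is hypothetical):

```java
import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.CatalogUtil;
import org.apache.iceberg.io.FileIO;

public class FileIOLoadingSketch {
  // Hypothetical helper: reflectively instantiate the FileIO implementation
  // named by ioImpl and initialize it with the catalog properties.
  static FileIO load(String ioImpl, Map<String, String> properties) {
    return CatalogUtil.loadFileIO(ioImpl, properties, new Configuration());
  }

  public static void main(String[] args) {
    FileIO io = load("org.apache.iceberg.io.ResolvingFileIO", Map.of());
    System.out.println(io.getClass().getName());
  }
}
```
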
diff --git a/polaris-service/src/main/java/org/apache/polaris/service/catalog/IcebergCatalogAdapter.java b/polaris-service/src/main/java/org/apache/polaris/service/catalog/IcebergCatalogAdapter.java
index 94ea65243c..8e29cdb250 100644
--- a/polaris-service/src/main/java/org/apache/polaris/service/catalog/IcebergCatalogAdapter.java
+++ b/polaris-service/src/main/java/org/apache/polaris/service/catalog/IcebergCatalogAdapter.java
@@ -21,7 +21,9 @@
import static org.apache.polaris.service.catalog.AccessDelegationMode.VENDED_CREDENTIALS;
import com.google.common.base.Preconditions;
+import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.core.SecurityContext;
import java.net.URLEncoder;
@@ -29,13 +31,16 @@
import java.util.EnumSet;
import java.util.Map;
import java.util.Optional;
+import java.util.Set;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.exceptions.BadRequestException;
import org.apache.iceberg.exceptions.NotAuthorizedException;
import org.apache.iceberg.exceptions.NotFoundException;
+import org.apache.iceberg.rest.Endpoint;
import org.apache.iceberg.rest.RESTUtil;
+import org.apache.iceberg.rest.ResourcePaths;
import org.apache.iceberg.rest.requests.CommitTransactionRequest;
import org.apache.iceberg.rest.requests.CreateNamespaceRequest;
import org.apache.iceberg.rest.requests.CreateTableRequest;
@@ -71,6 +76,38 @@
public class IcebergCatalogAdapter
implements IcebergRestCatalogApiService, IcebergRestConfigurationApiService {
+ private static final Set<Endpoint> DEFAULT_ENDPOINTS =
+ ImmutableSet.<Endpoint>builder()
+ .add(Endpoint.V1_LIST_NAMESPACES)
+ .add(Endpoint.V1_LOAD_NAMESPACE)
+ .add(Endpoint.V1_CREATE_NAMESPACE)
+ .add(Endpoint.V1_UPDATE_NAMESPACE)
+ .add(Endpoint.V1_DELETE_NAMESPACE)
+ .add(Endpoint.V1_LIST_TABLES)
+ .add(Endpoint.V1_LOAD_TABLE)
+ .add(Endpoint.V1_CREATE_TABLE)
+ .add(Endpoint.V1_UPDATE_TABLE)
+ .add(Endpoint.V1_DELETE_TABLE)
+ .add(Endpoint.V1_RENAME_TABLE)
+ .add(Endpoint.V1_REGISTER_TABLE)
+ .add(Endpoint.V1_REPORT_METRICS)
+ .build();
+
+ private static final Set<Endpoint> VIEW_ENDPOINTS =
+ ImmutableSet.<Endpoint>builder()
+ .add(Endpoint.V1_LIST_VIEWS)
+ .add(Endpoint.V1_LOAD_VIEW)
+ .add(Endpoint.V1_CREATE_VIEW)
+ .add(Endpoint.V1_UPDATE_VIEW)
+ .add(Endpoint.V1_DELETE_VIEW)
+ .add(Endpoint.V1_RENAME_VIEW)
+ .build();
+
+ private static final Set<Endpoint> COMMIT_ENDPOINT =
+ ImmutableSet.<Endpoint>builder()
+ .add(Endpoint.create("POST", ResourcePaths.V1_TRANSACTIONS_COMMIT))
+ .build();
+
private final CallContextCatalogFactory catalogFactory;
private final MetaStoreManagerFactory metaStoreManagerFactory;
private final RealmEntityManagerFactory entityManagerFactory;
@@ -466,6 +503,12 @@ public Response getConfig(String warehouse, SecurityContext securityContext) {
ConfigResponse.builder()
.withDefaults(properties) // catalog properties are defaults
.withOverrides(ImmutableMap.of("prefix", warehouse))
+ .withEndpoints(
+ ImmutableList.<Endpoint>builder()
+ .addAll(DEFAULT_ENDPOINTS)
+ .addAll(VIEW_ENDPOINTS)
+ .addAll(COMMIT_ENDPOINT)
+ .build())
.build())
.build();
}
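
For context on how clients consume the new `endpoints` field: Iceberg 1.7's REST catalog reads this list from `ConfigResponse` and fails fast on operations the server did not advertise. A minimal sketch of that guard, assuming `Endpoint.fromString` and `Endpoint.check` behave as in Iceberg 1.7's `org.apache.iceberg.rest.Endpoint`:

```java
import com.google.common.collect.ImmutableSet;
import java.util.Set;
import org.apache.iceberg.rest.Endpoint;

public class EndpointCheckSketch {
  public static void main(String[] args) {
    // In practice the set is parsed from getConfig()'s "endpoints" array;
    // built inline here for illustration.
    Set<Endpoint> supported =
        ImmutableSet.of(
            Endpoint.V1_LOAD_TABLE,
            Endpoint.fromString("POST /v1/{prefix}/transactions/commit"));

    Endpoint.check(supported, Endpoint.V1_LOAD_TABLE); // advertised: passes
    try {
      Endpoint.check(supported, Endpoint.V1_CREATE_VIEW); // not advertised
    } catch (UnsupportedOperationException e) {
      System.out.println("server does not support view creation");
    }
  }
}
```
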
diff --git a/regtests/setup.sh b/regtests/setup.sh
index 7c9a4ae359..5b03eab03f 100755
--- a/regtests/setup.sh
+++ b/regtests/setup.sh
@@ -31,7 +31,7 @@ if [ -z "${SPARK_HOME}" ]; then
fi
SPARK_CONF="${SPARK_HOME}/conf/spark-defaults.conf"
DERBY_HOME="/tmp/derby"
-ICEBERG_VERSION="1.6.1"
+ICEBERG_VERSION="1.7.1"
export PYTHONPATH="${SPARK_HOME}/python/:${SPARK_HOME}/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH"
# Ensure binaries are downloaded locally
diff --git a/regtests/t_pyspark/src/iceberg_spark.py b/regtests/t_pyspark/src/iceberg_spark.py
index 579c09b27a..db907ed2b8 100644
--- a/regtests/t_pyspark/src/iceberg_spark.py
+++ b/regtests/t_pyspark/src/iceberg_spark.py
@@ -72,7 +72,7 @@ def __enter__(self):
"""Initial method for Iceberg Spark session. Creates a Spark session with specified configs.
"""
packages = [
- "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.6.1",
+ "org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.7.1",
"org.apache.hadoop:hadoop-aws:3.4.0",
"software.amazon.awssdk:bundle:2.23.19",
"software.amazon.awssdk:url-connection-client:2.23.19",
diff --git a/regtests/t_spark_sql/ref/spark_sql_azure_blob.sh.ref b/regtests/t_spark_sql/ref/spark_sql_azure_blob.sh.ref
index 5c18f802cd..24150b2535 100755
--- a/regtests/t_spark_sql/ref/spark_sql_azure_blob.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_azure_blob.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"abfss://polaris-container@polarisadls.blob.core.windows.net/polaris-test/spark_sql_blob_catalog/"},"overrides":{"prefix":"spark_sql_azure_blob_catalog"}}
+{"defaults":{"default-base-location":"abfss://polaris-container@polarisadls.blob.core.windows.net/polaris-test/spark_sql_blob_catalog/"},"overrides":{"prefix":"spark_sql_azure_blob_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_azure_dfs.sh.ref b/regtests/t_spark_sql/ref/spark_sql_azure_dfs.sh.ref
index 422389565d..180f3e6d9c 100755
--- a/regtests/t_spark_sql/ref/spark_sql_azure_dfs.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_azure_dfs.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"abfss://polaris-container@polarisadls.dfs.core.windows.net/polaris-test/spark_sql_dfs_catalog/"},"overrides":{"prefix":"spark_sql_azure_dfs_catalog"}}
+{"defaults":{"default-base-location":"abfss://polaris-container@polarisadls.dfs.core.windows.net/polaris-test/spark_sql_dfs_catalog/"},"overrides":{"prefix":"spark_sql_azure_dfs_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref b/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
index 1ab8f91896..a23d1d941e 100755
--- a/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_basic.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"}}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_basic_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_gcp.sh.ref b/regtests/t_spark_sql/ref/spark_sql_gcp.sh.ref
index f083b9a0af..67c56757bb 100755
--- a/regtests/t_spark_sql/ref/spark_sql_gcp.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_gcp.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"gs://polaris-test1/polaris_test/spark_sql_gcp_catalog/"},"overrides":{"prefix":"spark_sql_gcp_catalog"}}
+{"defaults":{"default-base-location":"gs://polaris-test1/polaris_test/spark_sql_gcp_catalog/"},"overrides":{"prefix":"spark_sql_gcp_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref b/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
index 885663c151..a25c745437 100755
--- a/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_s3.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"}}
+{"defaults":{"default-base-location":"s3://datalake-storage-team/polaris_test/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_s3_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref b/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
index 957214cc17..feef8667c8 100644
--- a/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_s3_cross_region.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"}}
+{"defaults":{"default-base-location":"s3://sfc-role-stage-for-reg-test-do-not-modify-write-only/polaris_test/spark_sql_s3_cross_region_catalog/"},"overrides":{"prefix":"spark_sql_s3_cross_region_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;
diff --git a/regtests/t_spark_sql/ref/spark_sql_views.sh.ref b/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
index 44e64f2c29..8e4bff2702 100755
--- a/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
+++ b/regtests/t_spark_sql/ref/spark_sql_views.sh.ref
@@ -1,4 +1,4 @@
-{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"}}
+{"defaults":{"default-base-location":"file:///tmp/spark_sql_s3_catalog"},"overrides":{"prefix":"spark_sql_views_catalog"},"endpoints":["GET /v1/{prefix}/namespaces","GET /v1/{prefix}/namespaces/{namespace}","POST /v1/{prefix}/namespaces","POST /v1/{prefix}/namespaces/{namespace}/properties","DELETE /v1/{prefix}/namespaces/{namespace}","GET /v1/{prefix}/namespaces/{namespace}/tables","GET /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/namespaces/{namespace}/tables","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}","DELETE /v1/{prefix}/namespaces/{namespace}/tables/{table}","POST /v1/{prefix}/tables/rename","POST /v1/{prefix}/namespaces/{namespace}/register","POST /v1/{prefix}/namespaces/{namespace}/tables/{table}/metrics","GET /v1/{prefix}/namespaces/{namespace}/views","GET /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/namespaces/{namespace}/views","POST /v1/{prefix}/namespaces/{namespace}/views/{view}","DELETE /v1/{prefix}/namespaces/{namespace}/views/{view}","POST /v1/{prefix}/views/rename","POST /v1/{prefix}/transactions/commit"]}
Catalog created
spark-sql (default)> use polaris;
spark-sql ()> show namespaces;