@@ -50,7 +50,7 @@ import org.apache.spark.sql.errors.QueryExecutionErrors.hiveTableWithAnsiInterva
5050 import org.apache.spark.sql.execution.datasources.{DataSource, DataSourceUtils, FileFormat, HadoopFsRelation, LogicalRelation}
5151 import org.apache.spark.sql.execution.datasources.delta.DeltaUtils
5252 import org.apache.spark.sql.execution.datasources.v2.FileDataSourceV2
53-  import org.apache.spark.sql.internal.{HiveSerDe, SessionState, SQLConf}
53+  import org.apache.spark.sql.internal.{HiveSerDe, SessionState, SQLConf, StaticSQLConf}
5454 import org.apache.spark.sql.types._
5555 import org.apache.spark.sql.util.PartitioningUtils
5656 import org.apache.spark.util.{SerializableConfiguration, ThreadUtils}
@@ -1160,12 +1160,14 @@ object DDLUtils extends Logging {
11601160
11611161   def checkPrivilegeOfSpecifyTableLocation(
11621162       location: Option[_], state: SessionState): Unit = {
1163-        val catalog = state.catalog
1164-        val user = catalog.getCurrentUser
1165-        val authorizer = state.authorizer
1166-        if (location.isDefined
1167-          && !authorizer.hasPrivilegeOfSpecifyTableLocation(user)) {
1168-          throw QueryCompilationErrors.createExternalTableUnsupported(catalog.getCurrentUser)
1163+        if (state.conf.getConf(StaticSQLConf.AUTHORIZATION_ENABLED)) {
1164+          val catalog = state.catalog
1165+          val user = catalog.getCurrentUser
1166+          val authorizer = state.authorizer
1167+          if (location.isDefined
1168+            && !authorizer.hasPrivilegeOfSpecifyTableLocation(user)) {
1169+            throw QueryCompilationErrors.createExternalTableUnsupported(catalog.getCurrentUser)
1170+          }
11691171     }
11701172   }
11711173
0 commit comments