Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -292,4 +292,6 @@ private[hive] trait HiveClient {
/** Used for testing only. Removes all metadata from this instance of Hive. */
def reset(): Unit

/** Returns the user name that is used as the owner of Hive tables. */
def userName: String
}
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,7 @@ private[hive] class HiveClientImpl(
hiveConf
}

private val userName = UserGroupInformation.getCurrentUser.getShortUserName
// Owner user name reported for Hive tables. Captured once at client creation from the
// current UGI (a `val`, so later UGI changes are not reflected — see review discussion
// about possibly making this a `def` in a follow-up).
override val userName = UserGroupInformation.getCurrentUser.getShortUserName
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we change the user name at runtime? If we can, then it should be `def` instead of `val`.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not 100% sure, but we have been defining `userName` as a `val` since Spark 2.2.0, starting from commit 344f38b.

Changing it to `def` is safer anyway (it can handle both cases — whether the user name changes or not), so please let me know if we would like to make the change.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Btw, it would be better to handle that in a separate PR if necessary, to isolate the two different concerns: "adding a test" and "fixing logic".

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, we can investigate it in a separate PR.


override def getConf(key: String, defaultValue: String): String = {
conf.get(key, defaultValue)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.hive.client

import java.security.PrivilegedExceptionAction

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.UserGroupInformation
import org.scalatest.{BeforeAndAfterAll, PrivateMethodTester}

import org.apache.spark.util.Utils

class HiveClientUserNameSuite(version: String) extends HiveVersionSuite(version) {

  /**
   * Builds a fresh HiveClient against a temporary warehouse directory and returns
   * the user name it reports as the table owner.
   */
  private def currentClientUserName: String = {
    val conf = new Configuration()
    conf.set("hive.metastore.warehouse.dir", Utils.createTempDir().toURI().toString())
    buildClient(conf).userName
  }

  /** Runs `body` under the given UGI via a `doAs` block. */
  private def asUser(ugi: UserGroupInformation)(body: => Unit): Unit = {
    ugi.doAs(new PrivilegedExceptionAction[Unit]() {
      override def run(): Unit = body
    })
  }

  test("username of HiveClient - no UGI") {
    // Assuming we're not faking System username
    assert(currentClientUserName === System.getProperty("user.name"))
  }

  test("username of HiveClient - UGI") {
    val testUser = UserGroupInformation.createUserForTesting(
      "[email protected]", Array.empty)
    asUser(testUser) {
      assert(currentClientUserName === testUser.getShortUserName)
    }
  }

  test("username of HiveClient - Proxy user") {
    val realUser = UserGroupInformation.createUserForTesting(
      "[email protected]", Array.empty)
    val proxyUser = UserGroupInformation.createProxyUserForTesting(
      "[email protected]", realUser, Array.empty)
    asUser(proxyUser) {
      assert(currentClientUserName === proxyUser.getShortUserName)
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.hive.client

import scala.collection.immutable.IndexedSeq

import org.scalatest.Suite

/** Aggregates [[HiveClientUserNameSuite]] across every supported Hive metastore version. */
class HiveClientUserNameSuites extends Suite with HiveClientVersions {
  // One nested suite per Hive version under test.
  override def nestedSuites: IndexedSeq[Suite] =
    versions.map(v => new HiveClientUserNameSuite(v))
}
Original file line number Diff line number Diff line change
Expand Up @@ -31,8 +31,7 @@ import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.{BooleanType, IntegerType, LongType, StructType}
import org.apache.spark.util.Utils

// TODO: Refactor this to `HivePartitionFilteringSuite`
class HiveClientSuite(version: String)
class HivePartitionFilteringSuite(version: String)
extends HiveVersionSuite(version) with BeforeAndAfterAll {

private val tryDirectSqlKey = HiveConf.ConfVars.METASTORE_TRY_DIRECT_SQL.varname
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,9 +21,9 @@ import scala.collection.immutable.IndexedSeq

import org.scalatest.Suite

class HiveClientSuites extends Suite with HiveClientVersions {
/** Aggregates [[HivePartitionFilteringSuite]] across the supported Hive metastore versions. */
class HivePartitionFilteringSuites extends Suite with HiveClientVersions {
  override def nestedSuites: IndexedSeq[Suite] = {
    // Hive 0.12 does not provide the partition filtering API we call
    versions.filterNot(_ == "0.12").map(v => new HivePartitionFilteringSuite(v))
  }
}