diff --git a/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
index 1fa136e60b9..cc883bedbbd 100644
--- a/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-3/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
@@ -86,7 +86,7 @@ trait KyuubiSparkSQLExtensionTest extends QueryTest
       .set("spark.hadoop.hive.exec.dynamic.partition.mode", "nonstrict")
       .set("spark.hadoop.hive.metastore.client.capability.check", "false")
       .set(
-        ConfVars.METASTORECONNECTURLKEY.varname,
+        "javax.jdo.option.ConnectionURL",
         s"jdbc:derby:;databaseName=$metastorePath;create=true")
       .set(StaticSQLConf.WAREHOUSE_PATH, warehousePath)
       .set("spark.ui.enabled", "false")
diff --git a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
index 996bef763a2..6dca7e2e36b 100644
--- a/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-4/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
@@ -89,7 +89,7 @@ trait KyuubiSparkSQLExtensionTest extends QueryTest
       .set("spark.hadoop.hive.exec.dynamic.partition.mode", "nonstrict")
       .set("spark.hadoop.hive.metastore.client.capability.check", "false")
       .set(
-        ConfVars.METASTORECONNECTURLKEY.varname,
+        "javax.jdo.option.ConnectionURL",
         s"jdbc:derby:;databaseName=$metastorePath;create=true")
       .set(StaticSQLConf.WAREHOUSE_PATH, warehousePath)
       .set("spark.ui.enabled", "false")
diff --git a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
index 996bef763a2..6dca7e2e36b 100644
--- a/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
+++ b/extensions/spark/kyuubi-extension-spark-3-5/src/test/scala/org/apache/spark/sql/KyuubiSparkSQLExtensionTest.scala
@@ -89,7 +89,7 @@ trait KyuubiSparkSQLExtensionTest extends QueryTest
       .set("spark.hadoop.hive.exec.dynamic.partition.mode", "nonstrict")
       .set("spark.hadoop.hive.metastore.client.capability.check", "false")
       .set(
-        ConfVars.METASTORECONNECTURLKEY.varname,
+        "javax.jdo.option.ConnectionURL",
         s"jdbc:derby:;databaseName=$metastorePath;create=true")
       .set(StaticSQLConf.WAREHOUSE_PATH, warehousePath)
       .set("spark.ui.enabled", "false")
diff --git a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/write/HiveBatchWrite.scala b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/write/HiveBatchWrite.scala
index 2a30ac434c8..872513650d5 100644
--- a/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/write/HiveBatchWrite.scala
+++ b/extensions/spark/kyuubi-spark-connector-hive/src/main/scala/org/apache/kyuubi/spark/connector/hive/write/HiveBatchWrite.scala
@@ -126,10 +126,10 @@ class HiveBatchWrite(
     if (overwrite && table.tableType == CatalogTableType.EXTERNAL) {
       val numWrittenParts = writtenParts.size
-      val maxDynamicPartitionsKey = HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS.varname
+      val maxDynamicPartitionsKey = "hive.exec.max.dynamic.partitions"
       val maxDynamicPartitions = hadoopConf.getInt(
         maxDynamicPartitionsKey,
-        HiveConf.ConfVars.DYNAMICPARTITIONMAXPARTS.defaultIntVal)
+        HiveConf.getConfVars(maxDynamicPartitionsKey).defaultIntVal)
       if (numWrittenParts > maxDynamicPartitions) {
         throw KyuubiHiveConnectorException(
           s"Number of dynamic partitions created is $numWrittenParts, " +
diff --git a/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala b/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala
index c2c5cc59cb4..fd62d613513 100644
--- a/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala
+++ b/extensions/spark/kyuubi-spark-lineage/src/test/scala/org/apache/spark/sql/SparkListenerExtensionTest.scala
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql
 
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.internal.StaticSQLConf
 import org.apache.spark.util.Utils
@@ -35,7 +34,7 @@ trait SparkListenerExtensionTest {
       .master("local")
       .config("spark.ui.enabled", "false")
       .config(
-        ConfVars.METASTORECONNECTURLKEY.varname,
+        "javax.jdo.option.ConnectionURL",
         s"jdbc:derby:;databaseName=$metastorePath;create=true")
       .config("spark.sql.catalogImplementation", catalogImpl)
       .config(StaticSQLConf.WAREHOUSE_PATH.key, warehousePath)
diff --git a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationManager.scala b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationManager.scala
index faa7381ced7..84c5084cebb 100644
--- a/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationManager.scala
+++ b/externals/kyuubi-hive-sql-engine/src/main/scala/org/apache/kyuubi/engine/hive/operation/HiveOperationManager.scala
@@ -19,7 +19,7 @@ package org.apache.kyuubi.engine.hive.operation
 
 import java.util.List
 
-import org.apache.hadoop.hive.conf.HiveConf.ConfVars
+import org.apache.hadoop.hive.conf.HiveConf
 
 import org.apache.kyuubi.config.KyuubiConf._
 import org.apache.kyuubi.engine.hive.session.HiveSessionImpl
@@ -162,6 +162,7 @@ class HiveOperationManager() extends OperationManager("HiveOperationManager") {
   override def getQueryId(operation: Operation): String = {
     val hiveOperation = operation.asInstanceOf[HiveOperation]
     val internalHiveOperation = hiveOperation.internalHiveOperation
-    internalHiveOperation.getParentSession.getHiveConf.getVar(ConfVars.HIVEQUERYID)
+    internalHiveOperation.getParentSession.getHiveConf.getVar(
+      HiveConf.getConfVars("hive.query.id"))
   }
 }
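
A note on the pattern this patch adopts: HiveConf.ConfVars constants are resolved at
compile time, so code that names them breaks whenever a constant is renamed or removed
between Hive releases. Resolving the entry by its property-name string at runtime via
HiveConf.getConfVars(...), or reading the plain string key off the Hadoop Configuration,
stays source-compatible across Hive versions. The standalone sketch below illustrates
this (the object name is made up for illustration and it is not part of the patch);
it assumes hive-common and hadoop-common are on the classpath.

    import org.apache.hadoop.hive.conf.HiveConf

    object ConfVarsLookupSketch extends App {
      val conf = new HiveConf()

      // Compile-time enum reference; fails to compile if the constant is renamed:
      //   conf.getVar(HiveConf.ConfVars.HIVEQUERYID)
      // Runtime lookup by property name; survives enum reshuffles:
      val queryId = conf.getVar(HiveConf.getConfVars("hive.query.id"))

      // Plain string keys also work, since HiveConf extends Hadoop's Configuration;
      // the fallback (1000) mirrors Hive's shipped default for this key.
      val maxParts = conf.getInt("hive.exec.max.dynamic.partitions", 1000)

      println(s"queryId='$queryId', maxDynamicPartitions=$maxParts")
    }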