diff --git a/docs/sql-data-sources-hive-tables.md b/docs/sql-data-sources-hive-tables.md
--- a/docs/sql-data-sources-hive-tables.md
+++ b/docs/sql-data-sources-hive-tables.md
@@ ... @@ spark.sql.hive.metastore.version
-  2.3.9
+  2.3.10
   Version of the Hive metastore. Available
-  options are 2.0.0 through 2.3.9 and 3.0.0 through 3.1.3.
+  options are 2.0.0 through 2.3.10 and 3.0.0 through 3.1.3.
@@ ... @@ spark.sql.hive.metastore.jars
   builtin
-  Use Hive 2.3.9, which is bundled with the Spark assembly when -Phive is
+  Use Hive 2.3.10, which is bundled with the Spark assembly when -Phive is
   enabled. When this option is chosen, spark.sql.hive.metastore.version must be
-  either 2.3.9 or not defined.
+  either 2.3.10 or not defined.
   maven
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveUtils.scala
@@ ... @@ val HIVE_METASTORE_VERSION
     .doc("Version of the Hive metastore. Available options are " +
-      "2.0.0 through 2.3.9 and " +
+      "2.0.0 through 2.3.10 and " +
       "3.0.0 through 3.1.3.")
     .version("1.4.0")
     .stringConf
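
For context, a minimal sketch of how these two settings are typically combined in user code; the app name is hypothetical, and with jars set to builtin the version must match the bundled 2.3.10 (or be left unset):

    import org.apache.spark.sql.SparkSession

    // Minimal sketch: pin the metastore client to the bundled Hive version.
    val spark = SparkSession.builder()
      .appName("metastore-2.3.10-check")  // hypothetical app name
      .config("spark.sql.hive.metastore.version", "2.3.10")
      .config("spark.sql.hive.metastore.jars", "builtin")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("SHOW DATABASES").show()
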
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 1cd60c0d3fffe..130da78623b79 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -1358,7 +1358,7 @@ private[hive] object HiveClientImpl extends Logging {
try {
Hive.getWithoutRegisterFns(hiveConf)
} catch {
- // SPARK-37069: not all Hive versions have the above method (e.g., Hive 2.3.9 has it but
+ // SPARK-37069: not all Hive versions have the above method (e.g., Hive 2.3.10 has it but
// 2.3.8 don't), therefore here we fallback when encountering the exception.
case _: NoSuchMethodError =>
Hive.get(hiveConf)
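
The fallback above follows a general pattern for supporting multiple Hive releases at runtime: try the newer entry point and drop back to the older one when the linked jars predate it. A self-contained sketch of the pattern, with hypothetical loader parameters standing in for Hive.getWithoutRegisterFns and Hive.get:

    // NoSuchMethodError is raised at call time when the method is missing
    // from the Hive jars on the classpath, so it can be caught and handled.
    def clientFor[T](newApi: () => T, oldApi: () => T): T =
      try newApi() catch {
        case _: NoSuchMethodError => oldApi()
      }
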
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
index 564c87a0fca8e..d172af21a9170 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala
@@ -59,13 +59,12 @@ package object client {
"org.pentaho:pentaho-aggdesigner-algorithm"))
// Since HIVE-23980, calcite-core included in Hive package jar.
- case object v2_3 extends HiveVersion("2.3.9",
+ case object v2_3 extends HiveVersion("2.3.10",
exclusions = Seq("org.apache.calcite:calcite-core",
"org.apache.calcite:calcite-druid",
"org.apache.calcite.avatica:avatica",
- "com.fasterxml.jackson.core:*",
"org.apache.curator:*",
- "org.pentaho:pentaho-aggdesigner-algorithm",
+ "net.hydromatic:aggdesigner-algorithm",
"org.apache.hive:hive-vector-code-gen"))
// Since Hive 3.0, HookUtils uses org.apache.logging.log4j.util.Strings
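
The exclusions above are Maven coordinates in group:artifact form, with * as an artifact wildcard. An illustrative matcher (not Spark's actual resolver logic) showing how such patterns select dependencies:

    // Hypothetical helper: does an exclusion pattern cover a dependency?
    def excludes(pattern: String, group: String, artifact: String): Boolean = {
      val Array(g, a) = pattern.split(":", 2)
      g == group && (a == "*" || a == artifact)
    }

    excludes("org.apache.curator:*", "org.apache.curator", "curator-client")  // true
    excludes("net.hydromatic:aggdesigner-algorithm",
      "net.hydromatic", "aggdesigner-algorithm")                              // true
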
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
index 726341ffdf9e3..55dd9bf6efd50 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveExternalCatalogVersionsSuite.scala
@@ -211,7 +211,7 @@ class HiveExternalCatalogVersionsSuite extends SparkSubmitTestUtils {
tryDownloadSpark(version, sparkTestingDir.getCanonicalPath)
}
- // Extract major.minor for testing Spark 3.1.x and 3.0.x with metastore 2.3.9 and Java 11.
+ // Extract major.minor for testing Spark 3.1.x and 3.0.x with metastore 2.3.10 and Java 11.
val hiveMetastoreVersion = """^\d+\.\d+""".r.findFirstIn(hiveVersion).get
val args = Seq(
"--name", "prepare testing tables",
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index c7aa412959097..e88a37f019b7d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -149,7 +149,7 @@ class HiveSparkSubmitSuite
"--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", "spark.ui.enabled=false",
"--conf", "spark.master.rest.enabled=false",
- "--conf", "spark.sql.hive.metastore.version=2.3.9",
+ "--conf", "spark.sql.hive.metastore.version=2.3.10",
"--conf", "spark.sql.hive.metastore.jars=maven",
"--driver-java-options", "-Dderby.system.durability=test",
unusedJar.toString)
@@ -370,7 +370,7 @@ class HiveSparkSubmitSuite
"--master", "local-cluster[2,1,512]",
"--conf", s"${EXECUTOR_MEMORY.key}=512m",
"--conf", s"${LEGACY_TIME_PARSER_POLICY.key}=LEGACY",
- "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=2.3.9",
+ "--conf", s"${HiveUtils.HIVE_METASTORE_VERSION.key}=2.3.10",
"--conf", s"${HiveUtils.HIVE_METASTORE_JARS.key}=maven",
"--conf", s"spark.hadoop.javax.jdo.option.ConnectionURL=$metastore",
unusedJar.toString)
@@ -387,7 +387,7 @@ object SetMetastoreURLTest extends Logging {
val builder = SparkSession.builder()
.config(sparkConf)
.config(UI_ENABLED.key, "false")
- .config(HiveUtils.HIVE_METASTORE_VERSION.key, "2.3.9")
+ .config(HiveUtils.HIVE_METASTORE_VERSION.key, "2.3.10")
// The issue described in SPARK-16901 only appear when
// spark.sql.hive.metastore.jars is not set to builtin.
.config(HiveUtils.HIVE_METASTORE_JARS.key, "maven")
@@ -698,7 +698,7 @@ object SparkSQLConfTest extends Logging {
val filteredSettings = super.getAll.filterNot(e => isMetastoreSetting(e._1))
// Always add these two metastore settings at the beginning.
- (HiveUtils.HIVE_METASTORE_VERSION.key -> "2.3.9") +:
+ (HiveUtils.HIVE_METASTORE_VERSION.key -> "2.3.10") +:
(HiveUtils.HIVE_METASTORE_JARS.key -> "maven") +:
filteredSettings
}
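
The +: prepend here ensures the pinned metastore settings precede anything inherited from the environment. A standalone sketch of the same filter-then-prepend, with made-up inherited entries:

    // Made-up inherited settings; stale metastore keys are filtered out,
    // then the pinned ones are re-added at the front with +:.
    val inherited = Seq(
      "spark.sql.hive.metastore.version" -> "2.3.8",
      "spark.app.name" -> "conf-test")
    val kept = inherited.filterNot { case (k, _) =>
      k.startsWith("spark.sql.hive.metastore") }
    val all = ("spark.sql.hive.metastore.version" -> "2.3.10") +:
      ("spark.sql.hive.metastore.jars" -> "maven") +: kept
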
@@ -726,7 +726,7 @@ object SPARK_9757 extends QueryTest {
val hiveWarehouseLocation = Utils.createTempDir()
val sparkContext = new SparkContext(
new SparkConf()
- .set(HiveUtils.HIVE_METASTORE_VERSION.key, "2.3.9")
+ .set(HiveUtils.HIVE_METASTORE_VERSION.key, "2.3.10")
.set(HiveUtils.HIVE_METASTORE_JARS.key, "maven")
.set(UI_ENABLED, false)
.set(WAREHOUSE_PATH.key, hiveWarehouseLocation.toString))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 397da6c18b50a..5e58959ca4f7d 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -1627,10 +1627,8 @@ class HiveQuerySuite extends HiveComparisonTest with SQLTestUtils with BeforeAnd
test("SPARK-33084: Add jar support Ivy URI in SQL") {
val testData = TestHive.getHiveFile("data/files/sample.json").toURI
withTable("t") {
- // hive-catalog-core has some transitive dependencies which dont exist on maven central
- // and hence cannot be found in the test environment or are non-jar (.pom) which cause
- // failures in tests. Use transitive=false as it should be good enough to test the Ivy
- // support in Hive ADD JAR
+ // Use transitive=false as it should be good enough to test the Ivy support
+ // in Hive ADD JAR
sql(s"ADD JAR ivy://org.apache.hive.hcatalog:hive-hcatalog-core:$hiveVersion" +
"?transitive=false")
sql(