support spark 3.5 #552

Closed · wants to merge 1 commit
8 changes: 4 additions & 4 deletions .github/workflows/ClusterTest.yml
@@ -13,17 +13,17 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
-spark_version: [ '3.4.0' ]
+spark_version: [ '3.5.0' ]
use_copy_unload: [ 'true' ]
cloud_provider: [ 'gcp' ]
env:
SNOWFLAKE_TEST_CONFIG_SECRET: ${{ secrets.SNOWFLAKE_TEST_CONFIG_SECRET }}
-TEST_SPARK_VERSION: '3.4'
-DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.4.0'
+TEST_SPARK_VERSION: '3.5'
+DOCKER_IMAGE_TAG: 'snowflakedb/spark-base:3.5.0'
TEST_SCALA_VERSION: '2.12'
TEST_COMPILE_SCALA_VERSION: '2.12.11'
TEST_SPARK_CONNECTOR_VERSION: '2.15.0'
-TEST_JDBC_VERSION: '3.13.30'
+TEST_JDBC_VERSION: '3.15.0'

steps:
- uses: actions/checkout@v2
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_2.12.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
-spark_version: [ '3.4.0' ]
+spark_version: [ '3.5.0' ]
use_copy_unload: [ 'true', 'false' ]
cloud_provider: [ 'aws', 'azure' ]
# run_query_in_async can be removed after async mode is stable
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_2.13.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.13.9' ]
-spark_version: [ '3.4.0' ]
+spark_version: [ '3.5.0' ]
use_copy_unload: [ 'true', 'false' ]
cloud_provider: [ 'aws', 'azure' ]
# run_query_in_async can be removed after async mode is stable
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_gcp_2.12.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.12.11' ]
-spark_version: [ '3.4.0' ]
+spark_version: [ '3.5.0' ]
use_copy_unload: [ 'false' ]
cloud_provider: [ 'gcp' ]
# run_query_in_async can be removed after async mode is stable
2 changes: 1 addition & 1 deletion .github/workflows/IntegrationTest_gcp_2.13.yml
@@ -13,7 +13,7 @@ jobs:
strategy:
matrix:
scala_version: [ '2.13.9' ]
-spark_version: [ '3.4.0' ]
+spark_version: [ '3.5.0' ]
use_copy_unload: [ 'false' ]
cloud_provider: [ 'gcp' ]
# run_query_in_async can be removed after async mode is stable
4 changes: 2 additions & 2 deletions ClusterTest/build.sbt
@@ -16,7 +16,7 @@

val sparkConnectorVersion = "2.15.0"
val scalaVersionMajor = "2.12"
-val sparkVersionMajor = "3.4"
+val sparkVersionMajor = "3.5"
val sparkVersion = s"${sparkVersionMajor}.0"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse(sparkVersion)

@@ -37,7 +37,7 @@ lazy val root = project.withId("spark-snowflake").in(file("."))
"Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
libraryDependencies ++= Seq(
"net.snowflake" % "snowflake-ingest-sdk" % "0.10.8",
"net.snowflake" % "snowflake-jdbc" % "3.13.30",
"net.snowflake" % "snowflake-jdbc" % "3.15.0",
// "net.snowflake" %% "spark-snowflake" % "2.8.0-spark_3.0",
// "com.google.guava" % "guava" % "14.0.1" % Test,
// "org.scalatest" %% "scalatest" % "3.0.5" % Test,
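The testSparkVersion line above is what lets CI test against a different Spark release without editing the build: sbt reads the spark.testVersion JVM system property and falls back to the build's own default. A minimal sketch of the mechanism, written as top-level build.sbt code (the property name and fallback come from the diff above; the 3.5.1 invocation is a hypothetical example):

// Launching sbt with -Dspark.testVersion=3.5.1 makes sys.props return Some("3.5.1");
// without the flag, getOrElse keeps the build's pinned default.
val defaultSparkVersion = "3.5.0"
val testSparkVersion: String = sys.props.get("spark.testVersion").getOrElse(defaultSparkVersion)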
@@ -161,7 +161,7 @@ object TestUtils {
*/
def getJDBCConnection(params: MergedParameters): Connection = {
// Derive class name
-try Class.forName("com.snowflake.client.jdbc.SnowflakeDriver")
+try Class.forName("net.snowflake.client.jdbc.SnowflakeDriver")
catch {
case _: ClassNotFoundException =>
System.err.println("Driver not found")
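This hunk fixes the driver class name: the Snowflake JDBC driver is packaged under net.snowflake.client.jdbc, so the old com.snowflake name could never resolve and the method always fell into the "Driver not found" branch. A minimal sketch of the corrected lookup with a hypothetical connect helper (the URL and credential handling are illustrative assumptions, not the connector's MergedParameters logic):

import java.sql.{Connection, DriverManager}

object JdbcDriverSketch {
  // Register the Snowflake JDBC driver; the class name matches the fix above.
  def loadDriver(): Unit =
    try Class.forName("net.snowflake.client.jdbc.SnowflakeDriver")
    catch {
      case _: ClassNotFoundException => System.err.println("Driver not found")
    }

  // Hypothetical helper: url is a placeholder such as
  // "jdbc:snowflake://<account>.snowflakecomputing.com".
  def connect(url: String, user: String, password: String): Connection = {
    loadDriver()
    val props = new java.util.Properties()
    props.setProperty("user", user)
    props.setProperty("password", password)
    DriverManager.getConnection(url, props)
  }
}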
8 changes: 4 additions & 4 deletions build.sbt
@@ -16,8 +16,8 @@

import scala.util.Properties

val sparkVersion = "3.4"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.4.0")
val sparkVersion = "3.5"
val testSparkVersion = sys.props.get("spark.testVersion").getOrElse("3.5.0")

/*
* Don't change the variable name "sparkConnectorVersion" because
@@ -41,7 +41,7 @@ lazy val root = project.withId("spark-snowflake").in(file("."))
.settings(
name := "spark-snowflake",
organization := "net.snowflake",
-version := s"${sparkConnectorVersion}-spark_3.4",
+version := s"${sparkConnectorVersion}-spark_3.5",
scalaVersion := sys.props.getOrElse("SPARK_SCALA_VERSION", default = "2.12.11"),
// Spark 3.2 supports scala 2.12 and 2.13
crossScalaVersions := Seq("2.12.11", "2.13.9"),
@@ -60,7 +60,7 @@ lazy val root = project.withId("spark-snowflake").in(file("."))
"Sonatype OSS Snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
libraryDependencies ++= Seq(
"net.snowflake" % "snowflake-ingest-sdk" % "0.10.8",
"net.snowflake" % "snowflake-jdbc" % "3.14.4",
"net.snowflake" % "snowflake-jdbc" % "3.15.0",
"org.scalatest" %% "scalatest" % "3.1.1" % Test,
"org.mockito" % "mockito-core" % "1.10.19" % Test,
"org.apache.commons" % "commons-lang3" % "3.5" % "provided",
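Since sparkConnectorVersion is pinned at "2.15.0" elsewhere in this PR (see ClusterTest/build.sbt and TEST_SPARK_CONNECTOR_VERSION above), the version setting is plain string interpolation; a one-line sketch of the resulting artifact version, assuming that same value:

// The s-interpolator splices the connector version into the Spark-suffixed artifact version.
val sparkConnectorVersion = "2.15.0"
val artifactVersion = s"${sparkConnectorVersion}-spark_3.5" // yields "2.15.0-spark_3.5"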
@@ -34,7 +34,7 @@ object SnowflakeConnectorUtils {
* Check the Spark version; if it matches SUPPORT_SPARK_VERSION, enable pushdown,
* otherwise disable it.
*/
-val SUPPORT_SPARK_VERSION = "3.4"
+val SUPPORT_SPARK_VERSION = "3.5"

def checkVersionAndEnablePushdown(session: SparkSession): Boolean =
if (session.version.startsWith(SUPPORT_SPARK_VERSION)) {
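Bumping SUPPORT_SPARK_VERSION to "3.5" makes the startsWith guard accept any 3.5.x session, since session.version returns the full release string. A hedged sketch of that guard, assuming a local SparkSession on the classpath (only the method signature and the startsWith check are visible in the diff above):

import org.apache.spark.sql.SparkSession

// session.version returns e.g. "3.5.0", so startsWith("3.5") matches
// every 3.5.x patch release; anything else leaves pushdown disabled.
val session = SparkSession.builder().master("local[*]").appName("pushdown-check").getOrCreate()
val pushdownEnabled = session.version.startsWith("3.5")
println(s"pushdown enabled: $pushdownEnabled")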
2 changes: 1 addition & 1 deletion src/main/scala/net/snowflake/spark/snowflake/Utils.scala
@@ -60,7 +60,7 @@ object Utils {
/**
* The JDBC driver version certified to work with this version of the Spark connector.
*/
-val CERTIFIED_JDBC_VERSION = "3.14.4"
+val CERTIFIED_JDBC_VERSION = "3.15.0"

/**
* Important:
@@ -122,7 +122,7 @@ private[querygeneration] object MiscStatement {
// Spark 3.4 introduced join hints. A join hint doesn't affect correctness,
// so it can be ignored in the pushdown process.
// https://github.com/apache/spark/commit/0fa9c554fc0b3940a47c3d1c6a5a17ca9a8cee8e
-case ScalarSubquery(subquery, _, _, joinCond, _) if joinCond.isEmpty =>
+case ScalarSubquery(subquery, _, _, joinCond, _, _) if joinCond.isEmpty =>
blockStatement(new QueryBuilder(subquery).statement)

case UnscaledValue(child) =>
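The extra underscore tracks a constructor-arity change: the pattern grows from five positions to six because ScalarSubquery gained a sixth constructor field, and a Scala constructor pattern must match every field or compilation fails. A standalone illustration with a hypothetical case class (field names and types are invented, not Catalyst's):

// Hypothetical stand-in for an expression class that gained an extra field between releases.
object PatternArityDemo {
  case class SubqueryExpr(plan: String,
                          ids: Seq[Int],
                          hint: Option[String],
                          joinCond: Seq[String],
                          extraField: Option[Boolean])

  def main(args: Array[String]): Unit =
    SubqueryExpr("q1", Nil, None, Nil, None) match {
      // Five fields require five patterns; dropping one underscore is a compile error.
      case SubqueryExpr(plan, _, _, joinCond, _) if joinCond.isEmpty =>
        println(s"pushdown-eligible subquery: $plan")
      case _ =>
        println("skipped")
    }
}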