Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Upgrade to scala 2.13 and spark 3.3.2 in branch-0.8 #350

Closed
wants to merge 23 commits into branch-0.8 from the contributor's branch (source branch name not captured in this page scrape)
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 23 additions & 3 deletions build.sbt
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,12 @@ import sbt.ExclusionRule

ThisBuild / parallelExecution := false

val sparkVersion = "3.1.1"
val sparkVersion = "3.3.2"
val scala212 = "2.12.10"
val scala213 = "2.13.11"

lazy val commonSettings = Seq(
organization := "io.delta",
scalaVersion := "2.12.10",
fork := true,
javacOptions ++= Seq("-source", "1.8", "-target", "1.8"),
scalacOptions += "-target:jvm-1.8",
Expand All @@ -43,10 +44,13 @@ lazy val root = (project in file(".")).aggregate(spark, server)

lazy val spark = (project in file("spark")) settings(
name := "delta-sharing-spark",
crossScalaVersions := Seq(scala212, scala213),
commonSettings,
scalaStyleSettings,
releaseSettings,
libraryDependencies ++= Seq(
"org.apache.httpcomponents" % "httpclient" % "4.5.13",
"org.codehaus.jackson" % "jackson-mapper-asl" % "1.9.13",
"org.apache.spark" %% "spark-sql" % sparkVersion % "provided",
"org.apache.spark" %% "spark-catalyst" % sparkVersion % "test" classifier "tests",
"org.apache.spark" %% "spark-core" % sparkVersion % "test" classifier "tests",
Expand All @@ -68,6 +72,7 @@ lazy val spark = (project in file("spark")) settings(

lazy val server = (project in file("server")) enablePlugins(JavaAppPackaging) settings(
name := "delta-sharing-server",
scalaVersion := scala212,
commonSettings,
scalaStyleSettings,
releaseSettings,
Expand Down Expand Up @@ -179,6 +184,8 @@ import ReleaseTransformations._

lazy val releaseSettings = Seq(
publishMavenStyle := true,
publishArtifact := true,
Test / publishArtifact := false,

publishTo := {
val nexus = "https://oss.sonatype.org/"
Expand All @@ -189,6 +196,7 @@ lazy val releaseSettings = Seq(
}
},


releasePublishArtifactsAction := PgpKeys.publishSigned.value,

releaseCrossBuild := true,
Expand Down Expand Up @@ -227,6 +235,16 @@ lazy val releaseSettings = Seq(
<name>Shixiong Zhu</name>
<url>https://github.com/zsxwing</url>
</developer>
<developer>
<id>linzhou-db</id>
<name>Lin Zhou</name>
<url>https://github.com/linzhou-db</url>
</developer>
<developer>
<id>chakankardb</id>
<name>Abhijit Chakankar</name>
<url>https://github.com/chakankardb</url>
</developer>
</developers>
)

Expand All @@ -235,6 +253,8 @@ publishArtifact := false // Don't release the root project
publish := {}
publishTo := Some("snapshots" at "https://oss.sonatype.org/content/repositories/snapshots")
releaseCrossBuild := false
// crossScalaVersions must be set to Nil on the root project
crossScalaVersions := Nil
releaseProcess := Seq[ReleaseStep](
checkSnapshotDependencies,
inquireVersions,
Expand All @@ -243,7 +263,7 @@ releaseProcess := Seq[ReleaseStep](
setReleaseVersion,
commitReleaseVersion,
tagRelease,
publishArtifacts,
releaseStepCommandAndRemaining("+publishSigned"),
setNextVersion,
commitNextVersion
)
2 changes: 1 addition & 1 deletion python/delta_sharing/version.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@
# limitations under the License.
#

__version__ = "0.7.1"
__version__ = "0.8.0"
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ private[spark] class DeltaSharingRestClient(
shares ++= response.items
}
}
shares
shares.toSeq
}

private def listAllTablesInShare(share: Share): Seq[Table] = {
Expand All @@ -174,7 +174,7 @@ private[spark] class DeltaSharingRestClient(
tables ++= response.items
}
}
tables
tables.toSeq
}

override def getForStreaming(): Boolean = forStreaming
Expand Down Expand Up @@ -282,9 +282,9 @@ private[spark] class DeltaSharingRestClient(
version,
protocol,
metadata,
addFiles = addFiles,
removeFiles = removeFiles,
additionalMetadatas = additionalMetadatas
addFiles = addFiles.toSeq,
removeFiles = removeFiles.toSeq,
additionalMetadatas = additionalMetadatas.toSeq
)
}

Expand Down Expand Up @@ -319,10 +319,10 @@ private[spark] class DeltaSharingRestClient(
version,
protocol,
metadata,
addFiles = addFiles,
cdfFiles = cdfFiles,
removeFiles = removeFiles,
additionalMetadatas = additionalMetadatas
addFiles = addFiles.toSeq,
cdfFiles = cdfFiles.toSeq,
removeFiles = removeFiles.toSeq,
additionalMetadatas = additionalMetadatas.toSeq
)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ private[sharing] class DeltaSharingDataSource
DeltaSharingSource(SparkSession.active, deltaLog, options)
}

override def shortName: String = "deltaSharing"
override def shortName(): String = "deltaSharing"
}

private[sharing] object DeltaSharingDataSource {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ import org.apache.hadoop.fs.Path
import io.delta.sharing.spark.util.JsonUtils

case class DeltaSharingProfile(
shareCredentialsVersion: Option[Int] = Some(DeltaSharingProfile.CURRENT),
shareCredentialsVersion: Option[Int],
endpoint: String = null,
bearerToken: String = null,
expirationTime: String = null)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -801,9 +801,9 @@ case class DeltaSharingSource(
DeltaSharingCDFReader.changesToDF(
new RemoteDeltaFileIndexParams(spark, initSnapshot, deltaLog.client.getProfileProvider),
schema.fields.map(f => f.name),
addFiles,
cdfFiles,
removeFiles,
addFiles.toSeq,
cdfFiles.toSeq,
removeFiles.toSeq,
schema,
isStreaming = true,
latestRefreshFunc,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ object DeltaSharingCDFReader {
CachedTableManager.INSTANCE.register(
params.path.toString,
getIdToUrl(addFiles, cdfFiles, removeFiles),
refs,
refs.toSeq,
params.profileProvider,
refresher,
if (expirationTimestamp.isDefined) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ class DeltaSharingFileProfileProviderSuite extends SparkFunSuite {
)
}

test("version is missing") {
test("shareCredentialsVersion is missing") {
val e = intercept[IllegalArgumentException] {
testProfile(
"""{
Expand Down
4 changes: 1 addition & 3 deletions spark/src/test/scala/io/delta/sharing/spark/TestUtils.scala
Original file line number Diff line number Diff line change
Expand Up @@ -43,9 +43,7 @@ object TestUtils {
}

def sqlDate(date: String): java.sql.Date = {
toJavaDate(stringToDate(
UTF8String.fromString(date),
getZoneId(SQLConf.get.sessionLocalTimeZone)).get)
toJavaDate(stringToDate(UTF8String.fromString(date)).get)
}

def sqlTimestamp(timestamp: String): java.sql.Timestamp = {
Expand Down
2 changes: 1 addition & 1 deletion version.sbt
Original file line number Diff line number Diff line change
@@ -1 +1 @@
version in ThisBuild := "0.7.2-SNAPSHOT"
version in ThisBuild := "0.8.0"
Loading