Skip to content

Commit

Permalink
test: Run integration tests against v2 as well
Browse files Browse the repository at this point in the history
  • Loading branch information
nightscape committed Mar 6, 2024
1 parent 11f42d4 commit 3dc5e63
Showing 1 changed file with 13 additions and 4 deletions.
17 changes: 13 additions & 4 deletions src/test/scala/com/crealytics/spark/excel/IntegrationSuite.scala
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ import spoiwo.natures.xlsx.Model2XlsxConversions._
import java.io.{File, FileOutputStream}
import scala.collection.compat._

class IntegrationSuite
abstract class IntegrationSuite(implementation: String)
extends AnyFunSpec
with ScalaCheckPropertyChecks
with DataFrameSuiteBase
Expand Down Expand Up @@ -83,15 +83,19 @@ class IntegrationSuite
): DataFrame = {
val theFileName = fileName.getOrElse(File.createTempFile("spark_excel_test_", ".xlsx").getAbsolutePath)

val writer = df.write.excel(dataAddress = s"'$sheetName'!A1", header = header).mode(saveMode)
val writer = df.write
.format(implementation)
.option("dataAddress", s"'$sheetName'!A1")
.option("header", header)
.mode(saveMode)
val configuredWriter =
Map("dataAddress" -> dataAddress).foldLeft(writer) {
case (wri, (key, Some(value))) => wri.option(key, value)
case (wri, _) => wri
}
configuredWriter.save(theFileName)

val reader = spark.read.excel(dataAddress = s"'$sheetName'!A1", header = header)
val reader = spark.read.format(implementation).option("dataAddress", s"'$sheetName'!A1").option("header", header)
val configuredReader = Map(
"maxRowsInMemory" -> maxRowsInMemory,
"maxByteArraySize" -> maxByteArraySize,
Expand All @@ -117,7 +121,9 @@ class IntegrationSuite
assertDataFrameEquals(expected, inferred)
}

describe(s"with maxRowsInMemory = $maxRowsInMemory; maxByteArraySize = $maxByteArraySize") {
describe(
s"with implementation = $implementation, maxRowsInMemory = $maxRowsInMemory; maxByteArraySize = $maxByteArraySize"
) {
it("parses known datatypes correctly") {
forAll(rowsGen) { rows =>
val expected = spark.createDataset(rows).toDF()
Expand Down Expand Up @@ -352,3 +358,6 @@ class IntegrationSuite
runTests(maxRowsInMemory = Some(1))
runTests(maxRowsInMemory = Some(1), maxByteArraySize = Some(100000000))
}

/** Concrete suite that runs [[IntegrationSuite]] with the fully-qualified
  * `"com.crealytics.spark.excel"` format identifier — presumably the original
  * (v1) data-source implementation; confirm against the registered sources.
  */
class IntegrationSuiteV1 extends IntegrationSuite("com.crealytics.spark.excel")
/** Concrete suite that runs [[IntegrationSuite]] with the short `"excel"`
  * format identifier — presumably the v2 data-source implementation added by
  * this change; confirm against the registered sources.
  */
class IntegrationSuiteV2 extends IntegrationSuite("excel")

0 comments on commit 3dc5e63

Please sign in to comment.