diff --git a/src/test/resources/log4j.properties b/src/test/resources/log4j2.properties
similarity index 91%
rename from src/test/resources/log4j.properties
rename to src/test/resources/log4j2.properties
index 9232a2fd..b3c2e25b 100644
--- a/src/test/resources/log4j.properties
+++ b/src/test/resources/log4j2.properties
@@ -18,6 +18,6 @@ appender.console.layout.type = PatternLayout
 appender.console.layout.pattern = %d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n
 
 # Root logger level
-rootLogger.level = debug
+rootLogger.level = warn
 # Root logger referring to console appender
-rootLogger.appenderRef.stdout.ref = consoleLogger
\ No newline at end of file
+rootLogger.appenderRef.stdout.ref = consoleLogger
diff --git a/src/test/scala/com/crealytics/spark/excel/IntegrationSuite.scala b/src/test/scala/com/crealytics/spark/excel/IntegrationSuite.scala
index b3f83787..75c541af 100644
--- a/src/test/scala/com/crealytics/spark/excel/IntegrationSuite.scala
+++ b/src/test/scala/com/crealytics/spark/excel/IntegrationSuite.scala
@@ -33,7 +33,7 @@ import spoiwo.natures.xlsx.Model2XlsxConversions._
 import java.io.{File, FileOutputStream}
 import scala.collection.compat._
 
-class IntegrationSuite
+abstract class IntegrationSuite(implementation: String)
     extends AnyFunSpec
     with ScalaCheckPropertyChecks
     with DataFrameSuiteBase
@@ -83,7 +83,11 @@ class IntegrationSuite
   ): DataFrame = {
     val theFileName = fileName.getOrElse(File.createTempFile("spark_excel_test_", ".xlsx").getAbsolutePath)
 
-    val writer = df.write.excel(dataAddress = s"'$sheetName'!A1", header = header).mode(saveMode)
+    val writer = df.write
+      .format(implementation)
+      .option("dataAddress", s"'$sheetName'!A1")
+      .option("header", header)
+      .mode(saveMode)
     val configuredWriter = Map("dataAddress" -> dataAddress).foldLeft(writer) {
       case (wri, (key, Some(value))) => wri.option(key, value)
@@ -91,7 +95,7 @@ class IntegrationSuite
     }
     configuredWriter.save(theFileName)
 
-    val reader = spark.read.excel(dataAddress = s"'$sheetName'!A1", header = header)
+    val reader = spark.read.format(implementation).option("dataAddress", s"'$sheetName'!A1").option("header", header)
     val configuredReader = Map(
       "maxRowsInMemory" -> maxRowsInMemory,
       "maxByteArraySize" -> maxByteArraySize,
@@ -117,7 +121,9 @@ class IntegrationSuite
       assertDataFrameEquals(expected, inferred)
     }
 
-    describe(s"with maxRowsInMemory = $maxRowsInMemory; maxByteArraySize = $maxByteArraySize") {
+    describe(
+      s"with implementation = $implementation, maxRowsInMemory = $maxRowsInMemory; maxByteArraySize = $maxByteArraySize"
+    ) {
       it("parses known datatypes correctly") {
         forAll(rowsGen) { rows =>
           val expected = spark.createDataset(rows).toDF()
@@ -352,3 +358,6 @@ class IntegrationSuite
   runTests(maxRowsInMemory = Some(1))
   runTests(maxRowsInMemory = Some(1), maxByteArraySize = Some(100000000))
 }
+
+class IntegrationSuiteV1 extends IntegrationSuite("com.crealytics.spark.excel")
+class IntegrationSuiteV2 extends IntegrationSuite("excel")
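
Note: the diff above replaces the suite's `.excel(...)` implicit helpers with Spark's generic format/option API, so one abstract suite can exercise both the V1 data source ("com.crealytics.spark.excel") and the V2 one ("excel"). The sketch below illustrates that round trip outside the test harness; it is a minimal, assumption-laden example, not part of the patch: the object name, file path, and sample data are invented, and a local SparkSession is assumed.

import org.apache.spark.sql.{SaveMode, SparkSession}

object ExcelRoundTripSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("excel-round-trip").getOrCreate()
    import spark.implicits._

    // Toy frame standing in for the suite's generated data.
    val df = Seq(("a", 1), ("b", 2)).toDF("name", "value")

    // "excel" selects the V2 implementation; "com.crealytics.spark.excel" selects V1,
    // mirroring IntegrationSuiteV1/IntegrationSuiteV2 in the diff.
    val implementation = "excel"
    val path = "/tmp/round_trip.xlsx" // hypothetical output path

    // Write through the generic DataFrameWriter API, as the patched suite now does.
    df.write
      .format(implementation)
      .option("dataAddress", "'Sheet1'!A1")
      .option("header", true)
      .mode(SaveMode.Overwrite)
      .save(path)

    // Read it back with the same implementation and data address.
    val roundTripped = spark.read
      .format(implementation)
      .option("dataAddress", "'Sheet1'!A1")
      .option("header", true)
      .load(path)

    roundTripped.show()
    spark.stop()
  }
}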