Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Display java scala version for testing #50

Merged
merged 3 commits into from
Aug 30, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 17 additions & 0 deletions .run/ClosedIntervalTest.run.xml
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
<!-- IntelliJ IDEA run configuration (generated name): runs ch.zzeekk.spark.temporalquery.ClosedIntervalTest
     with the ScalaTest runner against the spark-temporalquery_2.12 module, building ("Make") first. -->
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="ClosedIntervalTest" type="ScalaTestRunConfiguration" factoryName="ScalaTest"
nameIsGenerated="true" show_console_on_std_err="false" show_console_on_std_out="false">
<module name="spark-temporalquery_2.12"/>
<option name="allowRunningInParallel" value="false"/>
<option name="projectPathOnTarget"/>
<option name="selectedOptions">
<list/>
</option>
<!-- testKind=Class: run the whole suite class named in testClassPath below. -->
<option name="testKind" value="Class"/>
<option name="shortenClasspath" value="NONE"/>
<option name="testClassPath" value="ch.zzeekk.spark.temporalquery.ClosedIntervalTest"/>
<method v="2">
<option name="Make" enabled="true"/>
</method>
</configuration>
</component>
19 changes: 19 additions & 0 deletions src/main/scala/ch/zzeekk/spark/temporalquery/Logging.scala
Original file line number Diff line number Diff line change
@@ -1,7 +1,26 @@
package ch.zzeekk.spark.temporalquery

import org.apache.spark.sql.SparkSession
import org.slf4j.{Logger, LoggerFactory}

/**
 * Mixin providing an SLF4J logger plus a one-shot helper that logs the
 * runtime environment (Java, Scala and Spark versions).
 */
trait Logging {
  /** Lazily created SLF4J logger named after the concrete class mixing in this trait. */
  @transient protected lazy val logger: Logger = LoggerFactory.getLogger(getClass.getName)

  // Guard flag so the environment is logged at most once per instance.
  protected var _loggEnvDone: Boolean = false

  /**
   * Logs debug-enabled state and Java/Scala/Spark versions at INFO level.
   * Subsequent calls on the same instance are no-ops.
   *
   * @param session implicit SparkSession used to read the Spark version
   */
  protected def loggEnv(implicit session: SparkSession): Unit = {
    if (_loggEnvDone) ()
    else {
      logger.info(s"logger.isDebugEnabled ? ${logger.isDebugEnabled()}")
      logger.info(s"Java Version : ${System.getProperty("java.version")}")
      logger.info(s"Scala Version : ${scala.util.Properties.versionString}")
      logger.info(s"Spark Version : ${session.sparkContext.version}")
      _loggEnvDone = true
    }
  }

}
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
package ch.zzeekk.spark.temporalquery

import java.sql.Timestamp
import java.time.temporal.ChronoUnit

import ch.zzeekk.spark.temporalquery.TemporalHelpers.intervalComplement
import ch.zzeekk.spark.temporalquery.TemporalQueryUtil.{TemporalClosedIntervalQueryConfig, TemporalQueryConfig}
import ch.zzeekk.spark.temporalquery.TemporalQueryUtil.TemporalClosedIntervalQueryConfig
import org.apache.spark.sql.Row
import org.scalatest.FunSuite

import java.sql.Timestamp
import java.time.temporal.ChronoUnit

class ClosedIntervalTest extends FunSuite with TestUtils {

implicit private val timestampOrdering: Ordering[Timestamp] = Ordering.fromLessThan[Timestamp]((a,b) => a.before(b))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,18 @@ trait TestUtils extends Logging {

implicit val session: SparkSession = SparkSession.builder
.config("spark.port.maxRetries", 100)
.config("spark.ui.enabled", false)
.config("spark.ui.enabled", value = false)
.config("spark.sql.shuffle.partitions", 1)
.config("spark.task.maxFailures", 1)
.master("local").appName("TemporalQueryUtilTest").getOrCreate()

import session.implicits._

loggEnv

def symmetricDifference(df1: DataFrame, df2: DataFrame): DataFrame = {
// attention, "except" works on Dataset and not on DataFrame. We need to check that schema is equal.
require(df1.columns.toSeq==df2.columns.toSeq,
require(df1.columns.toSeq == df2.columns.toSeq,
s"""Cannot calculate symmetric difference for DataFrames with different schema.
|schema of df1: ${df1.columns.toSeq.mkString(",")}
|${df1.schema.treeString}
Expand Down
Loading