From f7b8a6c4d5dd1bf81fe70045d78b7f6555f9d4bd Mon Sep 17 00:00:00 2001
From: Chandresh
Date: Mon, 27 Jan 2020 11:07:06 +0530
Subject: [PATCH] Add the test case for NULL value in MapType in Smart Connector mode

---
 .../smartConnectorMapTypes.conf                |   7 +-
 .../SmartConnectorArrayTypeNULL.scala          |   3 -
 .../SmartConnectorMapTypeNULL.scala            | 129 ++++++++++++++++++
 3 files changed, 135 insertions(+), 4 deletions(-)
 create mode 100644 dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala

diff --git a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf
index 07a66990c0..721af9a5e7 100644
--- a/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf
+++ b/dtests/src/test/java/io/snappydata/hydra/complexdatatypes/smartConnectorMapTypes.conf
@@ -4,7 +4,6 @@
 query as well as dataFrame API";
 
 INCLUDE $JTESTS/io/snappydata/hydra/northwind/startDualModeCluster.conf;
-
 TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob
   io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorMapType
   io.snappydata.hydra.cluster.SnappyPrms-userAppArgs = "${dataFilesLocation}"
@@ -19,6 +18,12 @@ TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_e
   threadGroups = snappyStoreThreads
   maxTimesToRun = 1;
 
+TASK taskClass = io.snappydata.hydra.cluster.SnappyTest taskMethod = HydraTask_executeSparkJob
+  io.snappydata.hydra.cluster.SnappyPrms-sparkJobClassNames = io.snappydata.hydra.complexdatatypes.SmartConnectorMapTypeNULL
+  io.snappydata.hydra.cluster.SnappyPrms-userAppJar = snappydata-store-scala-tests*tests.jar
+  threadGroups = snappyStoreThreads
+  maxTimesToRun = 1;
+
 INCLUDE $JTESTS/io/snappydata/hydra/northwind/stopDualModeCluster.conf;
 hydra.Prms-maxResultWaitSec = 7200;
 hydra.Prms-totalTaskTimeSec = 7200;

diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala
index 1363fa3551..f4b07ee9e3 100644
--- a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala
+++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorArrayTypeNULL.scala
@@ -35,9 +35,6 @@ object SmartConnectorArrayTypeNULL {
     val spark : SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate()
     val sqlContext = spark.sqlContext
 
-// def getCurrentDirectory = new java.io.File(".").getCanonicalPath()
-// val dataLocation = args(0)
-// println("DataLocation : " + dataLocation)
     val pw : PrintWriter = new PrintWriter(new FileOutputStream(
       new File("ValidateSmartConnectorArrayTypeNULL" + "_" + "column" + System.currentTimeMillis())
       , false))

diff --git a/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala
new file mode 100644
index 0000000000..8232790ed7
--- /dev/null
+++ b/dtests/src/test/scala/io/snappydata/hydra/complexdatatypes/SmartConnectorMapTypeNULL.scala
@@ -0,0 +1,129 @@
+/*
+ * Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you
+ * may not use this file except in compliance with the License. You
+ * may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ * implied. See the License for the specific language governing
+ * permissions and limitations under the License. See accompanying
+ * LICENSE file.
+ */
+package io.snappydata.hydra.complexdatatypes
+
+import java.io.{File, FileOutputStream, PrintWriter}
+
+import org.apache.spark.sql.{SnappyContext, SparkSession}
+import org.apache.spark.{SparkConf, SparkContext}
+
+object SmartConnectorMapTypeNULL {
+  def main(args: Array[String]): Unit = {
+    // scalastyle:off println
+    println("Smart Connector NULL value in MapType Job started...")
+    val connectionURL = args(args.length - 1)
+    println("Connection URL is : " + connectionURL)
+    val conf = new SparkConf()
+      .setAppName("Spark_ComplexType_MapTypeNULL_Validation")
+      .set("snappydata.connection", connectionURL)
+    val sc : SparkContext = SparkContext.getOrCreate(conf)
+    val snc : SnappyContext = SnappyContext(sc)
+    val spark : SparkSession = SparkSession.builder().enableHiveSupport().config(conf).getOrCreate()
+    val sqlContext = spark.sqlContext
+    val pw : PrintWriter = new PrintWriter(new FileOutputStream(
+      new File("ValidateSmartConnectorMapTypeNULL" + "_" + "column" + System.currentTimeMillis())
+      , false))
+    /**
+     * Test : NULL value in MapType column.
+     */
+    snc.sql(ComplexTypeUtils.createSchemaST)
+    spark.sql(ComplexTypeUtils.createSchemaST)
+
+    /**
+     * Test Case 1 : MapType Column is the last column in the table.
+     */
+    snc.sql(ComplexTypeUtils.createTableLastColumnMapType)
+    spark.sql(ComplexTypeUtils.createTableInSparkMapTypeLastColumn)
+    snc.sql(ComplexTypeUtils.insertNULLMapTypeLast)
+    spark.sql(ComplexTypeUtils.insertNULLMapTypeLast)
+    snc.sql(ComplexTypeUtils.insertNormalDataMapTypeLast)
+    spark.sql(ComplexTypeUtils.insertNormalDataMapTypeLast)
+    /**
+     * Test Case 2 : MapType Column is in the middle of the other data types in the table.
+     */
+    snc.sql(ComplexTypeUtils.createTableMiddleColumnMapType)
+    spark.sql(ComplexTypeUtils.createTableInSparkMapTypeMiddleColumn)
+    snc.sql(ComplexTypeUtils.insertNULLMapTypeMiddle)
+    spark.sql(ComplexTypeUtils.insertNULLMapTypeMiddle)
+    snc.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle)
+    spark.sql(ComplexTypeUtils.insertNormalDataMapTypeMiddle)
+    /**
+     * Test Case 3 : MapType Column is the first column in the table.
+     */
+    snc.sql(ComplexTypeUtils.createTableFirstColumnMapType)
+    spark.sql(ComplexTypeUtils.createTableInSparkMapTypeFirstColumn)
+    snc.sql(ComplexTypeUtils.insertNULLMapTypeFirst)
+    spark.sql(ComplexTypeUtils.insertNULLMapTypeFirst)
+    snc.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst)
+    spark.sql(ComplexTypeUtils.insertNormalDataMapTypeFirst)
+    /**
+     * Validation Routine
+     */
+    val snappyDFLast = snc.sql(ComplexTypeUtils.selectMapLast)
+    val sparkDFLast = spark.sql(ComplexTypeUtils.selectMapLast)
+    val df1Last = snappyDFLast.collect()
+    val df2Last = sparkDFLast.collect()
+    var result1 = df1Last.mkString(",")
+    var result2 = df2Last.mkString(",")
+    pw.println(result1)
+    pw.println(result2)
+    if (df1Last.sameElements(df2Last)) { // element-wise comparison of the collected rows
+      pw.println("-- Insertion of NULL value in MapType last column OK --")
+    } else {
+      pw.println("-- Insertion of NULL value in MapType last column FAILED --")
+    }
+    pw.flush()
+    val snappyDFMiddle = snc.sql(ComplexTypeUtils.selectMapMiddle)
+    val sparkDFMiddle = spark.sql(ComplexTypeUtils.selectMapMiddle)
+    val df1Middle = snappyDFMiddle.collect()
+    val df2Middle = sparkDFMiddle.collect()
+    result1 = df1Middle.mkString(",")
+    result2 = df2Middle.mkString(",")
+    pw.println(result1)
+    pw.println(result2)
+    if (df1Middle.sameElements(df2Middle)) {
+      pw.println("-- Insertion of NULL value in MapType Middle column OK --")
+    } else {
+      pw.println("-- Insertion of NULL value in MapType Middle column FAILED --")
+    }
+    pw.flush()
+    val snappyDFFirst = snc.sql(ComplexTypeUtils.selectMapFirst)
+    val sparkDFFirst = spark.sql(ComplexTypeUtils.selectMapFirst)
+    val df1First = snappyDFFirst.collect()
+    val df2First = sparkDFFirst.collect()
+    result1 = df1First.mkString(",")
+    result2 = df2First.mkString(",")
+    pw.println(result1)
+    pw.println(result2)
+    if (df1First.sameElements(df2First)) {
+      pw.println("-- Insertion of NULL value in MapType First column OK --")
+    } else {
+      pw.println("-- Insertion of NULL value in MapType First column FAILED --")
+    }
+    pw.flush()
+    pw.close()
+    snc.sql(ComplexTypeUtils.dropTableMapLast)
+    snc.sql(ComplexTypeUtils.dropTableMapMiddle)
+    snc.sql(ComplexTypeUtils.dropTableMapFirst)
+    spark.sql(ComplexTypeUtils.dropTableMapLast)
+    spark.sql(ComplexTypeUtils.dropTableMapMiddle)
+    spark.sql(ComplexTypeUtils.dropTableMapFirst)
+    snc.sql(ComplexTypeUtils.dropDatabaseST)
+    spark.sql(ComplexTypeUtils.dropDatabaseST)
+
+  }
+}
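
Note on the validation routine: sameElements compares the two collected arrays element by element, so the checks assume that the Snappy and Spark queries return rows in the same order. If the ComplexTypeUtils.selectMap* queries do not enforce an ORDER BY, an order-insensitive check is safer. A minimal sketch, assuming the same collected Row arrays as in the patch (the ResultCompare object and resultsMatch helper are illustrative names, not part of the patch):

import org.apache.spark.sql.Row

object ResultCompare {
  // Order-insensitive comparison of two collected result sets:
  // sort the string form of each row, then compare element by element.
  def resultsMatch(snappyRows: Array[Row], sparkRows: Array[Row]): Boolean =
    snappyRows.map(_.toString).sorted.sameElements(sparkRows.map(_.toString).sorted)
}

With such a helper, the first check could be written as
if (ResultCompare.resultsMatch(df1Last, df2Last)) { ... } else { ... }
and likewise for the Middle and First cases.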