diff --git a/README.md b/README.md
index 9c864cca55..c8ab09b3cc 100644
--- a/README.md
+++ b/README.md
@@ -85,8 +85,8 @@ Since the first release of Linkis in 2019, it has accumulated more than **700**
| **Engine Name** | **Support Component Version<br/>(Default Dependent Version)** | **Linkis Version Requirements** | **Included in Release Package By Default** | **Description** |
|:---- |:---- |:---- |:---- |:---- |
-|Spark|Apache 2.0.0~2.4.7, <br/>CDH >= 5.4.0, <br/>(default Apache Spark 2.4.3)|\>=1.0.3|Yes|Spark EngineConn, supports SQL, Scala, Pyspark and R code|
-|Hive|Apache >= 1.0.0, <br/>CDH >= 5.4.0, <br/>(default Apache Hive 2.3.3)|\>=1.0.3|Yes|Hive EngineConn, supports HiveQL code|
+|Spark|Apache >= 2.0.0, <br/>CDH >= 5.4.0, <br/>(default Apache Spark 3.2.1)|\>=1.0.3|Yes|Spark EngineConn, supports SQL, Scala, Pyspark and R code|
+|Hive|Apache >= 1.0.0, <br/>CDH >= 5.4.0, <br/>(default Apache Hive 3.1.3)|\>=1.0.3|Yes|Hive EngineConn, supports HiveQL code|
|Python|Python >= 2.6, <br/>(default Python2*)|\>=1.0.3|Yes|Python EngineConn, supports Python code|
|Shell|Bash >= 2.0|\>=1.0.3|Yes|Shell EngineConn, supports Bash shell code|
|JDBC|MySQL >= 5.0, Hive >= 1.2.1, <br/>(default Hive-jdbc 2.3.4)|\>=1.0.3|No|JDBC EngineConn, already supports MySQL and HiveQL, and can be quickly extended to support other engines that provide a JDBC Driver package, such as Oracle|
diff --git a/README_CN.md b/README_CN.md
index 34de369094..b47497789e 100644
--- a/README_CN.md
+++ b/README_CN.md
@@ -82,8 +82,8 @@ Linkis 自 2019 年开源发布以来,已累计积累了 700 多家试验企
| **引擎名** | **支持底层组件版本<br/>(默认依赖版本)** | **Linkis 版本要求** | **是否默认包含在发布包中** | **说明** |
|:---- |:---- |:---- |:---- |:---- |
-|Spark|Apache 2.0.0~2.4.7, <br/>CDH >= 5.4.0, <br/>(默认 Apache Spark 2.4.3)|\>=1.0.3|是|Spark EngineConn, 支持 SQL, Scala, Pyspark 和 R 代码|
-|Hive|Apache >= 1.0.0, <br/>CDH >= 5.4.0, <br/>(默认 Apache Hive 2.3.3)|\>=1.0.3|是|Hive EngineConn, 支持 HiveQL 代码|
+|Spark|Apache >= 2.0.0, <br/>CDH >= 5.4.0, <br/>(默认 Apache Spark 3.2.1)|\>=1.0.3|是|Spark EngineConn, 支持 SQL, Scala, Pyspark 和 R 代码|
+|Hive|Apache >= 1.0.0, <br/>CDH >= 5.4.0, <br/>(默认 Apache Hive 3.1.3)|\>=1.0.3|是|Hive EngineConn, 支持 HiveQL 代码|
|Python|Python >= 2.6, <br/>(默认 Python2*)|\>=1.0.3|是|Python EngineConn, 支持 Python 代码|
|Shell|Bash >= 2.0|\>=1.0.3|是|Shell EngineConn, 支持 Bash shell 代码|
|JDBC|MySQL >= 5.0, Hive >= 1.2.1, <br/>(默认 Hive-jdbc 2.3.4)|\>=1.0.3|否|JDBC EngineConn, 已支持 MySQL 和 HiveQL,可快速扩展支持其他有 JDBC Driver 包的引擎,如 Oracle|
diff --git a/docs/configuration/linkis-computation-governance-common.md b/docs/configuration/linkis-computation-governance-common.md
index 0fc5900ef3..e0bae1ae31 100644
--- a/docs/configuration/linkis-computation-governance-common.md
+++ b/docs/configuration/linkis-computation-governance-common.md
@@ -4,8 +4,8 @@
| Module Name (Service Name) | Parameter Name | Default Value | Description |
| -------- | -------- | ----- |----- |
|linkis-computation-governance-common|wds.linkis.rm| | wds.linkis.rm |
-|linkis-computation-governance-common|wds.linkis.spark.engine.version|2.4.3 |spark.engine.version|
-|linkis-computation-governance-common|wds.linkis.hive.engine.version| 1.2.1 |hive.engine.version|
+|linkis-computation-governance-common|wds.linkis.spark.engine.version|3.2.1 |spark.engine.version|
+|linkis-computation-governance-common|wds.linkis.hive.engine.version| 3.1.3 |hive.engine.version|
|linkis-computation-governance-common|wds.linkis.python.engine.version|python2 | python.engine.version |
|linkis-computation-governance-common|wds.linkis.python.code_parser.enabled| false |python.code_parser.enabled|
|linkis-computation-governance-common|wds.linkis.scala.code_parser.enabled| false | scala.code_parser.enabled |
diff --git a/docs/configuration/linkis-manager-common.md b/docs/configuration/linkis-manager-common.md
index 1ef0475bd1..d84b06ea57 100644
--- a/docs/configuration/linkis-manager-common.md
+++ b/docs/configuration/linkis-manager-common.md
@@ -4,7 +4,7 @@
| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
| -------- | -------- | ----- |----- | ----- |
|linkis-manager-common|wds.linkis.default.engine.type |spark|engine.type|
-|linkis-manager-common|wds.linkis.default.engine.version |2.4.3|engine.version|
+|linkis-manager-common|wds.linkis.default.engine.version |3.2.1|engine.version|
|linkis-manager-common|wds.linkis.manager.admin|hadoop|manager.admin|
|linkis-manager-common|wds.linkis.rm.application.name|ResourceManager|rm.application.name|
|linkis-manager-common|wds.linkis.rm.wait.event.time.out| 1000 * 60 * 12L |event.time.out|
diff --git a/docs/configuration/linkis-udf.md b/docs/configuration/linkis-udf.md
index 76a9460cfa..dd8aeed169 100644
--- a/docs/configuration/linkis-udf.md
+++ b/docs/configuration/linkis-udf.md
@@ -3,7 +3,7 @@
| Module Name (Service Name) | Parameter Name | Default Value | Description |Used|
| -------- | -------- | ----- |----- | ----- |
-|linkis-udf|wds.linkis.udf.hive.exec.path |/appcom/Install/DataWorkCloudInstall/linkis-linkis-Udf-0.0.3-SNAPSHOT/lib/hive-exec-1.2.1.jar|udf.hive.exec.path|
+|linkis-udf|wds.linkis.udf.hive.exec.path |/appcom/Install/DataWorkCloudInstall/linkis-linkis-Udf-0.0.3-SNAPSHOT/lib/hive-exec-3.1.3.jar|udf.hive.exec.path|
|linkis-udf|wds.linkis.udf.tmp.path|/tmp/udf/|udf.tmp.path|
|linkis-udf|wds.linkis.udf.share.path|/mnt/bdap/udf/|udf.share.path|
|linkis-udf|wds.linkis.udf.share.proxy.user| hadoop|udf.share.proxy.user|
diff --git a/docs/errorcode/linkis-configuration-errorcode.md b/docs/errorcode/linkis-configuration-errorcode.md
index c261f1852e..299ac0e60f 100644
--- a/docs/errorcode/linkis-configuration-errorcode.md
+++ b/docs/errorcode/linkis-configuration-errorcode.md
@@ -15,7 +15,7 @@
|linkis-configuration |14100|CategoryName cannot be included '-'(类别名称不能包含 '-')|CANNOT_BE_INCLUDED|LinkisConfigurationErrorCodeSummary|
|linkis-configuration |14100|Creator is null, cannot be added(创建者为空,无法添加)|CREATOR_IS_NULL_CANNOT_BE_ADDED|LinkisConfigurationErrorCodeSummary|
|linkis-configuration |14100|Engine type is null, cannot be added(引擎类型为空,无法添加)|ENGINE_TYPE_IS_NULL|LinkisConfigurationErrorCodeSummary|
-|linkis-configuration |14100|The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)|INCORRECT_FIXED_SUCH|LinkisConfigurationErrorCodeSummary|
+|linkis-configuration |14100|The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-3.2.1(保存的引擎类型参数有误,请按照固定格式传送,例如spark-3.2.1)|INCORRECT_FIXED_SUCH|LinkisConfigurationErrorCodeSummary|
|linkis-configuration |14100|Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)|INCOMPLETE_RECONFIRM|LinkisConfigurationErrorCodeSummary|
|linkis-configuration |14100|Only admin can modify category(只有管理员才能修改目录)|ONLY_ADMIN_CAN_MODIFY|LinkisConfigurationErrorCodeSummary|
|linkis-configuration |14100|The label parameter is empty(标签参数为空)|THE_LABEL_PARAMETER_IS_EMPTY|LinkisConfigurationErrorCodeSummary|
diff --git a/docs/trino-usage.md b/docs/trino-usage.md
index cfd199f8db..10b7a835bf 100644
--- a/docs/trino-usage.md
+++ b/docs/trino-usage.md
@@ -46,7 +46,7 @@ Linkis1.X是通过标签来进行的,所以需要在我们数据库中插入
```
linkis_ps_configuration_config_key: 插入引擎的配置参数的key和默认values
-linkis_cg_manager_label:插入引擎label如:hive-2.3.3
+linkis_cg_manager_label:插入引擎label如:hive-3.1.3
linkis_ps_configuration_category: 插入引擎的目录关联关系
linkis_ps_configuration_config_value: 插入引擎需要展示的配置
linkis_ps_configuration_key_engine_relation:配置项和引擎的关联关系
diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java
index d23f4a0867..be64584615 100644
--- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java
+++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java
@@ -350,7 +350,7 @@ public long convertTo(long d, ByteUnit u) {
}
}
- public double toBytes(long d) {
+ public long toBytes(long d) {
if (d < 0) {
throw new IllegalArgumentException("Negative size value. Size must be positive: " + d);
}
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/Interpreter.scala b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/CloseIoUtils.java
similarity index 56%
rename from linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/Interpreter.scala
rename to linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/CloseIoUtils.java
index f6d6c797ec..c8aa2e1ff3 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/Interpreter.scala
+++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/CloseIoUtils.java
@@ -15,29 +15,24 @@
* limitations under the License.
*/
-package org.apache.linkis.engineplugin.spark.Interpreter
-
-import org.apache.linkis.common.utils.Utils
-import org.apache.linkis.engineplugin.spark.common.State
-import org.apache.linkis.scheduler.executer.ExecuteResponse
-
-import scala.concurrent.TimeoutException
-import scala.concurrent.duration.Duration
-
-/**
- */
-
-trait Interpreter {
- def state: State
-
- def execute(code: String): ExecuteResponse
-
- def close(): Unit
-
- @throws(classOf[TimeoutException])
- @throws(classOf[InterruptedException])
- final def waitForStateChange(oldState: State, atMost: Duration): Unit = {
- Utils.waitUntil({ () => state != oldState }, atMost)
+package org.apache.linkis.common.utils;
+
+import java.io.Closeable;
+import java.io.IOException;
+
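+/**
+ * Closes the given {@link Closeable}s in order, skipping null entries; an {@link IOException}
+ * thrown by one close is printed and does not stop the remaining resources from being closed.
+ */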
+public class CloseIoUtils {
+
+ public static void closeAll(Closeable... cs) {
+ if (cs != null) {
+ for (Closeable c : cs) {
+ if (c != null) {
+ try {
+ c.close();
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
}
-
}
diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala
index 4359df3398..ec8c911b76 100644
--- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala
+++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/variable/CustomDateType.scala
@@ -79,20 +79,13 @@ class CustomMonthType(date: String, std: Boolean = true, isEnd: Boolean = false)
def -(months: Int): String = {
val dateFormat = DateTypeUtils.dateFormatLocal.get()
- if (std) {
- DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
- } else {
- DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
- }
+ DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
}
def +(months: Int): String = {
val dateFormat = DateTypeUtils.dateFormatLocal.get()
- if (std) {
- DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
- } else {
- DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
- }
+ DateTypeUtils.getMonth(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
}
override def toString: String = {
@@ -111,20 +104,14 @@ class CustomMonType(date: String, std: Boolean = true, isEnd: Boolean = false) {
def -(months: Int): String = {
val dateFormat = DateTypeUtils.dateFormatMonLocal.get()
- if (std) {
- DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
- } else {
- DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
- }
+ DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), -months))
}
def +(months: Int): String = {
val dateFormat = DateTypeUtils.dateFormatMonLocal.get()
- if (std) {
- DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
- } else {
- DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
- }
+ DateTypeUtils.getMon(std, isEnd, DateUtils.addMonths(dateFormat.parse(date), months))
}
override def toString: String = {
diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java
index 28fb7a040e..93f0f114aa 100644
--- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java
+++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/util/ImmutablePair.java
@@ -18,6 +18,7 @@
package org.apache.linkis.protocol.util;
import java.util.AbstractMap;
+import java.util.Objects;
public class ImmutablePair<K, V> {
@@ -62,4 +63,9 @@ private boolean eq(Object o1, Object o2) {
return false;
}
}
+
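+  // equals compares the underlying entry, so hashCode must be derived from the same field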
+ @Override
+ public int hashCode() {
+ return Objects.hash(entry);
+ }
}
diff --git a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java
index 6743e66a85..23c3fbd166 100644
--- a/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java
+++ b/linkis-commons/linkis-rpc/src/main/java/org/apache/linkis/rpc/serializer/ProtostuffSerializeUtil.java
@@ -18,6 +18,7 @@
package org.apache.linkis.rpc.serializer;
import java.util.Map;
+import java.util.Objects;
import java.util.concurrent.ConcurrentHashMap;
import scala.Option;
@@ -49,6 +50,7 @@ public static <T> String serialize(T obj) {
}
Class<T> clazz = (Class<T>) obj.getClass();
Schema<T> schema = getSchema(clazz);
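+    // guard against a null schema before serialization begins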
+ Objects.requireNonNull(schema, "schema must not be null");
byte[] data;
LinkedBuffer buffer = LinkedBuffer.allocate(LinkedBuffer.DEFAULT_BUFFER_SIZE);
try {
@@ -61,6 +63,7 @@ public static <T> String serialize(T obj) {
public static <T> T deserialize(String str, Class<T> clazz) {
Schema<T> schema = getSchema(clazz);
+ Objects.requireNonNull(schema, "schema must not be null");
T obj = schema.newMessage();
ProtostuffIOUtil.mergeFrom(toByteArray(str), obj, schema);
return obj;
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java
index 2e3ca6e085..910c3d7817 100644
--- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/ExcelStorageReader.java
@@ -37,7 +37,7 @@ public static List<List<String>> getExcelTitle(
} else {
res = XlsxUtils.getBasicInfo(in, file);
}
- if (res == null && res.size() < 2) {
+ if (res == null || res.size() < 2) {
throw new Exception("There is a problem with the file format(文件格式有问题)");
}
List<String> headerType = new ArrayList<>();
diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
index ce2ee43b7e..64ed6df39a 100644
--- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
+++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java
@@ -213,7 +213,9 @@ public boolean copy(String origin, String dest) throws IOException {
setOwner(new FsPath(dest), user, null);
}
} catch (Throwable e) {
- file.delete();
+ if (!file.delete()) {
+ LOG.error("File deletion failed(文件删除失败)");
+ }
if (e instanceof IOException) {
throw (IOException) e;
} else {
@@ -370,14 +372,18 @@ public boolean create(String dest) throws IOException {
if (!isOwner(file.getParent())) {
throw new IOException("you have on permission to create file " + dest);
}
- file.createNewFile();
+ if (!file.createNewFile()) {
+ LOG.error("File creation failed(文件创建失败)");
+ }
try {
setPermission(new FsPath(dest), this.getDefaultFilePerm());
if (!user.equals(getOwner(dest))) {
setOwner(new FsPath(dest), user, null);
}
} catch (Throwable e) {
- file.delete();
+ if (!file.delete()) {
+ LOG.error("File deletion failed(文件删除失败)");
+ }
if (e instanceof IOException) {
throw (IOException) e;
} else {
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
index 2aff4af617..fc4d87c96e 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
@@ -98,7 +98,6 @@ class StorageExcelWriter(
case TimestampType => style.setDataFormat(format.getFormat("m/d/yy h:mm"))
case DecimalType => style.setDataFormat(format.getFormat("#.000000000"))
case BigDecimalType => style.setDataFormat(format.getFormat("#.000000000"))
- case _ => style.setDataFormat(format.getFormat("@"))
}
}
style
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java
index 845949079f..a9bb552386 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java
@@ -24,7 +24,7 @@ public class UJESConstants {
public static final String QUERY_PAGE_SIZE_NAME = "pageSize";
public static final int QUERY_PAGE_SIZE_DEFAULT_VALUE = 100;
- public static final Long DRIVER_QUERY_SLEEP_MILLS = 500l;
+ public static final Long DRIVER_QUERY_SLEEP_MILLS = 500L;
public static final Integer DRIVER_REQUEST_MAX_RETRY_TIME = 3;
public static final String QUERY_STATUS_NAME = "status";
@@ -40,7 +40,4 @@ public class UJESConstants {
public static final Integer IDX_FOR_LOG_TYPE_ALL = 3; // 0: Error 1: WARN 2:INFO 3: ALL
public static final int DEFAULT_PAGE_SIZE = 500;
-
- public static final String DEFAULT_SPARK_ENGINE = "spark-2.4.3";
- public static final String DEFAULT_HIVE_ENGINE = "hive-1.2.1";
}
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java
index d402e01fe8..f5d8dcb920 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java
@@ -182,8 +182,7 @@ public Float getJobProgress() {
return null;
}
if (result instanceof JobInfoResult) {
- if (((JobInfoResult) result).getRequestPersistTask() != null
- && ((JobInfoResult) result).getRequestPersistTask() != null) {
+ if (((JobInfoResult) result).getRequestPersistTask() != null) {
return ((JobInfoResult) result).getRequestPersistTask().getProgress();
}
}
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java
index 82a3d8bff2..3231177a35 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java
@@ -24,6 +24,7 @@
import org.apache.linkis.cli.core.exception.BuilderException;
import org.apache.linkis.cli.core.exception.error.CommonErrMsg;
import org.apache.linkis.cli.core.utils.LogUtils;
+import org.apache.linkis.common.utils.CloseIoUtils;
import org.apache.commons.lang3.StringUtils;
@@ -167,12 +168,13 @@ public static String getProxyUser(
}
public static String readFile(String path) {
+ BufferedReader bufReader = null;
try {
File inputFile = new File(path);
InputStream inputStream = new FileInputStream(inputFile);
InputStreamReader iReader = new InputStreamReader(inputStream);
- BufferedReader bufReader = new BufferedReader(iReader);
+ bufReader = new BufferedReader(iReader);
StringBuilder sb = new StringBuilder();
StringBuilder line;
@@ -180,7 +182,6 @@ public static String readFile(String path) {
line = new StringBuilder(bufReader.readLine());
sb.append(line).append(System.lineSeparator());
}
-
return sb.toString();
} catch (FileNotFoundException fe) {
@@ -197,6 +198,8 @@ public static String readFile(String path) {
CommonErrMsg.BuilderBuildErr,
"Cannot read user specified script file: " + path,
e);
+ } finally {
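+      // closing the outermost reader also closes the wrapped InputStreamReader and FileInputStream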
+ CloseIoUtils.closeAll(bufReader);
}
}
}
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java
index 0af2226661..14b0bfee79 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java
@@ -85,12 +85,12 @@ public void before() {
/* Test different task type */
- // "-engineType", "spark-2.4.3",
+ // "-engineType", "spark-3.2.1",
// "-codeType", "sql",
// "-code", "show tables;show tables;show tables",
//
- // "-engineType", "hive-1.2.1",
+ // "-engineType", "hive-3.1.3",
// "-codeType", "sql",
// "-code", "show tables;",
@@ -101,11 +101,11 @@ public void before() {
"-code",
"whoami",
- // "-engineType", "spark-2.4.3",
+ // "-engineType", "spark-3.2.1",
// "-codeType", "py",
// "-code", "print ('hello')",
- // "-engineType", "spark-2.4.3",
+ // "-engineType", "spark-3.2.1",
// "-codeType", "scala",
// "-codePath", "src/test/resources/testScala.scala",
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java
index 024a83311e..2cb1328aa0 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java
@@ -27,6 +27,7 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
+import java.util.Objects;
import java.util.Properties;
import org.slf4j.Logger;
@@ -73,6 +74,7 @@ public Properties getProperties() {
"PRP0002", ErrorLevel.ERROR, CommonErrMsg.PropsReaderErr, "Source: " + propsPath, e);
} finally {
try {
+ Objects.requireNonNull(in, "InputStream must not be null");
in.close();
} catch (Exception ignore) {
// ignore
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java
index 5a3fbfa4a2..628dd2e10c 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java
@@ -27,6 +27,7 @@
import org.apache.linkis.cli.core.exception.error.CommonErrMsg;
import java.util.Map;
+import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -66,6 +67,7 @@ public void process(ExecutionResult executionResult) {
model.buildModel(job.getJobData());
}
try {
+ Objects.requireNonNull(job, "job must not be null");
presenter.present(model, job.getPresentWay());
} catch (Exception e) {
logger.error("Execution failed because exception thrown when presenting data.", e);
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java
index 789b344501..bd89e8fa18 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java
@@ -28,7 +28,13 @@
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
public class PlainTextFileWriter implements DisplayOperator {
+
+ private static final Logger LOG = LoggerFactory.getLogger(PlainTextFileWriter.class);
+
@Override
public void doOutput(DisplayData data) {
if (!(data instanceof FileDisplayData)) {
@@ -62,7 +68,9 @@ public void doOutput(DisplayData data) {
if (overWrite || !file.exists()) {
try {
- file.createNewFile();
+ if (!file.createNewFile()) {
+ LOG.error("File creation failed(文件创建失败)");
+ }
} catch (Exception e) {
throw new PresenterException(
"PST0006",
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/computation/client/InteractiveJobTest.java b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/computation/client/InteractiveJobTest.java
index 4ee0384076..843e3d30a1 100644
--- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/computation/client/InteractiveJobTest.java
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/test/java/org/apache/linkis/computation/client/InteractiveJobTest.java
@@ -18,6 +18,7 @@
package org.apache.linkis.computation.client;
import org.apache.linkis.computation.client.interactive.SubmittableInteractiveJob;
+import org.apache.linkis.manager.label.conf.LabelCommonConfig;
/** A test class for submit a sql to hive engineConn. */
public class InteractiveJobTest {
@@ -29,7 +30,7 @@ public static void main(String[] args) {
SubmittableInteractiveJob job =
LinkisJobClient.interactive()
.builder()
- .setEngineType("hive-2.3.3")
+ .setEngineType("hive-" + LabelCommonConfig.HIVE_ENGINE_VERSION.getValue())
.setRunTypeStr("sql")
.setCreator("IDE")
.setCode("show tables")
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
similarity index 89%
rename from linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala
rename to linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
index 5fdc9cf7f8..1181cd7d2c 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
@@ -18,14 +18,17 @@
package org.apache.linkis.governance.common.conf
import org.apache.linkis.common.conf.{CommonVars, Configuration}
+import org.apache.linkis.manager.label.conf.LabelCommonConfig
object GovernanceCommonConf {
val CONF_FILTER_RM = "wds.linkis.rm"
- val SPARK_ENGINE_VERSION = CommonVars("wds.linkis.spark.engine.version", "2.4.3")
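+  // default engine versions are taken from LabelCommonConfig so each version is defined in one place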
+ val SPARK_ENGINE_VERSION =
+ CommonVars("wds.linkis.spark.engine.version", LabelCommonConfig.SPARK_ENGINE_VERSION.getValue)
- val HIVE_ENGINE_VERSION = CommonVars("wds.linkis.hive.engine.version", "1.2.1")
+ val HIVE_ENGINE_VERSION =
+ CommonVars("wds.linkis.hive.engine.version", LabelCommonConfig.HIVE_ENGINE_VERSION.getValue)
val PYTHON_ENGINE_VERSION = CommonVars("wds.linkis.python.engine.version", "python2")
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala
index 7988a6c95d..96b6e9a1c2 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala
@@ -42,8 +42,8 @@ class GovernanceCommonConfTest {
val errorcodedesclen = GovernanceCommonConf.ERROR_CODE_DESC_LEN
Assertions.assertEquals("wds.linkis.rm", conffilterrm)
- Assertions.assertEquals("2.4.3", sparkengineversion)
- Assertions.assertEquals("1.2.1", hiveengineversion)
+ Assertions.assertEquals("3.2.1", sparkengineversion)
+ Assertions.assertEquals("3.1.3", hiveengineversion)
Assertions.assertEquals("python2", pythonengineversion)
Assertions.assertFalse(pythoncodeparserswitch)
Assertions.assertFalse(scalacodeparserswitch)
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java
index f7fd0165b6..58e3b5fad6 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java
@@ -27,12 +27,14 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode {
CANNOT_DEFAULT_EF(20000, "Cannot find default ExecutorFactory(找不到默认的 ExecutorFactory)"),
ETL_NOT_EXISTS(20000, "EngineTypeLabel does not exist(EngineTypeLabel 不存在)"),
UCL_NOT_EXISTS(20000, "UserCreatorLabel does not exist(UserCreatorLabel 不存在)"),
- CANNOT_HOME_PATH_EC(20001, "Cannot find the home path of engineConn(找不到 engineConn 的 home 路径)"),
+ CANNOT_HOME_PATH_EC(
+ 20001, "Cannot find the home path of engineConn at: {0}(找不到 engineConn 的 home 路径,该路径为:{0})"),
CANNOT_HOME_PATH_DIST(
- 20001, "Cannot find the home path of engineconn dist(找不到 engineconn dist 的 home 路径)"),
+ 20001,
+ "Could not find the home path for engineconn dist at: {0}(找不到 engineconn dist 的 home 路径,该路径为:{0})"),
DIST_IS_EMPTY(
20001,
- "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{})"),
+ "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{0})"),
ENGINE_VERSION_NOT_FOUND(
20001,
"Cannot find the path of engineConn with specified version: {0} and engineConnType: {1}(找不到版本为:{0} engineConnType 为:{1}的engineConn路径"),
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala
index 714b9f0cc2..98b0dfc890 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala
@@ -380,8 +380,8 @@ class EntranceWebSocketService
flag ++= log ++= "\n"
all ++= log ++= "\n"
} else {
- flag ++= log ++= "\n"
- all ++= log ++= "\n"
+ flag ++= log ++= "\n\n"
+ all ++= log ++= "\n\n"
}
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
index 5108a7bf4c..f6d20d6d5c 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
@@ -28,6 +28,7 @@ import org.apache.linkis.manager.label.builder.factory.{
LabelBuilderFactory,
LabelBuilderFactoryContext
}
+import org.apache.linkis.manager.label.conf.LabelCommonConfig
import org.apache.linkis.manager.label.constant.LabelKeyConstant
import org.apache.linkis.manager.label.entity.Label
import org.apache.linkis.manager.label.entity.engine.{CodeLanguageLabel, UserCreatorLabel}
@@ -134,7 +135,8 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
private def checkEngineTypeLabel(labels: util.Map[String, Label[_]]): Unit = {
val engineTypeLabel = labels.getOrDefault(LabelKeyConstant.ENGINE_TYPE_KEY, null)
if (null == engineTypeLabel) {
- val msg = s"You need to specify engineTypeLabel in labels, such as spark-2.4.3"
+ val msg = s"You need to specify engineTypeLabel in labels," +
+ s"such as spark-${LabelCommonConfig.SPARK_ENGINE_VERSION.getValue}"
throw new EntranceIllegalParamException(
EntranceErrorCode.LABEL_PARAMS_INVALID.getErrCode,
EntranceErrorCode.LABEL_PARAMS_INVALID.getDesc + msg
diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala
index ba4cd3878f..7aa0d1fbf9 100644
--- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala
+++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala
@@ -66,7 +66,6 @@ object UJESSQLTypeParser {
def parserFromMetaData(dataType: Int): String = {
dataType match {
- case Types.CHAR => "string"
case Types.SMALLINT => "short"
case Types.INTEGER => "int"
case Types.BIGINT => "long"
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java
index 2649adfd6f..507da0767c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/loaders/DefaultEngineConnPluginLoader.java
@@ -18,6 +18,7 @@
package org.apache.linkis.engineplugin.loader.loaders;
import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.utils.CloseIoUtils;
import org.apache.linkis.engineplugin.loader.EngineConnPluginLoaderConf;
import org.apache.linkis.engineplugin.loader.classloader.EngineConnPluginClassLoader;
import org.apache.linkis.engineplugin.loader.loaders.resource.LocalEngineConnPluginResourceLoader;
@@ -258,12 +259,15 @@ private Class<? extends EngineConnPlugin> loadEngineConnPluginClass(
private Map<String, String> readFromProperties(String propertiesFile) {
Map<String, String> map = new HashMap<>();
Properties properties = new Properties();
+ BufferedReader reader = null;
try {
- BufferedReader reader = new BufferedReader(new FileReader(propertiesFile));
+ reader = new BufferedReader(new FileReader(propertiesFile));
properties.load(reader);
map = new HashMap((Map) properties);
} catch (IOException e) {
// Just warn
+ } finally {
+ CloseIoUtils.closeAll(reader);
}
return map;
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java
index 2bfcd00aca..e3deb1c43c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/loader/utils/EngineConnPluginUtils.java
@@ -17,6 +17,7 @@
package org.apache.linkis.engineplugin.loader.utils;
+import org.apache.linkis.common.utils.CloseIoUtils;
import org.apache.linkis.manager.engineplugin.common.EngineConnPlugin;
import org.apache.commons.io.IOUtils;
@@ -132,8 +133,9 @@ private static String getEngineConnPluginClassFromURL(
}
return acceptedFunction.apply(className) ? className : null;
} else if (url.endsWith(JAR_SUF_NAME)) {
+ JarFile jarFile = null;
try {
- JarFile jarFile = new JarFile(new File(url));
+ jarFile = new JarFile(new File(url));
Enumeration<JarEntry> en = jarFile.entries();
while (en.hasMoreElements()) {
String name = en.nextElement().getName();
@@ -151,6 +153,8 @@ private static String getEngineConnPluginClassFromURL(
// Trace
LOG.trace("Fail to parse jar file:[" + url + "] in plugin classpath");
return null;
+ } finally {
+ CloseIoUtils.closeAll(jarFile);
}
}
return null;
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java
index a5656d4396..056e00a0e0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/service/impl/EnginePluginAdminServiceImpl.java
@@ -21,6 +21,7 @@
import org.apache.linkis.bml.client.BmlClientFactory;
import org.apache.linkis.bml.protocol.BmlResourceVersionsResponse;
import org.apache.linkis.bml.protocol.Version;
+import org.apache.linkis.common.utils.CloseIoUtils;
import org.apache.linkis.common.utils.ZipUtils;
import org.apache.linkis.engineplugin.server.dao.EngineConnBmlResourceDao;
import org.apache.linkis.engineplugin.server.entity.EngineConnBmlResource;
@@ -33,10 +34,7 @@
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;
import com.github.pagehelper.PageHelper;
@@ -115,28 +113,32 @@ public PageInfo<EngineConnBmlResource> queryDataSourceInfoPage(
@Override
public void uploadToECHome(MultipartFile mfile) {
String engineConnsHome = defaultEngineConnBmlResourceGenerator.getEngineConnsHome();
+ InputStream in = null;
+ OutputStream out = null;
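+    // declared outside the try block so both streams can be closed in the finally below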
try {
- InputStream in = mfile.getInputStream();
+ in = mfile.getInputStream();
byte[] buffer = new byte[1024];
int len = 0;
File file = new File(engineConnsHome);
if (!file.exists()) {
log.info("engineplugin's home doesn’t exist");
}
- OutputStream out = new FileOutputStream(engineConnsHome + "/" + mfile.getOriginalFilename());
+ out = new FileOutputStream(engineConnsHome + "/" + mfile.getOriginalFilename());
while ((len = in.read(buffer)) != -1) {
out.write(buffer, 0, len);
}
- out.close();
- in.close();
} catch (Exception e) {
log.info("file {} upload fail", mfile.getOriginalFilename());
+ } finally {
+ CloseIoUtils.closeAll(out, in);
}
ZipUtils.fileToUnzip(engineConnsHome + "/" + mfile.getOriginalFilename(), engineConnsHome);
File file = new File(engineConnsHome + "/" + mfile.getOriginalFilename());
if (file.exists()) {
- file.delete();
+ if (!file.delete()) {
+ log.error("File deletion failed(文件删除失败)");
+ }
log.info("file {} delete success", mfile.getOriginalFilename());
}
}
@@ -147,9 +149,13 @@ public static void deleteDir(File directory) {
if (file.isDirectory()) {
deleteDir(file);
} else {
- file.delete();
+ if (!file.delete()) {
+ log.error("File deletion failed(文件删除失败)");
+ }
}
}
- directory.delete();
+ if (!directory.delete()) {
+ log.error("directory deletion failed(目录删除失败)");
+ }
}
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RequestKerberosUrlUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RequestKerberosUrlUtils.java
index 12b729a054..86422a55f8 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RequestKerberosUrlUtils.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/rm/utils/RequestKerberosUrlUtils.java
@@ -43,6 +43,7 @@
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
@@ -103,6 +104,16 @@ public HttpResponse callRestUrl(final String url, final String userId) {
logger.warn(
String.format(
"Calling KerberosHttpClient %s %s %s", this.principal, this.keyTabLocation, url));
+ Map<String, String> initMap = new HashMap<>();
+ initMap.put("useTicketCache", "false");
+ initMap.put("useKeyTab", "true");
+ initMap.put("keyTab", keyTabLocation);
+ initMap.put("refreshKrb5Config", "true");
+ initMap.put("principal", principal);
+ initMap.put("storeKey", "true");
+ initMap.put("doNotPrompt", "true");
+ initMap.put("isInitiator", "true");
+ initMap.put("debug", "false");
Configuration config =
new Configuration() {
@SuppressWarnings("serial")
@@ -112,19 +123,7 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
new AppConfigurationEntry(
"com.sun.security.auth.module.Krb5LoginModule",
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
- new HashMap<String, String>() {
- {
- put("useTicketCache", "false");
- put("useKeyTab", "true");
- put("keyTab", keyTabLocation);
- put("refreshKrb5Config", "true");
- put("principal", principal);
- put("storeKey", "true");
- put("doNotPrompt", "true");
- put("isInitiator", "true");
- put("debug", "false");
- }
- })
+ initMap)
};
}
};
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala
index d62f8996f0..476a969788 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala
@@ -34,7 +34,7 @@ abstract class AbstractEngineConnBmlResourceGenerator extends EngineConnBmlResou
if (!new File(getEngineConnsHome).exists) {
throw new EngineConnPluginErrorException(
CANNOT_HOME_PATH_EC.getErrorCode,
- CANNOT_HOME_PATH_EC.getErrorDesc
+ MessageFormat.format(CANNOT_HOME_PATH_EC.getErrorDesc, getEngineConnsHome)
)
}
@@ -70,7 +70,7 @@ abstract class AbstractEngineConnBmlResourceGenerator extends EngineConnBmlResou
if (!engineConnPackageHome.exists()) {
throw new EngineConnPluginErrorException(
CANNOT_HOME_PATH_DIST.getErrorCode,
- CANNOT_HOME_PATH_DIST.getErrorDesc
+ MessageFormat.format(CANNOT_HOME_PATH_DIST.getErrorDesc, engineConnPackageHome.getPath)
)
}
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java
index 04805860ce..d0854186a5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/conf/LabelCommonConfig.java
@@ -34,10 +34,10 @@ public class LabelCommonConfig {
CommonVars.apply("wds.linkis.label.entity.packages", "");
public static final CommonVars<String> SPARK_ENGINE_VERSION =
- CommonVars.apply("wds.linkis.spark.engine.version", "2.4.3");
+ CommonVars.apply("wds.linkis.spark.engine.version", "3.2.1");
public static final CommonVars<String> HIVE_ENGINE_VERSION =
- CommonVars.apply("wds.linkis.hive.engine.version", "2.3.3");
+ CommonVars.apply("wds.linkis.hive.engine.version", "3.1.3");
public static final CommonVars<String> PYTHON_ENGINE_VERSION =
CommonVars.apply("wds.linkis.python.engine.version", "python2");
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/CombinedLabelImpl.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/CombinedLabelImpl.java
index 50ff5dd2a3..e514f14929 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/CombinedLabelImpl.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/CombinedLabelImpl.java
@@ -25,6 +25,7 @@
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
+import java.util.Objects;
import java.util.stream.Collectors;
public class CombinedLabelImpl implements CombinedLabel {
@@ -79,6 +80,11 @@ public Boolean isEmpty() {
return CollectionUtils.isEmpty(getValue());
}
+ @Override
+ public int hashCode() {
+ return Objects.hash(value, feature);
+ }
+
@Override
public boolean equals(Object obj) {
if (obj != null && obj instanceof CombinedLabel) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/SerializableLabel.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/SerializableLabel.java
index d4f4eecb10..52b8a3ecc7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/SerializableLabel.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/SerializableLabel.java
@@ -135,6 +135,11 @@ public final String toString() {
+ "]";
}
+ @Override
+ public int hashCode() {
+ return Objects.hash(value, stringValue);
+ }
+
@Override
public boolean equals(Object other) {
if (other instanceof SerializableLabel) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabel.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabel.java
index 09492b146a..912f6c9f94 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabel.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabel.java
@@ -74,4 +74,19 @@ public void setVersion(String version) {
public Boolean isEmpty() {
return StringUtils.isBlank(getEngineType()) || StringUtils.isBlank(getVersion());
}
+
+ @Override
+ protected void setStringValue(String stringValue) {
+ String version;
+ String engineType = stringValue.split("-")[0];
+
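+    // "*" is a regex metacharacter, so it is wrapped in a character class before
+    // replaceFirst strips the leading "<engineType>-" prefix to recover the version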
+ if (engineType.equals("*")) {
+ version = stringValue.replaceFirst("[" + engineType + "]-", "");
+ } else {
+ version = stringValue.replaceFirst(engineType + "-", "");
+ }
+
+ setEngineType(engineType);
+ setVersion(version);
+ }
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/utils/LabelUtils.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/utils/LabelUtils.java
index 52c41e5f69..b417424ab5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/utils/LabelUtils.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/utils/LabelUtils.java
@@ -28,13 +28,7 @@
import org.apache.commons.lang3.StringUtils;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import com.fasterxml.jackson.core.JsonParser;
@@ -126,6 +120,11 @@ public int compareTo(MethodWrapper o) {
return this.order - o.order;
}
+ @Override
+ public int hashCode() {
+ return Objects.hash(methodName, order);
+ }
+
@Override
public boolean equals(Object obj) {
if (obj instanceof MethodWrapper) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/TestLabelBuilder.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/TestLabelBuilder.java
index 8b6e49570c..cffa7891d7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/TestLabelBuilder.java
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/TestLabelBuilder.java
@@ -19,6 +19,7 @@
import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory;
import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext;
+import org.apache.linkis.manager.label.conf.LabelCommonConfig;
import org.apache.linkis.manager.label.entity.Label;
import org.apache.linkis.manager.label.entity.node.AliasServiceInstanceLabel;
import org.apache.linkis.manager.label.exception.LabelErrorException;
@@ -27,7 +28,9 @@ public class TestLabelBuilder {
public static void main(String[] args) throws LabelErrorException {
LabelBuilderFactory labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory();
- Label<?> engineType = labelBuilderFactory.createLabel("engineType", "hive-1.2.1");
+ Label<?> engineType =
+ labelBuilderFactory.createLabel(
+ "engineType", "hive-" + LabelCommonConfig.HIVE_ENGINE_VERSION.getValue());
System.out.println(engineType.getFeature());
AliasServiceInstanceLabel emInstanceLabel =
diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabelTest.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabelTest.java
new file mode 100644
index 0000000000..cc4171b437
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/test/java/org/apache/linkis/manager/label/entity/engine/EngineTypeLabelTest.java
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.manager.label.entity.engine;
+
+import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory;
+import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+/** EngineTypeLabel Tester */
+public class EngineTypeLabelTest {
+
+ @Test
+ public void testSetStringValue() {
+ String engineType = "hive";
+ String version = "1.1.0-cdh5.12.0";
+
+ String engineType1 = "*";
+ String version1 = "*";
+
+ LabelBuilderFactory labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory();
+ EngineTypeLabel engineTypeLabel = labelBuilderFactory.createLabel(EngineTypeLabel.class);
+ engineTypeLabel.setStringValue(engineType + "-" + version);
+ Assertions.assertEquals(engineType, engineTypeLabel.getEngineType());
+ Assertions.assertEquals(version, engineTypeLabel.getVersion());
+
+ engineTypeLabel.setStringValue(engineType1 + "-" + version1);
+ Assertions.assertEquals(engineType1, engineTypeLabel.getEngineType());
+ Assertions.assertEquals(version1, engineTypeLabel.getVersion());
+ }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/ManagerCommonConf.scala b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/ManagerCommonConf.scala
index c37d6700f3..81f7294ba0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/ManagerCommonConf.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/ManagerCommonConf.scala
@@ -18,12 +18,16 @@
package org.apache.linkis.manager.common.conf
import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.manager.label.conf.LabelCommonConfig
object ManagerCommonConf {
val DEFAULT_ENGINE_TYPE = CommonVars("wds.linkis.default.engine.type", "spark")
- val DEFAULT_ENGINE_VERSION = CommonVars("wds.linkis.default.engine.version", "2.4.3")
+ val DEFAULT_ENGINE_VERSION = CommonVars(
+ "wds.linkis.default.engine.version",
+ LabelCommonConfig.SPARK_ENGINE_VERSION.defaultValue
+ )
val DEFAULT_ADMIN = CommonVars("wds.linkis.manager.admin", "hadoop")
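
The point of this change is that the fallback engine version is no longer a hard-coded literal but is derived from `LabelCommonConfig.SPARK_ENGINE_VERSION`, so a future version bump only has to happen in one place. A minimal sketch of the pattern, with a hypothetical `ConfVar` standing in for Linkis's `CommonVars`:

```java
// Hypothetical ConfVar standing in for Linkis's CommonVars: a key plus a
// default, where one variable's default is chained to another's.
final class ConfVar {
  final String key;
  final String defaultValue;

  ConfVar(String key, String defaultValue) {
    this.key = key;
    this.defaultValue = defaultValue;
  }

  String getValue() {
    // System properties stand in for Linkis's layered configuration lookup.
    return System.getProperty(key, defaultValue);
  }
}

final class VersionDefaults {
  static final ConfVar SPARK_ENGINE_VERSION =
      new ConfVar("wds.linkis.spark.engine.version", "3.2.1");

  // Chained default: bumping SPARK_ENGINE_VERSION above also moves this one.
  static final ConfVar DEFAULT_ENGINE_VERSION =
      new ConfVar("wds.linkis.default.engine.version", SPARK_ENGINE_VERSION.defaultValue);
}
```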
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml
index 543d20234a..ad2c710f0c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml
@@ -71,7 +71,7 @@
service_instance = #{instance}
and create_user = #{username}
-
+
and label_value like concat('%,',#{engineType},'%')
and create_time BETWEEN #{startDate} AND #{endDate}
@@ -93,7 +93,7 @@
-
+
and SUBSTRING_INDEX(SUBSTRING_INDEX(ecr.label_value,',',-1),"-",1) in
#{i}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java
index c7e719afe2..f2fd85e25e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/ResourceManagerMapperTest.java
@@ -78,7 +78,8 @@ void nodeResourceUpdateByResourceId() {
persistenceResource.setLeftResource("left");
persistenceResource.setUsedResource("user");
resourceManagerMapper.nodeResourceUpdateByResourceId(1, persistenceResource);
- assertTrue(persistenceResource.getMaxResource() == persistenceResource.getMaxResource());
+ PersistenceResource persistenceResources = resourceManagerMapper.getResourceById(1);
+ assertTrue(persistenceResources.getMaxResource() == persistenceResource.getMaxResource());
}
@Test
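
The rewritten assertion now reads the row back from the database and compares it with what was written, instead of comparing a value to itself. One hedged follow-up, not part of the patch: if `getMaxResource()` returns an object such as a `String`, `==` tests reference identity, so a value comparison is the safer form:

```java
// Suggested value comparison (assumes getMaxResource() returns an object):
// assertEquals uses equals(), while == on objects tests reference identity.
Assertions.assertEquals(
    persistenceResource.getMaxResource(), persistenceResources.getMaxResource());
```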
diff --git a/linkis-dist/bin/checkEnv.sh b/linkis-dist/bin/checkEnv.sh
index 0af9b12d67..82b434e520 100644
--- a/linkis-dist/bin/checkEnv.sh
+++ b/linkis-dist/bin/checkEnv.sh
@@ -37,7 +37,7 @@ function checkPythonAndJava(){
function checkHdfs(){
hadoopVersion="`hdfs version`"
- defaultHadoopVersion="2.7"
+ defaultHadoopVersion="3.3"
checkversion "$hadoopVersion" $defaultHadoopVersion hadoop
}
diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh
index 038d278466..87e01885cb 100644
--- a/linkis-dist/bin/install.sh
+++ b/linkis-dist/bin/install.sh
@@ -219,13 +219,13 @@ SERVER_IP=$local_host
##Label set start
if [ "$SPARK_VERSION" != "" ]
then
- sed -i ${txt} "s#spark-2.4.3#spark-$SPARK_VERSION#g" $LINKIS_HOME/db/linkis_dml.sql
+ sed -i ${txt} "s#spark-3.2.1#spark-$SPARK_VERSION#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#\#wds.linkis.spark.engine.version.*#wds.linkis.spark.engine.version=$SPARK_VERSION#g" $common_conf
fi
if [ "$HIVE_VERSION" != "" ]
then
- sed -i ${txt} "s#hive-2.3.3#hive-$HIVE_VERSION#g" $LINKIS_HOME/db/linkis_dml.sql
+ sed -i ${txt} "s#hive-3.1.3#hive-$HIVE_VERSION#g" $LINKIS_HOME/db/linkis_dml.sql
sed -i ${txt} "s#\#wds.linkis.hive.engine.version.*#wds.linkis.hive.engine.version=$HIVE_VERSION#g" $common_conf
fi
diff --git a/linkis-dist/deploy-config/linkis-env.sh b/linkis-dist/deploy-config/linkis-env.sh
index 9197f7be97..f4d497a4ac 100644
--- a/linkis-dist/deploy-config/linkis-env.sh
+++ b/linkis-dist/deploy-config/linkis-env.sh
@@ -78,7 +78,7 @@ HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/appcom/config/hadoop-config"}
HADOOP_KERBEROS_ENABLE=${HADOOP_KERBEROS_ENABLE:-"false"}
HADOOP_KEYTAB_PATH=${HADOOP_KEYTAB_PATH:-"/appcom/keytab/"}
## Hadoop env version
-HADOOP_VERSION=${HADOOP_VERSION:-"2.7.2"}
+HADOOP_VERSION=${HADOOP_VERSION:-"3.3.4"}
#Hive
HIVE_HOME=/appcom/Install/hive
@@ -91,10 +91,10 @@ SPARK_CONF_DIR=/appcom/config/spark-config
## Engine version conf
#SPARK_VERSION
-#SPARK_VERSION=2.4.3
+#SPARK_VERSION=3.2.1
##HIVE_VERSION
-#HIVE_VERSION=2.3.3
+#HIVE_VERSION=3.1.3
#PYTHON_VERSION=python2
diff --git a/linkis-dist/docker/ldh.Dockerfile b/linkis-dist/docker/ldh.Dockerfile
index dcd01bdf99..05e7d77e2b 100644
--- a/linkis-dist/docker/ldh.Dockerfile
+++ b/linkis-dist/docker/ldh.Dockerfile
@@ -27,10 +27,10 @@ ARG JDK_VERSION=1.8.0-openjdk
ARG JDK_BUILD_REVISION=1.8.0.332.b09-1.el7_9
ARG MYSQL_JDBC_VERSION=8.0.28
-ARG HADOOP_VERSION=2.7.2
-ARG HIVE_VERSION=2.3.3
-ARG SPARK_VERSION=2.4.3
-ARG SPARK_HADOOP_VERSION=2.7
+ARG HADOOP_VERSION=3.3.4
+ARG HIVE_VERSION=3.1.3
+ARG SPARK_VERSION=3.2.1
+ARG SPARK_HADOOP_VERSION=3.2
ARG FLINK_VERSION=1.12.2
ARG ZOOKEEPER_VERSION=3.5.9
diff --git a/linkis-dist/docker/scripts/prepare-ldh-image.sh b/linkis-dist/docker/scripts/prepare-ldh-image.sh
index 791c7c731b..d37719c1a7 100755
--- a/linkis-dist/docker/scripts/prepare-ldh-image.sh
+++ b/linkis-dist/docker/scripts/prepare-ldh-image.sh
@@ -27,10 +27,10 @@ rm -rf ${LDH_TAR_DIR} && mkdir -p ${LDH_TAR_DIR}
rm -rf ${PROJECT_TARGET}/entry-point-ldh.sh
cp ${WORK_DIR}/entry-point-ldh.sh ${PROJECT_TARGET}/
-HADOOP_VERSION=${HADOOP_VERSION:-2.7.2}
-HIVE_VERSION=${HIVE_VERSION:-2.3.3}
-SPARK_VERSION=${SPARK_VERSION:-2.4.3}
-SPARK_HADOOP_VERSION=${SPARK_HADOOP_VERSION:-2.7}
+HADOOP_VERSION=${HADOOP_VERSION:-3.3.4}
+HIVE_VERSION=${HIVE_VERSION:-3.1.3}
+SPARK_VERSION=${SPARK_VERSION:-3.2.1}
+SPARK_HADOOP_VERSION=${SPARK_HADOOP_VERSION:-3.2}
FLINK_VERSION=${FLINK_VERSION:-1.12.2}
ZOOKEEPER_VERSION=${ZOOKEEPER_VERSION:-3.5.9}
MYSQL_JDBC_VERSION=${MYSQL_JDBC_VERSION:-8.0.28}
diff --git a/linkis-dist/helm/README.md b/linkis-dist/helm/README.md
index 274de3dc2a..b1cce7ce75 100644
--- a/linkis-dist/helm/README.md
+++ b/linkis-dist/helm/README.md
@@ -201,9 +201,9 @@ $> kind delete cluster --name test-helm
We introduced a new image, called LDH (Linkis's hadoop all-in-one image), which provides a pseudo-distributed hadoop cluster for quick testing. This image contains the following hadoop components; the default mode for engines in LDH is on-yarn.
-* Hadoop 2.7.2 , including HDFS and YARN
-* Hive 2.3.3
-* Spark 2.4.3
+* Hadoop 3.3.4 , including HDFS and YARN
+* Hive 3.1.3
+* Spark 3.2.1
* Flink 1.12.2
* ZooKeeper 3.5.9
@@ -245,10 +245,10 @@ drwxrwxrwx - root supergroup 0 2022-07-31 02:48 /user
[root@ldh-96bdc757c-dnkbs /]# beeline -u jdbc:hive2://ldh.ldh.svc.cluster.local:10000/ -n hadoop
Connecting to jdbc:hive2://ldh.ldh.svc.cluster.local:10000/
-Connected to: Apache Hive (version 2.3.3)
-Driver: Hive JDBC (version 2.3.3)
+Connected to: Apache Hive (version 3.1.3)
+Driver: Hive JDBC (version 3.1.3)
Transaction isolation: TRANSACTION_REPEATABLE_READ
-Beeline version 2.3.3 by Apache Hive
+Beeline version 3.1.3 by Apache Hive
0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> create database demo;
No rows affected (1.306 seconds)
0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> use demo;
@@ -271,7 +271,7 @@ No rows affected (5.491 seconds)
22/07/31 02:53:18 INFO hive.metastore: Trying to connect to metastore with URI thrift://ldh.ldh.svc.cluster.local:9083
22/07/31 02:53:18 INFO hive.metastore: Connected to metastore.
...
-22/07/31 02:53:19 INFO spark.SparkContext: Running Spark version 2.4.3
+22/07/31 02:53:19 INFO spark.SparkContext: Running Spark version 3.2.1
22/07/31 02:53:19 INFO spark.SparkContext: Submitted application: SparkSQL::10.244.0.6
...
22/07/31 02:53:27 INFO yarn.Client: Submitting application application_1659235712576_0001 to ResourceManager
diff --git a/linkis-dist/helm/README_CN.md b/linkis-dist/helm/README_CN.md
index e756dc73fc..832530147a 100644
--- a/linkis-dist/helm/README_CN.md
+++ b/linkis-dist/helm/README_CN.md
@@ -190,9 +190,9 @@ $> kind delete cluster --name test-helm
## 使用 LDH 进行测试
我们引入了一个新的镜像,叫做LDH(Linkis 的 hadoop 一体式镜像),它提供了一个伪分布式的 hadoop 集群,方便快速测试 On Hadoop 的部署模式。
这个镜像包含以下多个 hadoop 组件,LDH 中引擎的默认模式是 on-yarn 的。
-* Hadoop 2.7.2 , 包括 HDFS and YARN
-* Hive 2.3.3
-* Spark 2.4.3
+* Hadoop 3.3.4 , 包括 HDFS and YARN
+* Hive 3.1.3
+* Spark 3.2.1
* Flink 1.12.2
* ZooKeeper 3.5.9
@@ -236,10 +236,10 @@ drwxrwxrwx - root supergroup 0 2022-07-31 02:48 /user
[root@ldh-96bdc757c-dnkbs /]# beeline -u jdbc:hive2://ldh.ldh.svc.cluster.local:10000/ -n hadoop
Connecting to jdbc:hive2://ldh.ldh.svc.cluster.local:10000/
-Connected to: Apache Hive (version 2.3.3)
-Driver: Hive JDBC (version 2.3.3)
+Connected to: Apache Hive (version 3.1.3)
+Driver: Hive JDBC (version 3.1.3)
Transaction isolation: TRANSACTION_REPEATABLE_READ
-Beeline version 2.3.3 by Apache Hive
+Beeline version 3.1.3 by Apache Hive
0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> create database demo;
No rows affected (1.306 seconds)
0: jdbc:hive2://ldh.ldh.svc.cluster.local:100> use demo;
@@ -262,7 +262,7 @@ No rows affected (5.491 seconds)
22/07/31 02:53:18 INFO hive.metastore: Trying to connect to metastore with URI thrift://ldh.ldh.svc.cluster.local:9083
22/07/31 02:53:18 INFO hive.metastore: Connected to metastore.
...
-22/07/31 02:53:19 INFO spark.SparkContext: Running Spark version 2.4.3
+22/07/31 02:53:19 INFO spark.SparkContext: Running Spark version 3.2.1
22/07/31 02:53:19 INFO spark.SparkContext: Submitted application: SparkSQL::10.244.0.6
...
22/07/31 02:53:27 INFO yarn.Client: Submitting application application_1659235712576_0001 to ResourceManager
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
index 175f2cb7ad..30db9e61a0 100644
--- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
@@ -1183,12 +1183,12 @@ data:
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = '*-*,*-*');
- -- spark2.4.3 default configuration
+ -- spark default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @SPARK_ALL);
- -- hive1.2.1 default configuration
+ -- hive default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @HIVE_ALL);
diff --git a/linkis-dist/helm/charts/linkis/values.yaml b/linkis-dist/helm/charts/linkis/values.yaml
index 638f75134e..89017dbb29 100644
--- a/linkis-dist/helm/charts/linkis/values.yaml
+++ b/linkis-dist/helm/charts/linkis/values.yaml
@@ -111,7 +111,7 @@ linkis:
python:
version: 2.7
hadoop:
- version: 2.7.2
+ version: 3.3.4
configMapName: hadoop-conf
yarn:
restfulUrl: http://ldh.ldh.svc.cluster.local:8088
@@ -123,10 +123,10 @@ linkis:
keytab: /etc/hadoop-conf/yarn.keytab
krb5: /etc/krb5.keytab
spark:
- version: 2.4.3
+ version: 3.2.1
configMapName: spark-conf
hive:
- version: 2.3.3
+ version: 3.1.3
configMapName: hive-conf
meta:
url: "jdbc:mysql://mysql.mysql.svc.cluster.local:3306/hive_metadata?&createDatabaseIfNotExist=true&characterEncoding=UTF-8&useSSL=false" # jdbc:mysql://localhost:3306/metastore?useUnicode=true
diff --git a/linkis-dist/helm/scripts/prepare-for-spark.sh b/linkis-dist/helm/scripts/prepare-for-spark.sh
index 2bbd1123a3..5b2b35a824 100644
--- a/linkis-dist/helm/scripts/prepare-for-spark.sh
+++ b/linkis-dist/helm/scripts/prepare-for-spark.sh
@@ -28,10 +28,10 @@ ECM_POD_NAME=`kubectl get pods -n linkis -l app.kubernetes.io/instance=linkis-de
kubectl cp ./ldh -n linkis ${ECM_POD_NAME}:/opt/ ;
-kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "chmod +x /opt/ldh/1.3.0/spark-2.4.3-bin-hadoop2.7/bin/*"
-kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/spark-2.4.3-bin-hadoop2.7 /opt/ldh/current/spark"
-kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/hadoop-2.7.2 /opt/ldh/current/hadoop"
-kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/apache-hive-2.3.3-bin /opt/ldh/current/hive"
+kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "chmod +x /opt/ldh/1.3.0/spark-3.2.1-bin-hadoop3.2/bin/*"
+kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/spark-3.2.1-bin-hadoop3.2 /opt/ldh/current/spark"
+kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/hadoop-3.3.4 /opt/ldh/current/hadoop"
+kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "ln -s /opt/ldh/1.3.0/apache-hive-3.1.3-bin /opt/ldh/current/hive"
kubectl exec -it -n linkis ${ECM_POD_NAME} -- bash -c "echo 'export SPARK_HOME=/opt/ldh/current/spark' |sudo tee --append /etc/profile"
diff --git a/linkis-dist/package/bin/linkis-cli-hive b/linkis-dist/package/bin/linkis-cli-hive
index 31ef0c54f0..7c8da89c4c 100644
--- a/linkis-dist/package/bin/linkis-cli-hive
+++ b/linkis-dist/package/bin/linkis-cli-hive
@@ -161,6 +161,6 @@ else
parse
fi
-exec ${WORK_DIR}/bin/linkis-cli-pre -engineType hive-2.3.3 -codeType hql "${PARSED_CMD[@]}"
+exec ${WORK_DIR}/bin/linkis-cli-pre -engineType hive-3.1.3 -codeType hql "${PARSED_CMD[@]}"
diff --git a/linkis-dist/package/bin/linkis-cli-spark-submit b/linkis-dist/package/bin/linkis-cli-spark-submit
index 2ae2304668..c3f62efc51 100644
--- a/linkis-dist/package/bin/linkis-cli-spark-submit
+++ b/linkis-dist/package/bin/linkis-cli-spark-submit
@@ -192,9 +192,9 @@ else
fi
if [ "$IS_PYSPARK"x == "true"x ]; then
- exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 -codeType py "${PARSED_CMD[@]}"
+ exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-3.2.1 -codeType py "${PARSED_CMD[@]}"
elif [ "IS_SCALA"x == "true"x ]; then
- exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 -codeType scala "${PARSED_CMD[@]}"
+ exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-3.2.1 -codeType scala "${PARSED_CMD[@]}"
else
- exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 "${PARSED_CMD[@]}"
+ exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-3.2.1 "${PARSED_CMD[@]}"
fi
\ No newline at end of file
diff --git a/linkis-dist/package/conf/linkis.properties b/linkis-dist/package/conf/linkis.properties
index 9116486b0d..66ed15cba3 100644
--- a/linkis-dist/package/conf/linkis.properties
+++ b/linkis-dist/package/conf/linkis.properties
@@ -87,4 +87,6 @@ linkis.session.redis.port=6379
# redis password
linkis.session.redis.password=test123
# redis sso switch
-linkis.session.redis.cache.enabled=false
\ No newline at end of file
+linkis.session.redis.cache.enabled=false
+wds.linkis.workspace.filesystem.owner.check=true
+wds.linkis.workspace.filesystem.path.check=true
\ No newline at end of file
diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql
index dc9a1c1ecc..b193cc5112 100644
--- a/linkis-dist/package/db/linkis_dml.sql
+++ b/linkis-dist/package/db/linkis_dml.sql
@@ -18,8 +18,8 @@
-- 变量:
-SET @SPARK_LABEL="spark-2.4.3";
-SET @HIVE_LABEL="hive-2.3.3";
+SET @SPARK_LABEL="spark-3.2.1";
+SET @HIVE_LABEL="hive-3.1.3";
SET @PYTHON_LABEL="python-python2";
SET @PIPELINE_LABEL="pipeline-1";
SET @JDBC_LABEL="jdbc-4";
@@ -189,18 +189,18 @@ insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_featur
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @PRESTO_ALL, 'OPTIONAL', 2, now(), now());
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @TRINO_ALL, 'OPTIONAL', 2, now(), now());
--- Custom correlation engine (e.g. spark-2.4.3) and configKey value
+-- Custom correlation engine (e.g. spark) and configKey value
-- Global Settings
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type is null and label.label_value = "*-*,*-*");
--- spark-2.4.3(Here choose to associate all spark type Key values with spark2.4.3)
+-- spark(Here choose to associate all spark type Key values with spark)
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'spark' and label.label_value = @SPARK_ALL);
--- hive-1.2.1
+-- hive
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and label_value = @HIVE_ALL);
@@ -318,12 +318,12 @@ insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_val
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = '*-*,*-*');
--- spark2.4.3 default configuration
+-- spark default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @SPARK_ALL);
--- hive1.2.1 default configuration
+-- hive default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @HIVE_ALL);
diff --git a/linkis-dist/package/db/module/linkis_configuration_dml.sql b/linkis-dist/package/db/module/linkis_configuration_dml.sql
index 3e71eaeba0..0d989eba38 100644
--- a/linkis-dist/package/db/module/linkis_configuration_dml.sql
+++ b/linkis-dist/package/db/module/linkis_configuration_dml.sql
@@ -18,8 +18,8 @@
-- 变量:
-SET @SPARK_LABEL="spark-2.4.3";
-SET @HIVE_LABEL="hive-1.2.1";
+SET @SPARK_LABEL="spark-3.2.1";
+SET @HIVE_LABEL="hive-3.1.3";
SET @PYTHON_LABEL="python-python2";
SET @PIPELINE_LABEL="pipeline-*";
SET @JDBC_LABEL="jdbc-4";
@@ -109,18 +109,18 @@ insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_featur
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PIPELINE_ALL, 'OPTIONAL', 2, now(), now());
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@JDBC_ALL, 'OPTIONAL', 2, now(), now());
--- Custom correlation engine (e.g. spark-2.4.3) and configKey value
+-- Custom correlation engine (e.g. spark) and configKey value
-- Global Settings
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type is null and label.label_value = "*-*,*-*");
--- spark-2.4.3(Here choose to associate all spark type Key values with spark2.4.3)
+-- spark(Here choose to associate all spark type Key values with spark)
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'spark' and label.label_value = @SPARK_ALL);
--- hive-1.2.1
+-- hive
insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and label_value = @HIVE_ALL);
@@ -206,12 +206,12 @@ insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_val
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = '*-*,*-*');
--- spark2.4.3 default configuration
+-- spark default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @SPARK_ALL);
--- hive1.2.1 default configuration
+-- hive default configuration
insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @HIVE_ALL);
diff --git a/linkis-dist/pom.xml b/linkis-dist/pom.xml
index b847950a79..6ffc994062 100644
--- a/linkis-dist/pom.xml
+++ b/linkis-dist/pom.xml
@@ -211,10 +211,10 @@
/opt/linkis
/etc/linkis-conf
/var/logs/linkis
- 2.7.2
- 2.3.3
- 2.4.3
- 2.7
+ 3.3.4
+ 3.1.3
+ 3.2.1
+ 3.2
1.12.2
3.5.9
diff --git a/linkis-dist/release-docs/LICENSE b/linkis-dist/release-docs/LICENSE
index 7fd83cb1c3..8fb5148924 100644
--- a/linkis-dist/release-docs/LICENSE
+++ b/linkis-dist/release-docs/LICENSE
@@ -446,10 +446,10 @@ See licenses/ for text of these licenses.
(Apache License, Version 2.0) jackson-databind (com.fasterxml.jackson.core:jackson-databind:2.13.4.1 - http://github.com/FasterXML/jackson)
(Apache License, Version 2.0) jackson-module-scala (com.fasterxml.jackson.module:jackson-module-scala_2.11:2.13.4 - http://wiki.fasterxml.com/JacksonModuleScala)
(Apache License, Version 2.0) javax.inject (javax.inject:javax.inject:1 - http://code.google.com/p/atinject/)
- (Apache License, Version 2.0) json4s-ast (org.json4s:json4s-ast_2.11:3.5.3 - https://github.com/json4s/json4s)
- (Apache License, Version 2.0) json4s-core (org.json4s:json4s-core_2.11:3.5.3 - https://github.com/json4s/json4s)
- (Apache License, Version 2.0) json4s-jackson (org.json4s:json4s-jackson_2.11:3.5.3 - https://github.com/json4s/json4s)
- (Apache License, Version 2.0) json4s-scalap (org.json4s:json4s-scalap_2.11:3.5.3 - https://github.com/json4s/json4s)
+ (Apache License, Version 2.0) json4s-ast (org.json4s:json4s-ast_2.11:3.7.0-M11 - https://github.com/json4s/json4s)
+ (Apache License, Version 2.0) json4s-core (org.json4s:json4s-core_2.11:3.7.0-M11 - https://github.com/json4s/json4s)
+ (Apache License, Version 2.0) json4s-jackson (org.json4s:json4s-jackson_2.11:3.7.0-M11 - https://github.com/json4s/json4s)
+ (Apache License, Version 2.0) json4s-scalap (org.json4s:json4s-scalap_2.11:3.7.0-M11 - https://github.com/json4s/json4s)
(Apache License, Version 2.0) jna (net.java.dev.jna:jna:5.6.0 - https://github.com/java-native-access/jna)
(Apache License, Version 2.0) jna-platform (net.java.dev.jna:jna-platform:5.6.0 - https://github.com/java-native-access/jna)
(Apache License, Version 2.0) micrometer-core (io.micrometer:micrometer-core:1.3.1 - https://github.com/micrometer-metrics/micrometer)
diff --git a/linkis-engineconn-plugins/elasticsearch/pom.xml b/linkis-engineconn-plugins/elasticsearch/pom.xml
index 6b8cc7d1a0..cb54e5d77f 100644
--- a/linkis-engineconn-plugins/elasticsearch/pom.xml
+++ b/linkis-engineconn-plugins/elasticsearch/pom.xml
@@ -26,10 +26,6 @@
linkis-engineplugin-elasticsearch
-
- 7.6.2
-
-
org.apache.linkis
diff --git a/linkis-engineconn-plugins/flink/pom.xml b/linkis-engineconn-plugins/flink/pom.xml
index a6c0894a5c..ed9474e42c 100644
--- a/linkis-engineconn-plugins/flink/pom.xml
+++ b/linkis-engineconn-plugins/flink/pom.xml
@@ -25,11 +25,6 @@
linkis-engineconn-plugin-flink
-
- 1.12.2
- 2.3.3
- 1.3.1
-
@@ -415,13 +410,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/hive/pom.xml b/linkis-engineconn-plugins/hive/pom.xml
index 8fe446167c..74cc14314e 100644
--- a/linkis-engineconn-plugins/hive/pom.xml
+++ b/linkis-engineconn-plugins/hive/pom.xml
@@ -26,10 +26,6 @@
linkis-engineplugin-hive
-
- 2.3.3
-
-
@@ -316,13 +312,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
index 671b0c1d19..fe948f7952 100644
--- a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
+++ b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
@@ -17,6 +17,8 @@
package org.apache.linkis.engineplugin.hive.serde;
+import org.apache.linkis.common.utils.ClassUtils;
+
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDeException;
@@ -33,6 +35,7 @@
import java.io.IOException;
import java.io.OutputStream;
+import java.lang.reflect.InvocationTargetException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
@@ -309,18 +312,6 @@ private static void writePrimitiveUTF8(
binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
break;
}
- case INTERVAL_YEAR_MONTH:
- {
- wc = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o);
- binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
- break;
- }
- case INTERVAL_DAY_TIME:
- {
- wc = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o);
- binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
- break;
- }
case DECIMAL:
{
HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) oi;
@@ -329,7 +320,52 @@ private static void writePrimitiveUTF8(
}
default:
{
- throw new RuntimeException("Unknown primitive type: " + category);
+ if (!"INTERVAL_YEAR_MONTH".equals(category.name())
+ && !"INTERVAL_DAY_TIME".equals(category.name())) {
+ throw new RuntimeException("Unknown primitive type: " + category);
+ }
+ boolean containsIntervalYearMonth = false;
+ boolean containsIntervalDayTime = false;
+ for (PrimitiveObjectInspector.PrimitiveCategory primitiveCategory :
+ PrimitiveObjectInspector.PrimitiveCategory.values()) {
+ containsIntervalYearMonth =
+ "INTERVAL_YEAR_MONTH".equals(primitiveCategory.name())
+ && "INTERVAL_YEAR_MONTH".equals(category.name());
+ containsIntervalDayTime =
+ "INTERVAL_DAY_TIME".equals(primitiveCategory.name())
+ && "INTERVAL_DAY_TIME".equals(category.name());
+ try {
+ if (containsIntervalYearMonth) {
+ wc =
+ (WritableComparable)
+ ClassUtils.getClassInstance(
+ "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector")
+ .getClass()
+ .getMethod("getPrimitiveWritableObject", Object.class)
+ .invoke(oi, o);
+ binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
+ break;
+ }
+ if (containsIntervalDayTime) {
+ wc =
+ (WritableComparable)
+ ClassUtils.getClassInstance(
+ "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector")
+ .getClass()
+ .getMethod("getPrimitiveWritableObject", Object.class)
+ .invoke(oi, o);
+ binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes());
+ break;
+ }
+ } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
+ LOG.error("Fail to invoke method:[getPrimitiveWritableObject]!", e);
+ }
+ }
+ if (containsIntervalYearMonth || containsIntervalDayTime) {
+ break;
+ } else {
+ throw new RuntimeException("Unknown primitive type: " + category);
+ }
}
}
if (binaryData == null) {
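
The replacement above keeps both Hive interval types working without referencing their enum constants or ObjectInspector classes at compile time, so the plugin still loads against Hive builds where those symbols differ. A self-contained sketch of the two ingredients, name-based matching plus reflective invocation, with a hypothetical enum standing in for Hive's `PrimitiveCategory`:

```java
import java.lang.reflect.Method;

public class NameBasedDispatch {
  // Hypothetical stand-in for Hive's PrimitiveObjectInspector.PrimitiveCategory.
  enum Category { STRING, DECIMAL, INTERVAL_YEAR_MONTH }

  // String comparison instead of an enum case label: no compile-time reference
  // to a constant that may be absent in some Hive releases.
  static boolean isIntervalType(Enum<?> category) {
    return "INTERVAL_YEAR_MONTH".equals(category.name())
        || "INTERVAL_DAY_TIME".equals(category.name());
  }

  // Reflective call mirroring the getPrimitiveWritableObject invocation above:
  // the method is resolved by name at runtime, never linked at compile time.
  static Object invokeByName(Object target, String methodName, Object arg) throws Exception {
    Method m = target.getClass().getMethod(methodName, Object.class);
    return m.invoke(target, arg);
  }

  public static void main(String[] args) {
    System.out.println(isIntervalType(Category.INTERVAL_YEAR_MONTH)); // true
    System.out.println(isIntervalType(Category.DECIMAL)); // false
  }
}
```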
diff --git a/linkis-engineconn-plugins/io_file/pom.xml b/linkis-engineconn-plugins/io_file/pom.xml
index 4aeddd125c..cc8136e966 100644
--- a/linkis-engineconn-plugins/io_file/pom.xml
+++ b/linkis-engineconn-plugins/io_file/pom.xml
@@ -25,9 +25,6 @@
linkis-engineplugin-io_file
-
- 1.0
-
@@ -80,13 +77,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
index fbc4a77d12..ef9ba73b3b 100644
--- a/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/io_file/src/main/scala/org/apache/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
@@ -29,7 +29,6 @@ import org.apache.linkis.manager.common.entity.resource.{
LoadResource,
NodeResource
}
-import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf
import org.apache.linkis.manager.engineplugin.common.util.NodeResourceUtils
import org.apache.linkis.manager.engineplugin.io.conf.IOEngineConnConfiguration
import org.apache.linkis.manager.engineplugin.io.domain.FSInfo
@@ -61,14 +60,10 @@ import java.util.concurrent.atomic.AtomicLong
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
-import org.json4s.DefaultFormats
-
class IoEngineConnExecutor(val id: Int, val outputLimit: Int = 10)
extends ConcurrentComputationExecutor(outputLimit)
with Logging {
- implicit val formats = DefaultFormats
-
val fsIdCount = new AtomicLong()
val FS_ID_LIMIT = IOEngineConnConfiguration.IO_FS_ID_LIMIT.getValue
diff --git a/linkis-engineconn-plugins/jdbc/pom.xml b/linkis-engineconn-plugins/jdbc/pom.xml
index b42cb1ae8d..bb4367308e 100644
--- a/linkis-engineconn-plugins/jdbc/pom.xml
+++ b/linkis-engineconn-plugins/jdbc/pom.xml
@@ -172,13 +172,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/jdbc/src/test/java/org/apache/linkis/manager/engineplugin/jdbc/ProgressMonitorTest.java b/linkis-engineconn-plugins/jdbc/src/test/java/org/apache/linkis/manager/engineplugin/jdbc/ProgressMonitorTest.java
index 1c7f3a2b76..4fd92e4fc4 100644
--- a/linkis-engineconn-plugins/jdbc/src/test/java/org/apache/linkis/manager/engineplugin/jdbc/ProgressMonitorTest.java
+++ b/linkis-engineconn-plugins/jdbc/src/test/java/org/apache/linkis/manager/engineplugin/jdbc/ProgressMonitorTest.java
@@ -70,5 +70,6 @@ public void testProgressMonitor() throws SQLException {
0,
Optional.empty()));
Assertions.assertTrue(callbackFlag.get());
+ connection.close();
}
}
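
The added `connection.close()` stops the test from leaking a connection, though it only runs when every assertion passes. A hedged alternative sketch, assuming the connection comes from `DriverManager`: try-with-resources closes it on both the success and failure paths:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

class CloseExample {
  static void runWithAutoClose(String url) throws SQLException {
    // try-with-resources closes the connection even if an assertion throws,
    // which a trailing connection.close() call does not guarantee.
    try (Connection connection = DriverManager.getConnection(url)) {
      // ... execute the monitored statement and run assertions here ...
    }
  }
}
```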
diff --git a/linkis-engineconn-plugins/openlookeng/pom.xml b/linkis-engineconn-plugins/openlookeng/pom.xml
index 7f9f02f3af..8ccc2b4dc5 100644
--- a/linkis-engineconn-plugins/openlookeng/pom.xml
+++ b/linkis-engineconn-plugins/openlookeng/pom.xml
@@ -27,10 +27,6 @@
linkis-engineplugin-openlookeng
-
- 1.5.0
-
-
org.apache.linkis
diff --git a/linkis-engineconn-plugins/pipeline/pom.xml b/linkis-engineconn-plugins/pipeline/pom.xml
index 4ff0bdf93a..2f720f1e37 100644
--- a/linkis-engineconn-plugins/pipeline/pom.xml
+++ b/linkis-engineconn-plugins/pipeline/pom.xml
@@ -25,9 +25,6 @@
linkis-engineplugin-pipeline
-
- 1
-
@@ -80,13 +77,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/presto/pom.xml b/linkis-engineconn-plugins/presto/pom.xml
index 0394a0fcbf..2b2d234d74 100644
--- a/linkis-engineconn-plugins/presto/pom.xml
+++ b/linkis-engineconn-plugins/presto/pom.xml
@@ -26,10 +26,6 @@
linkis-engineplugin-presto
-
- 0.234
-
-
org.apache.linkis
diff --git a/linkis-engineconn-plugins/python/pom.xml b/linkis-engineconn-plugins/python/pom.xml
index 6ede3100fc..6a9020dc8a 100644
--- a/linkis-engineconn-plugins/python/pom.xml
+++ b/linkis-engineconn-plugins/python/pom.xml
@@ -25,9 +25,6 @@
linkis-engineplugin-python
-
- python2
-
@@ -90,13 +87,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/seatunnel/pom.xml b/linkis-engineconn-plugins/seatunnel/pom.xml
index e831db2be4..ed843f9f2c 100644
--- a/linkis-engineconn-plugins/seatunnel/pom.xml
+++ b/linkis-engineconn-plugins/seatunnel/pom.xml
@@ -25,9 +25,6 @@
linkis-engineplugin-seatunnel
-
- 2.1.2
-
diff --git a/linkis-engineconn-plugins/shell/pom.xml b/linkis-engineconn-plugins/shell/pom.xml
index b622d77567..ad10c0c1a5 100755
--- a/linkis-engineconn-plugins/shell/pom.xml
+++ b/linkis-engineconn-plugins/shell/pom.xml
@@ -25,9 +25,6 @@
linkis-engineplugin-shell
-
- 1
-
@@ -98,13 +95,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
diff --git a/linkis-engineconn-plugins/spark/pom.xml b/linkis-engineconn-plugins/spark/pom.xml
index 46ed7abab1..a5e7523d0b 100644
--- a/linkis-engineconn-plugins/spark/pom.xml
+++ b/linkis-engineconn-plugins/spark/pom.xml
@@ -159,13 +159,6 @@
provided
-
- <dependency>
-   <groupId>org.json4s</groupId>
-   <artifactId>json4s-jackson_${scala.binary.version}</artifactId>
-   <version>${json4s.version}</version>
-   <scope>provided</scope>
- </dependency>
io.netty
netty-all
@@ -187,12 +180,22 @@
linkis-rpc
${project.version}
+ <dependency>
+   <groupId>net.sf.py4j</groupId>
+   <artifactId>py4j</artifactId>
+   <version>0.10.7</version>
+   <scope>provided</scope>
+ </dependency>
org.apache.spark
spark-core_${scala.binary.version}
${spark.version}
provided
+ <exclusions>
+   <exclusion>
+     <groupId>net.sf.py4j</groupId>
+     <artifactId>py4j</artifactId>
+   </exclusion>
+ </exclusions>
org.apache.hadoop
hadoop-common
@@ -435,16 +438,21 @@
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <version>${hadoop.version}</version>
+ <groupId>org.eclipse.jetty</groupId>
+ <artifactId>jetty-client</artifactId>
<scope>provided</scope>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <version>${hadoop.version}</version>
- <scope>provided</scope>
+ <groupId>${spark.hadoop.groupid}</groupId>
+ <artifactId>${spark.hadoop-common.artifactId}</artifactId>
+ <version>${spark.hadoop.version}</version>
+ <scope>${spark.hadoop.scope}</scope>
+ </dependency>
+ <dependency>
+ <groupId>${spark.hadoop.groupid}</groupId>
+ <artifactId>${spark.hadoop-hdfs.artifactId}</artifactId>
+ <version>${spark.hadoop.version}</version>
+ <scope>${spark.hadoop.scope}</scope>
@@ -485,98 +493,4 @@
-
-
-
- spark-2.4-hadoop-3.3
-
- ${hadoop-hdfs-client-shade.version}
-
-
-
- org.apache.linkis
- linkis-hadoop-hdfs-client-shade
- ${project.version}
-
-
- commmons-logging
- commons-logging
-
-
- log4j
- log4j
-
-
- org.mortbay.jetty
- jetty
-
-
- org.mortbay.jetty
- jetty-util
-
-
- com.sun.jersey
- jersey-core
-
-
- com.sun.jersey
- jersey-server
-
-
- com.sun.jersey
- jersey-json
-
-
- javax.ws.rs
- jsr311-api
-
-
- net.java.dev.jets3t
- jets3t
-
-
- com.jcraft
- jsch
-
-
- com.google.code.findbugs
- jsr305
-
-
- xmlenc
- xmlenc
-
-
- net.java.dev.jets3t
- jets3t
-
-
- org.apache.avro
- avro
-
-
- com.jcraft
- jsch
-
-
- com.google.code.findbugs
- jsr305
-
-
- javax.servlet
- servlet-api
-
-
- org.slf4j
- slf4j-log4j12
-
-
- org.eclipse.jetty
- *
-
-
-
-
-
-
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/ProcessInterpreter.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/ProcessInterpreter.scala
deleted file mode 100644
index 171d48e4f1..0000000000
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/ProcessInterpreter.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.engineplugin.spark.Interpreter
-
-import org.apache.linkis.common.utils.{Logging, Utils}
-import org.apache.linkis.engineplugin.spark.common._
-import org.apache.linkis.scheduler.executer.{
- ErrorExecuteResponse,
- ExecuteResponse,
- SuccessExecuteResponse
-}
-
-import org.apache.commons.io.IOUtils
-
-import java.io.{BufferedReader, InputStreamReader, PrintWriter}
-import java.util.concurrent.TimeUnit
-
-import scala.concurrent.{Await, ExecutionContext, Future}
-import scala.concurrent.duration.Duration
-
-import org.json4s._
-
-/**
- */
-abstract class ProcessInterpreter(process: Process) extends Interpreter with Logging {
-
- implicit val executor: ExecutionContext = ExecutionContext.global
-
- protected[this] var _state: State = Starting()
-
- protected[this] val stdin = new PrintWriter(process.getOutputStream)
-
- protected[this] val stdout =
- new BufferedReader(new InputStreamReader(process.getInputStream()), 1)
-
- protected[this] val errOut = new LineBufferedStream(process.getErrorStream())
-
- override def state: State = _state
-
- override def execute(code: String): ExecuteResponse = {
- if (code == "sc.cancelAllJobs" || code == "sc.cancelAllJobs()") {
- sendExecuteRequest(code)
- }
- _state match {
- case (Dead() | ShuttingDown() | Error() | Success()) =>
- throw new IllegalStateException("interpreter is not running")
- case Idle() =>
- require(state == Idle())
- code match {
- case "SHUTDOWN" =>
- sendShutdownRequest()
- close()
- ErrorExecuteResponse("shutdown", new Exception("shutdown"))
- case _ =>
- _state = Busy()
- sendExecuteRequest(code) match {
- case Some(rep) =>
- _state = Idle()
- // ExecuteComplete(rep)
- SuccessExecuteResponse()
- case None =>
- _state = Error()
- val errorMsg = errOut.lines.mkString(", ")
- throw new Exception(errorMsg)
- }
- }
- case _ =>
- throw new IllegalStateException(s"interpreter is in ${_state} state, cannot do query.")
- }
- }
-
- Future {
- val exitCode = process.waitFor()
- if (exitCode != 0) {
- // scalastyle:off println
- errOut.lines.foreach(println)
- println(getClass.getSimpleName + " has stopped with exit code " + process.exitValue)
- _state = Error()
- } else {
- println(getClass.getSimpleName + " has finished.")
- _state = Success()
- }
- }
-
- protected def waitUntilReady(): Unit
-
- protected def sendExecuteRequest(request: String): Option[JValue]
-
- protected def sendShutdownRequest(): Unit = {}
-
- override def close(): Unit = {
- val future = Future {
- _state match {
- case (Dead() | ShuttingDown() | Success()) =>
- Future.successful()
- case _ =>
- sendShutdownRequest()
- }
- }
- _state = Dead()
- IOUtils.closeQuietly(stdin)
- IOUtils.closeQuietly(stdout)
- errOut.close
- // scalastyle:off awaitresult
- Utils.tryFinally(Await.result(future, Duration(10, TimeUnit.SECONDS))) {
- process.destroy()
- }
- }
-
-}
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/PythonInterpreter.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/PythonInterpreter.scala
index 4223db8ba7..dbbac2623f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/PythonInterpreter.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/Interpreter/PythonInterpreter.scala
@@ -17,178 +17,27 @@
package org.apache.linkis.engineplugin.spark.Interpreter
-import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.common.io.FsPath
import org.apache.linkis.common.utils.{ClassUtils, Logging, Utils}
-import org.apache.linkis.engineplugin.spark.common.LineBufferedStream
import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
import org.apache.linkis.storage.FSFactory
import org.apache.commons.io.IOUtils
-import org.apache.spark.{SparkContext, SparkException}
+import org.apache.spark.SparkContext
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.expressions.Attribute
import java.io._
import java.nio.file.Files
-import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import org.json4s.{DefaultFormats, JValue}
-import org.json4s.jackson.JsonMethods._
-import org.json4s.jackson.Serialization
-import py4j.GatewayServer
-
/**
*/
object PythonInterpreter {
- def create(): Interpreter = {
- val pythonExec = CommonVars("PYSPARK_DRIVER_PYTHON", "python").getValue
-
- val gatewayServer = new GatewayServer(SQLSession, 0)
- gatewayServer.start()
-
- val builder = new ProcessBuilder(Array(pythonExec, createFakeShell().toString).toList.asJava)
-
- val env = builder.environment()
- env.put("PYTHONPATH", pythonPath)
- env.put("PYTHONUNBUFFERED", "YES")
- env.put("PYSPARK_GATEWAY_PORT", "" + gatewayServer.getListeningPort)
- env.put("SPARK_HOME", SparkConfiguration.SPARK_HOME.getValue)
-
- val process = builder.start()
-
- new PythonInterpreter(process, gatewayServer)
- }
-
- def pythonPath: String = {
- val pythonPath = new ArrayBuffer[String]
- val pythonHomePath = new File(SparkConfiguration.SPARK_HOME.getValue, "python").getPath
- val pythonParentPath = new File(pythonHomePath, "lib")
- pythonPath += pythonHomePath
- pythonParentPath
- .listFiles(new FileFilter {
- override def accept(pathname: File): Boolean = pathname.getName.endsWith(".zip")
- })
- .foreach(f => pythonPath += f.getPath)
- ClassUtils.jarOfClass(classOf[SparkContext]).foreach(pythonPath += _)
- pythonPath.mkString(File.pathSeparator)
- }
-
- def createFakeShell(): File = createFakeShell("python/fake_shell.py")
-
- def createFakeShell(script: String, fileType: String = ".py"): File = {
- val source: InputStream = getClass.getClassLoader.getResourceAsStream(script)
-
- val file = Files.createTempFile("", fileType).toFile
- file.deleteOnExit()
-
- val sink = new FileOutputStream(file)
- val buf = new Array[Byte](1024)
- var n = source.read(buf)
-
- while (n > 0) {
- sink.write(buf, 0, n)
- n = source.read(buf)
- }
-
- source.close()
- sink.close()
-
- file
- }
-
- private def createFakePySpark(): File = {
- val source: InputStream = getClass.getClassLoader.getResourceAsStream("fake_pyspark.sh")
-
- val file = Files.createTempFile("", "").toFile
- file.deleteOnExit()
-
- file.setExecutable(true)
-
- val sink = new FileOutputStream(file)
- val buf = new Array[Byte](1024)
- var n = source.read(buf)
-
- while (n > 0) {
- sink.write(buf, 0, n)
- n = source.read(buf)
- }
-
- source.close()
- sink.close()
-
- file
- }
-
-}
-
-private class PythonInterpreter(process: Process, gatewayServer: GatewayServer)
- extends ProcessInterpreter(process)
- with Logging {
- implicit val formats = DefaultFormats
-
- override def close(): Unit = {
- try {
- super.close()
- } finally {
- gatewayServer.shutdown()
- }
- }
-
- final override protected def waitUntilReady(): Unit = {
- var running = false
- val code =
- try process.exitValue
- catch { case t: IllegalThreadStateException => running = true; -1 }
- if (!running) {
- throw new SparkException(
- s"Spark python application has already finished with exit code $code, now exit..."
- )
- }
- var continue = true
- val initOut = new LineBufferedStream(process.getInputStream)
- val iterable = initOut.iterator
- while (continue && iterable.hasNext) {
- iterable.next match {
- // scalastyle:off println
- case "READY" => println("Start python application succeed."); continue = false
- case str: String => println(str)
- case _ =>
- }
- }
- initOut.close
- }
-
- override protected def sendExecuteRequest(code: String): Option[JValue] = {
- val rep = sendRequest(Map("msg_type" -> "execute_request", "content" -> Map("code" -> code)))
- rep.map { rep =>
- assert((rep \ "msg_type").extract[String] == "execute_reply")
-
- val content: JValue = rep \ "content"
-
- content
- }
- }
-
- override protected def sendShutdownRequest(): Unit = {
- sendRequest(Map("msg_type" -> "shutdown_request", "content" -> ())).foreach { rep =>
- logger.warn(f"process failed to shut down while returning $rep")
- }
- }
-
- private def sendRequest(request: Map[String, Any]): Option[JValue] = {
- // scalastyle:off println
- stdin.println(Serialization.write(request))
- stdin.flush()
-
- Option(stdout.readLine()).map { line => parse(line) }
- }
-
def pythonPath: String = {
val pythonPath = new ArrayBuffer[String]
val pythonHomePath = new File(SparkConfiguration.SPARK_HOME.getValue, "python").getPath
@@ -296,7 +145,7 @@ object SQLSession extends Logging {
logger.warn(s"Fetched $colCount col(s) : $index row(s).")
sc.clearJobGroup()
Utils.tryFinally({
- msg.flush();
+ msg.flush()
msg.toString
}) { () => IOUtils.closeQuietly(msg) }
}
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/SparkKind.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/SparkKind.scala
index 46e8b5defb..26c8ea3fc9 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/SparkKind.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/common/SparkKind.scala
@@ -17,9 +17,6 @@
package org.apache.linkis.engineplugin.spark.common
-import org.json4s.CustomSerializer
-import org.json4s.JsonAST.JString
-
/**
*/
object SparkKind {
@@ -91,23 +88,3 @@ case class SparkDataCalc() extends Kind {
case class SparkMLSQL() extends Kind {
override val toString = SparkKind.SPARKMLSQL_TYPE
}
-
-case object SparkSessionKindSerializer
- extends CustomSerializer[Kind](implicit formats =>
- (
- {
- case JString(SparkKind.SPARKSCALA_TYPE) | JString(SparkKind.SCALA_LAN) => SparkScala()
- case JString(SparkKind.PYSPARK_TYPE) | JString(SparkKind.PYTHON_LAN) | JString(
- SparkKind.PYTHON_END
- ) =>
- PySpark()
- case JString(SparkKind.SPARKR_TYPE) | JString(SparkKind.R_LAN) => SparkR()
- case JString(SparkKind.SPARKMIX_TYPE) | JString(SparkKind.MIX_TYPE) => SparkMix()
- case JString(SparkKind.SQL_LAN) | JString(SparkKind.SPARKSQL_TYPE) => SparkSQL()
- case JString(SparkKind.SPARKMLSQL_TYPE) | JString(SparkKind.ML_LAN) => SparkMLSQL()
- },
- { case kind: Kind =>
- JString(kind.toString)
- }
- )
- )
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
index 8ba618776b..1a81d6537e 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
@@ -24,14 +24,11 @@ import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary
import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql._
-import org.apache.spark.sql.catalyst.catalog.HiveTableRelation
import org.apache.spark.sql.execution.datasources.{HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.functions.col
import org.apache.spark.sql.sources.DataSourceRegister
import org.apache.spark.sql.types.StructField
-import org.slf4j.{Logger, LoggerFactory}
-
class HiveSink extends DataCalcSink[HiveSinkConfig] with Logging {
def output(spark: SparkSession, ds: Dataset[Row]): Unit = {
@@ -122,7 +119,9 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] with Logging {
logFields(sourceFields, targetFields)
throw new HiveSinkException(
SparkErrorCodeSummary.DATA_CALC_COLUMN_NUM_NOT_MATCH.getErrorCode,
- s"$targetTable requires that the data to be inserted have the same number of columns as the target table: target table has ${targetFields.length} column(s) but the inserted data has ${sourceFields.length} column(s)"
+ s"$targetTable requires that the data to be inserted have the same number of columns " +
+ s"as the target table: target table has ${targetFields.length} column(s) " +
+ s"but the inserted data has ${sourceFields.length} column(s)"
)
}
@@ -184,17 +183,18 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] with Logging {
logicalRelation.relation match {
case hadoopFsRelation: HadoopFsRelation =>
hadoopFsRelation.fileFormat match {
- case _: org.apache.spark.sql.execution.datasources.orc.OrcFileFormat =>
- fileFormat = FileFormat.ORC
case _: org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat =>
fileFormat = FileFormat.PARQUET
case dataSourceRegister: DataSourceRegister =>
fileFormat = FileFormat.withName(dataSourceRegister.shortName.toUpperCase)
case _ =>
+ if (hadoopFsRelation.fileFormat.getClass.getSimpleName.equals("OrcFileFormat")) {
+ fileFormat = FileFormat.ORC
+ }
}
}
- case hiveTableRelation: HiveTableRelation =>
- // todo
+ // case hiveTableRelation: HiveTableRelation =>
+ // TODO: note that `HiveTableRelation` was only added after Spark 2.2.1
}
fileFormat
} catch {
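
Matching ORC by class simple name instead of a `case` on the imported type removes the compile-time dependency on one specific package; Spark has shipped an `OrcFileFormat` from more than one package over time, and the name check matches either. A minimal sketch (helper name is illustrative):

```java
// Hedged sketch of the simple-name match above; not Linkis code.
final class OrcFormatCheck {
  static boolean isOrcFormat(Object fileFormat) {
    // Matches any class named OrcFileFormat regardless of its package.
    return "OrcFileFormat".equals(fileFormat.getClass().getSimpleName());
  }
}
```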
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
index ab8a21c3f7..e9d60bd2b3 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
@@ -17,14 +17,16 @@
package org.apache.linkis.engineplugin.spark.datacalc.sink
+import org.apache.linkis.common.utils.ClassUtils.getFieldVal
import org.apache.linkis.common.utils.Logging
import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSink
import org.apache.commons.lang3.StringUtils
+import org.apache.spark.SPARK_VERSION
import org.apache.spark.sql.{Dataset, Row, SparkSession}
-import org.apache.spark.sql.execution.datasources.jdbc.{JDBCOptions, JdbcUtils}
+import org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions
-import java.sql.Connection
+import java.sql.{Connection, DriverManager}
import scala.collection.JavaConverters._
@@ -58,7 +60,8 @@ class JdbcSink extends DataCalcSink[JdbcSinkConfig] with Logging {
.repartition(1)
.foreachPartition((_: Iterator[Row]) => {
val jdbcOptions = new JDBCOptions(options)
- val conn: Connection = JdbcUtils.createConnectionFactory(jdbcOptions)()
+ val conn: Connection =
+ DriverManager.getConnection(config.getUrl, config.getUser, config.getPassword)
try {
config.getPreQueries.asScala.foreach(query => {
logger.info(s"Execute pre query: $query")
@@ -86,7 +89,12 @@ class JdbcSink extends DataCalcSink[JdbcSinkConfig] with Logging {
logger.info("Execute query: {}", query)
val statement = conn.prepareStatement(query)
try {
- statement.setQueryTimeout(jdbcOptions.queryTimeout)
+ // `queryTimeout` was added in Spark 2.4.0; for details see SPARK-23856
+ if (SPARK_VERSION >= "2.4") {
+ val queryTimeout = getFieldVal(jdbcOptions, "queryTimeout").asInstanceOf[Int]
+ statement.setQueryTimeout(queryTimeout)
+ }
+
val rows = statement.executeUpdate()
logger.info("{} rows affected", rows)
} catch {
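
The guard reads `queryTimeout` reflectively because the option only exists from Spark 2.4 on; on older classpaths a direct call would fail to link. A hedged Java sketch of the same idea, with illustrative names rather than Linkis's `getFieldVal` helper:

```java
import java.lang.reflect.Field;

final class QueryTimeoutGuard {
  // Returns the configured timeout, or null when the setting should be skipped.
  static Integer tryReadQueryTimeout(Object jdbcOptions, String sparkVersion) {
    // Lexicographic check mirrors the Scala `SPARK_VERSION >= "2.4"` above.
    if (sparkVersion.compareTo("2.4") < 0) {
      return null; // field absent before Spark 2.4 (SPARK-23856)
    }
    try {
      Field f = jdbcOptions.getClass().getDeclaredField("queryTimeout");
      f.setAccessible(true);
      return (Integer) f.get(jdbcOptions);
    } catch (ReflectiveOperationException e) {
      return null; // fall back to no timeout if the field layout differs
    }
  }
}
```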
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
index 187277349d..cbfb5195c9 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
@@ -20,22 +20,19 @@ package org.apache.linkis.engineplugin.spark.imexport
import org.apache.linkis.common.utils.Logging
import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
import org.apache.linkis.engineplugin.spark.imexport.util.BackGroundServiceUtils
+import org.apache.linkis.server.BDPJettyServerHelper
import org.apache.spark.sql.SparkSession
-import org.json4s.{DefaultFormats, _}
-import org.json4s.jackson.JsonMethods._
-
/**
*/
object ExportData extends Logging {
- implicit val formats = DefaultFormats
def exportData(spark: SparkSession, dataInfo: String, destination: String): Unit = {
exportDataFromFile(
spark,
- parse(dataInfo).extract[Map[String, Any]],
- parse(destination).extract[Map[String, Any]]
+ BDPJettyServerHelper.gson.fromJson(dataInfo, classOf[Map[String, Any]]),
+ BDPJettyServerHelper.gson.fromJson(destination, classOf[Map[String, Any]])
)
}
@@ -43,8 +40,8 @@ object ExportData extends Logging {
val dataInfo = BackGroundServiceUtils.exchangeExecutionCode(dataInfoPath)
exportDataFromFile(
spark,
- parse(dataInfo).extract[Map[String, Any]],
- parse(destination).extract[Map[String, Any]]
+ BDPJettyServerHelper.gson.fromJson(dataInfo, classOf[Map[String, Any]]),
+ BDPJettyServerHelper.gson.fromJson(destination, classOf[Map[String, Any]])
)
}
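
Swapping json4s `parse(...).extract[...]` for Gson removes the json4s dependency from the plugin. One Gson idiom worth noting, shown as a hedged Java sketch rather than Linkis's `BDPJettyServerHelper`: passing a `TypeToken` keeps the generic `Map` type information that a plain `Class` literal erases:

```java
import com.google.gson.Gson;
import com.google.gson.reflect.TypeToken;

import java.lang.reflect.Type;
import java.util.Map;

final class JsonToMap {
  static Map<String, Object> parse(String json) {
    // TypeToken captures Map<String, Object> at runtime despite erasure.
    Type t = new TypeToken<Map<String, Object>>() {}.getType();
    return new Gson().fromJson(json, t);
  }
}
```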
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/LoadData.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/LoadData.scala
index 6d278175b7..fd8e5cac50 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/LoadData.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/LoadData.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
import org.apache.linkis.engineplugin.spark.imexport.util.{BackGroundServiceUtils, ImExportUtils}
import org.apache.linkis.hadoop.common.conf.HadoopConf
import org.apache.linkis.hadoop.common.utils.HDFSUtils
+import org.apache.linkis.server.BDPJettyServerHelper
import org.apache.linkis.storage.excel.XlsUtils
import org.apache.commons.lang3.StringUtils
@@ -35,26 +36,22 @@ import java.util.Locale
import scala.collection.JavaConverters._
-import org.json4s._
-import org.json4s.jackson.JsonMethods._
-
/**
*/
object LoadData {
- implicit val formats = DefaultFormats
def loadDataToTable(spark: SparkSession, source: String, destination: String): Unit = {
- create_table_from_a_file(spark, parse(source), parse(destination))
+ create_table_from_a_file(spark, source, destination)
}
def loadDataToTableByFile(spark: SparkSession, destinationPath: String, source: String): Unit = {
val destination = BackGroundServiceUtils.exchangeExecutionCode(destinationPath)
- create_table_from_a_file(spark, parse(source), parse(destination))
+ create_table_from_a_file(spark, source, destination)
}
- def create_table_from_a_file(spark: SparkSession, src: JValue, dest: JValue): Unit = {
- val source = src.extract[Map[String, Any]]
- val destination = dest.extract[Map[String, Any]]
+ def create_table_from_a_file(spark: SparkSession, src: String, dest: String): Unit = {
+ val source = BDPJettyServerHelper.gson.fromJson(src, classOf[Map[String, Any]])
+    val destination = BDPJettyServerHelper.gson.fromJson(dest, classOf[Map[String, Any]])
var path = getMapValue[String](source, "path")
val pathType = getMapValue[String](source, "pathType", "share")
@@ -79,7 +76,9 @@ object LoadData {
val partition = getMapValue[String](destination, "partition", "ds")
val partitionValue = getMapValue[String](destination, "partitionValue", "1993-01-02")
- val columns = (dest \ "columns").extract[List[Map[String, Any]]]
+ val columnsJson = getMapValue[String](destination, "columns", "")
+ val columns = BDPJettyServerHelper.gson.fromJson(columnsJson, classOf[List[Map[String, Any]]])
+
val dateFormats =
columns.map(_.get("dateFormat").get.toString).map(f => if (f isEmpty) "yyyy-MM-dd" else f)
var isFirst = true
@@ -204,20 +203,6 @@ object LoadData {
hdfsPath
}
- def getNodeValue[T](json: JValue, node: String, default: T = null.asInstanceOf[T])(implicit
- m: Manifest[T]
- ): T = {
- json \ node match {
- case JNothing => default
- case value: JValue =>
- if ("JString()".equals(value.toString)) default
- else {
- try value.extract[T]
- catch { case t: Throwable => default }
- }
- }
- }
-
def getMapValue[T](map: Map[String, Any], key: String, default: T = null.asInstanceOf[T]): T = {
val value = map.get(key).map(_.asInstanceOf[T]).getOrElse(default)
if (StringUtils.isEmpty(value.toString)) {
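
Note on the hunk above: after the maps are produced by Gson, every field access goes through `getMapValue`, which falls back to a default when the key is absent or the stored value is empty. A rough Java equivalent of that lookup-with-default (the map contents are illustrative):

    import java.util.Map;

    import org.apache.commons.lang3.StringUtils;

    public class MapValueSketch {
      // Mirrors LoadData.getMapValue: return the value cast to T, or the
      // default when the key is missing or the stored value is empty.
      @SuppressWarnings("unchecked")
      static <T> T getMapValue(Map<String, Object> map, String key, T defaultValue) {
        Object value = map.getOrDefault(key, defaultValue);
        if (value == null || StringUtils.isEmpty(value.toString())) {
          return defaultValue;
        }
        return (T) value;
      }

      public static void main(String[] args) {
        Map<String, Object> source = Map.of("pathType", "share", "encoding", "");
        System.out.println(getMapValue(source, "pathType", "hdfs"));  // share
        System.out.println(getMapValue(source, "encoding", "utf-8")); // utf-8
        System.out.println(getMapValue(source, "sheet", "Sheet1"));   // Sheet1
      }
    }
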
diff --git a/linkis-engineconn-plugins/sqoop/pom.xml b/linkis-engineconn-plugins/sqoop/pom.xml
index 5428047fe6..96ca23cd8d 100644
--- a/linkis-engineconn-plugins/sqoop/pom.xml
+++ b/linkis-engineconn-plugins/sqoop/pom.xml
@@ -25,10 +25,6 @@
linkis-engineplugin-sqoop
-
- 1.4.6
- 3.1.2
-
diff --git a/linkis-engineconn-plugins/trino/pom.xml b/linkis-engineconn-plugins/trino/pom.xml
index 246f547511..d9ea2d6868 100644
--- a/linkis-engineconn-plugins/trino/pom.xml
+++ b/linkis-engineconn-plugins/trino/pom.xml
@@ -25,9 +25,7 @@
linkis-engineplugin-trino
-
- 371
-
+
org.apache.linkis
diff --git a/linkis-hadoop-hdfs-client-shade/pom.xml b/linkis-hadoop-hdfs-client-shade/pom.xml
index e4990f857e..560bbaa698 100644
--- a/linkis-hadoop-hdfs-client-shade/pom.xml
+++ b/linkis-hadoop-hdfs-client-shade/pom.xml
@@ -207,7 +207,7 @@
org.apache.maven.plugins
maven-shade-plugin
- 3.3.0
+ ${maven-shade-plugin.version}
true
false
diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/response/EngineLabelResponse.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/response/EngineLabelResponse.java
index 9bf7e51bde..fdab5d6d1b 100644
--- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/response/EngineLabelResponse.java
+++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/response/EngineLabelResponse.java
@@ -30,7 +30,7 @@ public class EngineLabelResponse implements Serializable {
@ApiModelProperty(value = "label id.")
private Integer labelId;
- @ApiModelProperty(value = "engine name. eg: spark-2.4.3")
+ @ApiModelProperty(value = "engine name. eg: spark-3.2.1")
private String engineName;
@ApiModelProperty(value = "install. eg: yes")
diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java
index f6805bfd31..8e2e91b261 100644
--- a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java
+++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceAccessRestfulApiTest.java
@@ -33,6 +33,7 @@
import org.springframework.util.MultiValueMap;
import java.util.Date;
+import java.util.Objects;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Test;
@@ -143,6 +144,7 @@ public void sendUrl(String url, MultiValueMap paramsMap, String
mvcResult = mvcUtils.getMessage(mvcUtils.buildMvcResultPut(url));
}
}
+ Objects.requireNonNull(mvcResult, "mvcResult must not be null");
assertEquals(MessageStatus.SUCCESS(), mvcResult.getStatus());
logger.info(String.valueOf(mvcResult));
}
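
Note on the hunks in these test classes: `mvcResult` is assigned only inside conditional branches, so if no branch matches it stays null and the following `getStatus()` call would throw a bare NullPointerException. `Objects.requireNonNull` makes the failure immediate and self-describing. The pattern in isolation:

    import java.util.Objects;

    public class RequireNonNullSketch {
      public static void main(String[] args) {
        String mvcResult = resolve("put");
        // Throws a NullPointerException carrying the given message, so a
        // misconfigured test fails with a readable reason.
        Objects.requireNonNull(mvcResult, "mvcResult must not be null");
        System.out.println(mvcResult.length());
      }

      // Stand-in for the branchy assignment in the tests; returns null for
      // any unmatched method name.
      static String resolve(String method) {
        return "put".equalsIgnoreCase(method) ? "ok" : null;
      }
    }
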
diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java
index 5d3bb2413b..9d8410b402 100644
--- a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java
+++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceEnvRestfulApiTest.java
@@ -32,6 +32,7 @@
import org.springframework.util.MultiValueMap;
import java.util.Date;
+import java.util.Objects;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.junit.jupiter.api.Test;
@@ -132,6 +133,7 @@ public void sendUrl(String url, MultiValueMap paramsMap, String
mvcResult = mvcUtils.getMessage(mvcUtils.buildMvcResultPut(url));
}
}
+ Objects.requireNonNull(mvcResult, "mvcResult must not be null");
assertEquals(MessageStatus.SUCCESS(), mvcResult.getStatus());
logger.info(String.valueOf(mvcResult));
}
diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java
index e4ad07257f..945ae7be90 100644
--- a/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java
+++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/java/org/apache/linkis/basedatamanager/server/restful/DatasourceTypeKeyRestfulApiTest.java
@@ -32,6 +32,7 @@
import org.springframework.util.MultiValueMap;
import java.util.Date;
+import java.util.Objects;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
@@ -152,6 +153,7 @@ public void sendUrl(String url, MultiValueMap paramsMap, String
mvcResult = mvcUtils.getMessage(mvcUtils.buildMvcResultPut(url));
}
}
+ Objects.requireNonNull(mvcResult, "mvcResult must not be null");
assertEquals(MessageStatus.SUCCESS(), mvcResult.getStatus());
logger.info(String.valueOf(mvcResult));
}
diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql
index 680c530762..4b3b29fa65 100644
--- a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql
+++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql
@@ -49,21 +49,21 @@ INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (3, 'combined_userCreator_engineType', '*-Visualis,*-*', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (4, 'combined_userCreator_engineType', '*-nodeexecution,*-*', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (5, 'combined_userCreator_engineType', '*-*,*-*', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (6, 'combined_userCreator_engineType', '*-*,spark-2.4.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (7, 'combined_userCreator_engineType', '*-*,hive-2.3.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (6, 'combined_userCreator_engineType', '*-*,spark-3.2.1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (7, 'combined_userCreator_engineType', '*-*,hive-3.1.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (8, 'combined_userCreator_engineType', '*-*,python-python2', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (9, 'combined_userCreator_engineType', '*-*,pipeline-1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (10, 'combined_userCreator_engineType', '*-*,jdbc-4', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (11, 'combined_userCreator_engineType', '*-*,openlookeng-1.5.0', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (12, 'combined_userCreator_engineType', '*-IDE,spark-2.4.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (13, 'combined_userCreator_engineType', '*-IDE,hive-2.3.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (12, 'combined_userCreator_engineType', '*-IDE,spark-3.2.1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (13, 'combined_userCreator_engineType', '*-IDE,hive-3.1.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (14, 'combined_userCreator_engineType', '*-IDE,python-python2', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (15, 'combined_userCreator_engineType', '*-IDE,pipeline-1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (16, 'combined_userCreator_engineType', '*-IDE,jdbc-4', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (17, 'combined_userCreator_engineType', '*-IDE,openlookeng-1.5.0', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (18, 'combined_userCreator_engineType', '*-Visualis,spark-2.4.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (19, 'combined_userCreator_engineType', '*-nodeexecution,spark-2.4.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
-INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (20, 'combined_userCreator_engineType', '*-nodeexecution,hive-2.3.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (18, 'combined_userCreator_engineType', '*-Visualis,spark-3.2.1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (19, 'combined_userCreator_engineType', '*-nodeexecution,spark-3.2.1', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
+INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (20, 'combined_userCreator_engineType', '*-nodeexecution,hive-3.1.3', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
INSERT INTO `linkis_cg_manager_label` (`id`, `label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES (21, 'combined_userCreator_engineType', '*-nodeexecution,python-python2', 'OPTIONAL', 2, '2022-11-24 20:46:21', '2022-11-24 20:46:21');
diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java
index 3f58930ea1..f02e0398f5 100644
--- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java
+++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java
@@ -39,7 +39,7 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode {
ENGINE_TYPE_IS_NULL(14100, "Engine type is null, cannot be added(引擎类型为空,无法添加)"),
INCORRECT_FIXED_SUCH(
14100,
- "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)"),
+ "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-3.2.1(保存的引擎类型参数有误,请按照固定格式传送,例如spark-3.2.1)"),
INCOMPLETE_RECONFIRM(14100, "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)"),
ONLY_ADMIN_CAN_MODIFY(14100, "Only admin can modify category(只有管理员才能修改目录)"),
THE_LABEL_PARAMETER_IS_EMPTY(14100, " The label parameter is empty(标签参数为空)"),
diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java
index 77c77d926f..73ebe6549e 100644
--- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java
+++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java
@@ -32,6 +32,8 @@
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
+import java.util.Objects;
+
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
@@ -55,9 +57,9 @@ public class ConfigurationRestfulApiTest {
public void TestAddKeyForEngine() throws Exception {
MultiValueMap paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("engineType", "spark");
- paramsMap.add("version", "2.4.3");
+ paramsMap.add("version", "3.2.1");
paramsMap.add("token", "e8724-e");
- paramsMap.add("keyJson", "{'engineType':'spark','version':'2.4.3'}");
+ paramsMap.add("keyJson", "{'engineType':'spark','version':'3.2.1'}");
String url = "/configuration/addKeyForEngine";
sendUrl(url, paramsMap, "get", null);
}
@@ -66,7 +68,7 @@ public void TestAddKeyForEngine() throws Exception {
public void TestGetFullTreesByAppName() throws Exception {
MultiValueMap paramsMap = new LinkedMultiValueMap<>();
paramsMap.add("engineType", "spark");
- paramsMap.add("version", "2.4.3");
+ paramsMap.add("version", "3.2.1");
paramsMap.add("creator", "sam");
String url = "/configuration/getFullTreesByAppName";
@@ -127,7 +129,7 @@ public void TestSaveFullTree() throws Exception {
// " }\n" +
// " ],\n" +
// " \"creator\": \"LINKISCLI\",\n" +
- // " \"engineType\": \"hive-2.3.3\"\n" +
+ // " \"engineType\": \"hive-3.1.3\"\n" +
// "}";
// String url = "/configuration/saveFullTree";
//
@@ -176,6 +178,7 @@ public void sendUrl(String url, MultiValueMap paramsMap, String
mvcResult = mvcUtils.getMessage(mvcUtils.buildMvcResultPost(url));
}
}
+ Objects.requireNonNull(mvcResult, "mvcResult must not be null");
assertEquals(MessageStatus.SUCCESS(), mvcResult.getStatus());
logger.info(String.valueOf(mvcResult));
}
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/src/main/java/org/apache/linkis/cs/client/service/CSTableService.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/src/main/java/org/apache/linkis/cs/client/service/CSTableService.java
index 0c89f939de..c7f91750f5 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/src/main/java/org/apache/linkis/cs/client/service/CSTableService.java
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/src/main/java/org/apache/linkis/cs/client/service/CSTableService.java
@@ -39,6 +39,7 @@
import java.util.ArrayList;
import java.util.List;
+import java.util.Objects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -101,6 +102,7 @@ public List getUpstreamTables(String contextIDStr, String nodeName)
searchService.searchUpstreamContext(
contextID, nodeName, Integer.MAX_VALUE, CSTable.class);
}
+ Objects.requireNonNull(contextID, "contextID must not be null");
if (null != rsList)
logger.info(
"contextID: {} and nodeName: {} succeed to get tables size {}",
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/BinaryLogicCondition.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/BinaryLogicCondition.java
index 4e13d04e65..ce221e7361 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/BinaryLogicCondition.java
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/condition/BinaryLogicCondition.java
@@ -46,6 +46,6 @@ public Condition getRight() {
}
public void setRight(Condition right) {
- right = right;
+ this.right = right;
}
}
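
Note on the hunk above: the original setter assigned the parameter to itself, so the field `right` was never written; qualifying with `this` is the standard fix for parameter shadowing. A minimal reproduction:

    public class ShadowingSketch {
      private String right;

      // Buggy variant: the parameter shadows the field, so this is a
      // self-assignment and the field stays null.
      public void setRightBuggy(String right) {
        right = right;
      }

      // Fixed variant: "this." disambiguates the field from the parameter.
      public void setRight(String right) {
        this.right = right;
      }

      public static void main(String[] args) {
        ShadowingSketch s = new ShadowingSketch();
        s.setRightBuggy("value");
        System.out.println(s.right); // null
        s.setRight("value");
        System.out.println(s.right); // value
      }
    }
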
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/parser/ApiJsonTest.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/parser/ApiJsonTest.java
index 29d5a7ceda..4662682009 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/parser/ApiJsonTest.java
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/test/java/org/apache/linkis/cs/parser/ApiJsonTest.java
@@ -74,7 +74,6 @@ public void temp() {
String test =
"{\"cols\":[{\"name\":\"birthday\",\"visualType\":\"string\",\"type\":\"category\",\"config\":true,\"field\":{\"alias\":\"\",\"desc\":\"\",\"useExpression\":false},\"format\":{\"formatType\":\"default\"},\"from\":\"cols\"},{\"name\":\"name\",\"visualType\":\"string\",\"type\":\"category\",\"config\":true,\"field\":{\"alias\":\"\",\"desc\":\"\",\"useExpression\":false},\"format\":{\"formatType\":\"default\"},\"from\":\"cols\"}],\"rows\":[],\"metrics\":[{\"name\":\"score@Visualis@6F01974E\",\"visualType\":\"number\",\"type\":\"value\",\"agg\":\"sum\",\"config\":true,\"chart\":{\"id\":1,\"name\":\"table\",\"title\":\"表格\",\"icon\":\"icon-table\",\"coordinate\":\"other\",\"rules\":[{\"dimension\":[0,9999],\"metric\":[0,9999]}],\"data\":{\"cols\":{\"title\":\"列\",\"type\":\"category\"},\"rows\":{\"title\":\"行\",\"type\":\"category\"},\"metrics\":{\"title\":\"指标\",\"type\":\"value\"},\"filters\":{\"title\":\"筛选\",\"type\":\"all\"}},\"style\":{\"table\":{\"fontFamily\":\"PingFang SC\",\"fontSize\":\"12\",\"color\":\"#666\",\"lineStyle\":\"solid\",\"lineColor\":\"#D9D9D9\",\"headerBackgroundColor\":\"#f7f7f7\",\"headerConfig\":[],\"columnsConfig\":[],\"leftFixedColumns\":[],\"rightFixedColumns\":[],\"headerFixed\":true,\"autoMergeCell\":false,\"bordered\":true,\"size\":\"default\",\"withPaging\":true,\"pageSize\":\"20\",\"withNoAggregators\":false},\"spec\":{}}},\"field\":{\"alias\":\"\",\"desc\":\"\",\"useExpression\":false},\"format\":{\"formatType\":\"default\"},\"from\":\"metrics\"}],\"filters\":[],\"color\":{\"title\":\"颜色\",\"type\":\"category\",\"value\":{\"all\":\"#509af2\"},\"items\":[]},\"chartStyles\":{\"richText\":{\"content\":\"〖@dv_name_dv@〗
〖@dv_birthday_dv@〗
\"},\"spec\":{}},\"selectedChart\":15,\"data\":[],\"pagination\":{\"pageNo\":0,\"pageSize\":0,\"withPaging\":false,\"totalCount\":0},\"dimetionAxis\":\"col\",\"renderType\":\"rerender\",\"orders\":[],\"mode\":\"chart\",\"model\":{\"birthday\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"score\":{\"sqlType\":\"DOUBLE\",\"visualType\":\"number\",\"modelType\":\"value\"},\"teacher\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"city\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"sex\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"fee\":{\"sqlType\":\"DOUBLE\",\"visualType\":\"number\",\"modelType\":\"value\"},\"name\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"lesson\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"id\":{\"sqlType\":\"INT\",\"visualType\":\"number\",\"modelType\":\"value\"},\"class\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"exam_date\":{\"sqlType\":\"STRING\",\"visualType\":\"string\",\"modelType\":\"category\"},\"age\":{\"sqlType\":\"INT\",\"visualType\":\"number\",\"modelType\":\"value\"}},\"controls\":[],\"computed\":[],\"cache\":false,\"expired\":300,\"autoLoadData\":true,\"query\":{\"groups\":[\"birthday\",\"name\"],\"aggregators\":[{\"column\":\"score\",\"func\":\"sum\"}],\"filters\":[],\"orders\":[],\"pageNo\":0,\"pageSize\":0,\"nativeQuery\":false,\"cache\":false,\"expired\":0,\"flush\":false}}";
Set columns = getWidgetUsedColumns(test);
- columns.size();
}
private Set getWidgetUsedColumns(String config) {
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/service/DataSourceInfoServiceTest.java b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/service/DataSourceInfoServiceTest.java
index 300b2bd3b0..2869216ce1 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/service/DataSourceInfoServiceTest.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/java/org/apache/linkis/datasourcemanager/core/service/DataSourceInfoServiceTest.java
@@ -350,7 +350,7 @@ void testInsertDataSourceParameter() throws ErrorException {
Long res =
dataSourceInfoService.insertDataSourceParameter(
keyDefinitionList, datasourceId, connectParams, username, comment);
- assertTrue(expectedVersion == res);
+ assertTrue(expectedVersion.equals(res));
}
@Test
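
Note on the hunk above: `expectedVersion` and `res` are boxed `Long` values, so `==` compares object references and only happens to pass for small values cached by `Long.valueOf` (-128 to 127); `equals` compares the numeric value. A quick demonstration:

    public class BoxedCompareSketch {
      public static void main(String[] args) {
        Long expectedVersion = 1000L;
        Long res = 1000L;
        // Outside the Long cache, each autoboxing creates a distinct object,
        // so reference equality fails while value equality succeeds.
        System.out.println(expectedVersion == res);      // false
        System.out.println(expectedVersion.equals(res)); // true
      }
    }
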
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java
index 32ed7c53d1..58116f4e64 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/server/src/main/java/org/apache/linkis/metadata/query/server/utils/MetadataUtils.java
@@ -18,6 +18,7 @@
package org.apache.linkis.metadata.query.server.utils;
import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.utils.CloseIoUtils;
import org.apache.linkis.metadata.query.common.exception.MetaRuntimeException;
import org.apache.linkis.metadata.query.common.service.BaseMetadataService;
@@ -160,8 +161,9 @@ private static List searchMetaServiceClassFormURI(
classNameList.add(className);
}
} else if (url.endsWith(JAR_SUF_NAME)) {
+ JarFile jarFile = null;
try {
- JarFile jarFile = new JarFile(new File(url));
+ jarFile = new JarFile(new File(url));
Enumeration en = jarFile.entries();
while (en.hasMoreElements()) {
String name = en.nextElement().getName();
@@ -178,6 +180,8 @@ private static List searchMetaServiceClassFormURI(
// Trace
LOG.trace("Fail to parse jar file:[" + url + "] in service classpath", e);
return classNameList;
+ } finally {
+ CloseIoUtils.closeAll(jarFile);
}
}
return classNameList;
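
Note on the hunk above: declaring the `JarFile` before the try block lets the `finally` clause close it on every path; `CloseIoUtils.closeAll` is the project's null-safe helper. Without such a helper, try-with-resources gives the same guarantee, as in this sketch:

    import java.io.File;
    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Enumeration;
    import java.util.List;
    import java.util.jar.JarEntry;
    import java.util.jar.JarFile;

    public class JarScanSketch {
      // Lists the entry names of a jar; try-with-resources closes the
      // JarFile whether iteration succeeds or throws.
      static List<String> entryNames(String path) throws IOException {
        List<String> names = new ArrayList<>();
        try (JarFile jarFile = new JarFile(new File(path))) {
          Enumeration<JarEntry> en = jarFile.entries();
          while (en.hasMoreElements()) {
            names.add(en.nextElement().getName());
          }
        }
        return names;
      }
    }
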
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
index 1279139446..b0c9cdef36 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata-query/service/hive/pom.xml
@@ -26,7 +26,6 @@
linkis-metadata-query-service-hive
- 2.3.3
2.7.2
4.2.4
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java
index a86cd96010..458f64d216 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/java/org/apache/linkis/metadata/hive/dao/HiveMetaDaoTest.java
@@ -125,7 +125,7 @@ public void getColumnsTest() {
queryParam.setTableName("employee");
List