From 37b3b26a98b3c04074ce19de616a87b44e5bb54a Mon Sep 17 00:00:00 2001
From: benjobs
Date: Sat, 28 Sep 2024 12:16:14 +0800
Subject: [PATCH] [Improve] spark-app run state style improvement (#4100)

---
 .../src/locales/lang/en/spark/app.ts           |  2 +-
 .../src/views/spark/app/components/State.tsx   | 14 +++++++-------
 .../views/spark/app/hooks/useSparkAction.tsx   |  8 ++++----
 .../spark/client/proxy/SparkShimsProxy.scala   | 18 ++++++++++++------
 4 files changed, 24 insertions(+), 18 deletions(-)

diff --git a/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
index 9d614c3993..bd2fd4398d 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
@@ -211,7 +211,7 @@ export default {
   startLog: 'View Spark Start Log',
   abort: 'Terminate Job',
   copy: 'Copy Job',
-  remapping: 'Remap Job',
+  remapping: 'Remapping Job',
   deleteTip: 'Are you sure you want to delete this job?',
   canceling: 'Current job is stopping',
   starting: 'Current job is starting',
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx
index 3895abd302..ff12feb5a6 100644
--- a/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx
+++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx
@@ -43,19 +43,19 @@ export const stateMap = {
     class: 'status-processing-restarting',
   },
   [AppStateEnum.ACCEPTED]: {
-    color: '#52c41a',
+    color: '#13c2c2',
     title: t('spark.app.runState.accept'),
-    class: 'status-processing-running',
+    class: 'status-processing-restarting',
   },
   [AppStateEnum.SUCCEEDED]: {
-    color: '#52c41a',
+    color: '#1890ff',
     title: t('spark.app.runState.success'),
     class: 'status-processing-success',
   },
   [AppStateEnum.RUNNING]: {
-    color: '#1890ff',
+    color: '#52c41a',
     title: t('spark.app.runState.running'),
-    class: 'status-processing-failing',
+    class: 'status-processing-running',
   },
   [AppStateEnum.FINISHED]: { color: '#52c41a', title: t('spark.app.runState.finished') },
   [AppStateEnum.FAILED]: { color: '#f5222d', title: t('spark.app.runState.failed') },
@@ -73,9 +73,9 @@ export const stateMap = {
   },
 
   [AppStateEnum.STOPPING]: {
-    color: '#f5222d',
+    color: '#faad14',
     title: t('spark.app.runState.stopping'),
-    class: 'status-processing-initializing',
+    class: 'status-processing-cancelling',
   },
   [AppStateEnum.KILLED]: { color: '#8E50FF', title: t('spark.app.runState.killed') },
 };
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
index 9da246e16d..4c596d4619 100644
--- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
+++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx
@@ -253,8 +253,8 @@ export const useSparkAction = (optionApps: Recordable) => {
         <Form
           class="!pt-40px"
           ref={mappingRef}
           name="mappingForm"
-          labelCol={{ lg: { span: 7 }, sm: { span: 7 } }}
-          wrapperCol={{ lg: { span: 16 }, sm: { span: 4 } }}
+          layout="vertical"
+          baseColProps={{ span: 22, offset: 1 }}
           v-model:model={formValue}
         >
diff --git a/streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy/SparkShimsProxy.scala b/streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy/SparkShimsProxy.scala
index 9287f72dbb..86268f8c7c 100644
--- a/streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy/SparkShimsProxy.scala
+++ b/streampark-spark/streampark-spark-client/streampark-spark-client-api/src/main/scala/org/apache/streampark/spark/client/proxy/SparkShimsProxy.scala
@@ -76,11 +76,10 @@ object SparkShimsProxy extends Logger {
     logInfo(s"Add verify sql lib,spark version: $sparkVersion")
     VERIFY_SQL_CLASS_LOADER_CACHE.getOrElseUpdate(
       s"${sparkVersion.fullVersion}", {
-        val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars", f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
+        val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars")
         val shimsUrls = ListBuffer[URL](libUrl: _*)
         // TODO If there are compatibility issues with different versions
-
         addShimsUrls(
           sparkVersion,
           file => {
@@ -151,9 +150,8 @@ object SparkShimsProxy extends Logger {
     SHIMS_CLASS_LOADER_CACHE.getOrElseUpdate(
       s"${sparkVersion.fullVersion}", {
         // 1) spark/lib
-        val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars", f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
+        val libUrl = getSparkHomeLib(sparkVersion.sparkHome, "jars")
         val shimsUrls = ListBuffer[URL](libUrl: _*)
-
         // 2) add all shims jar
         addShimsUrls(
           sparkVersion,
@@ -174,10 +172,18 @@ object SparkShimsProxy extends Logger {
   private[this] def getSparkHomeLib(
       sparkHome: String,
       childDir: String,
-      filterFun: File => Boolean): List[URL] = {
+      filterFun: File => Boolean = null): List[URL] = {
     val file = new File(sparkHome, childDir)
     require(file.isDirectory, s"SPARK_HOME $file does not exist")
-    file.listFiles.filter(filterFun).map(_.toURI.toURL).toList
+    file.listFiles
+      .filter(f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
+      .filter(f => {
+        if (filterFun != null) {
+          filterFun(f)
+        } else {
+          true
+        }
+      }).map(_.toURI.toURL).toList
   }
 
   @throws[Exception]
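Reviewer note (not part of the patch): the SparkShimsProxy change moves the log4j/slf4j exclusion into getSparkHomeLib itself and makes the extra filter an optional, nullable parameter. The sketch below is a minimal, self-contained illustration of that filtering behaviour; the object name, helper name listSparkJars, and the sample directory are assumptions invented for the example and are not code from the repository.

import java.io.File
import java.net.URL

object SparkLibFilterSketch {

  // Mirrors the patched getSparkHomeLib: logging jars (log4j*/slf4j*) are always
  // excluded, and an optional (default-null) caller-supplied predicate is applied on top.
  def listSparkJars(dir: File, extraFilter: File => Boolean = null): List[URL] = {
    require(dir.isDirectory, s"$dir is not a directory")
    dir.listFiles
      .filter(f => !f.getName.startsWith("log4j") && !f.getName.startsWith("slf4j"))
      .filter(f => extraFilter == null || extraFilter(f))
      .map(_.toURI.toURL)
      .toList
  }

  def main(args: Array[String]): Unit = {
    val jarsDir = new File("/opt/spark/jars") // illustrative path, assumption
    // No extra filter: every jar except log4j*/slf4j*.
    val allLibs = listSparkJars(jarsDir)
    // Extra filter on top: only spark-sql* jars, logging jars still excluded.
    val sqlLibs = listSparkJars(jarsDir, f => f.getName.startsWith("spark-sql"))
    println(s"all=${allLibs.size}, sql=${sqlLibs.size}")
  }
}

Keeping filterFun nullable matches the existing call sites; an Option[File => Boolean] or a default of `_ => true` would be an alternative that avoids the null check.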