diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationType.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationTypeEnum.java
similarity index 87%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationType.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationTypeEnum.java
index 70c992d820..557dd4cf85 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationType.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/ApplicationTypeEnum.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.common.enums;
 
-public enum ApplicationType {
+public enum ApplicationTypeEnum {
 
   /** StreamPark Flink */
   STREAMPARK_FLINK(1, "StreamPark Flink"),
@@ -34,7 +34,7 @@ public enum ApplicationType {
   private final int type;
   private final String name;
 
-  ApplicationType(int type, String name) {
+  ApplicationTypeEnum(int type, String name) {
     this.type = type;
     this.name = name;
   }
@@ -47,8 +47,8 @@ public String getName() {
     return name;
   }
 
-  public static ApplicationType of(int type) {
-    for (ApplicationType appType : ApplicationType.values()) {
+  public static ApplicationTypeEnum of(int type) {
+    for (ApplicationTypeEnum appType : ApplicationTypeEnum.values()) {
       if (appType.getType() == type) {
         return appType;
       }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterState.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterStateEnum.java
similarity index 71%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterState.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterStateEnum.java
index b2599b74c0..3cd992f874 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterState.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/ClusterStateEnum.java
@@ -18,7 +18,7 @@
 package org.apache.streampark.common.enums;
 
 /** @since 1.2.3 */
-public enum ClusterState {
+public enum ClusterStateEnum {
 
   /** The cluster was just created but not started */
   CREATED(0),
@@ -49,33 +49,33 @@ public enum ClusterState {
 
   private final Integer state;
 
-  ClusterState(Integer state) {
+  ClusterStateEnum(Integer state) {
     this.state = state;
   }
 
-  public static ClusterState of(Integer value) {
-    for (ClusterState clusterState : values()) {
-      if (clusterState.state.equals(value)) {
-        return clusterState;
+  public static ClusterStateEnum of(Integer value) {
+    for (ClusterStateEnum clusterStateEnum : values()) {
+      if (clusterStateEnum.state.equals(value)) {
+        return clusterStateEnum;
       }
     }
-    return ClusterState.UNKNOWN;
+    return ClusterStateEnum.UNKNOWN;
   }
 
-  public static ClusterState of(String name) {
-    for (ClusterState clusterState : values()) {
-      if (clusterState.name().equals(name)) {
-        return clusterState;
+  public static ClusterStateEnum of(String name) {
+    for (ClusterStateEnum clusterStateEnum : values()) {
+      if (clusterStateEnum.name().equals(name)) {
+        return clusterStateEnum;
      }
    }
-    return ClusterState.UNKNOWN;
+    return ClusterStateEnum.UNKNOWN;
  }
 
   public Integer getState() {
     return state;
   }
 
-  public static boolean isRunning(ClusterState state) {
+  public static boolean isRunning(ClusterStateEnum state) {
     return RUNNING.equals(state);
   }
 }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentMode.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentModeEnum.java
similarity index 80%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentMode.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentModeEnum.java
index 00580a2cdc..31377c1f73 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentMode.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/DevelopmentModeEnum.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.common.enums;
 
-public enum DevelopmentMode {
+public enum DevelopmentModeEnum {
 
   /** custom code */
   CUSTOM_CODE("Custom Code", 1),
@@ -32,15 +32,15 @@ public enum DevelopmentMode {
 
   private final Integer mode;
 
-  DevelopmentMode(String name, Integer mode) {
+  DevelopmentModeEnum(String name, Integer mode) {
     this.name = name;
     this.mode = mode;
   }
 
-  public static DevelopmentMode of(Integer value) {
-    for (DevelopmentMode developmentMode : values()) {
-      if (developmentMode.mode.equals(value)) {
-        return developmentMode;
+  public static DevelopmentModeEnum of(Integer value) {
+    for (DevelopmentModeEnum developmentModeEnum : values()) {
+      if (developmentModeEnum.mode.equals(value)) {
+        return developmentModeEnum;
       }
     }
     return null;
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionMode.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionModeEnum.java
similarity index 76%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionMode.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionModeEnum.java
index c903a5bcd7..06bedf34b2 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionMode.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/ExecutionModeEnum.java
@@ -21,7 +21,7 @@
 
 import java.util.List;
 
-public enum ExecutionMode {
+public enum ExecutionModeEnum {
 
   /** Local mode */
   LOCAL(0, "local"),
@@ -48,24 +48,24 @@ public enum ExecutionMode {
 
   private final String name;
 
-  ExecutionMode(Integer mode, String name) {
+  ExecutionModeEnum(Integer mode, String name) {
     this.mode = mode;
     this.name = name;
   }
 
-  public static ExecutionMode of(Integer value) {
-    for (ExecutionMode executionMode : values()) {
-      if (executionMode.mode.equals(value)) {
-        return executionMode;
+  public static ExecutionModeEnum of(Integer value) {
+    for (ExecutionModeEnum executionModeEnum : values()) {
+      if (executionModeEnum.mode.equals(value)) {
+        return executionModeEnum;
       }
     }
     return null;
   }
 
-  public static ExecutionMode of(String name) {
-    for (ExecutionMode executionMode : values()) {
-      if (executionMode.name.equals(name)) {
-        return executionMode;
+  public static ExecutionModeEnum of(String name) {
+    for (ExecutionModeEnum executionModeEnum : values()) {
+      if (executionModeEnum.name.equals(name)) {
+        return executionModeEnum;
       }
     }
     return null;
@@ -79,17 +79,17 @@ public String getName() {
     return name;
   }
 
-  public static boolean isYarnMode(ExecutionMode mode) {
+  public static boolean isYarnMode(ExecutionModeEnum mode) {
     return YARN_PER_JOB == mode || YARN_APPLICATION == mode || YARN_SESSION == mode;
   }
 
   // TODO: We'll inline this method back to the corresponding caller lines
   // after dropping the yarn perjob mode.
-  public static boolean isYarnPerJobOrAppMode(ExecutionMode mode) {
+  public static boolean isYarnPerJobOrAppMode(ExecutionModeEnum mode) {
     return YARN_PER_JOB == mode || YARN_APPLICATION == mode;
   }
 
-  public static boolean isYarnSessionMode(ExecutionMode mode) {
+  public static boolean isYarnSessionMode(ExecutionModeEnum mode) {
     return YARN_SESSION == mode;
   }
 
@@ -101,7 +101,7 @@ public static boolean isKubernetesSessionMode(Integer value) {
     return KUBERNETES_NATIVE_SESSION == of(value);
   }
 
-  public static boolean isKubernetesMode(ExecutionMode mode) {
+  public static boolean isKubernetesMode(ExecutionModeEnum mode) {
     return KUBERNETES_NATIVE_SESSION == mode || KUBERNETES_NATIVE_APPLICATION == mode;
   }
 
@@ -118,7 +118,7 @@ public static List getKubernetesMode() {
         KUBERNETES_NATIVE_SESSION.getMode(), KUBERNETES_NATIVE_APPLICATION.getMode());
   }
 
-  public static boolean isSessionMode(ExecutionMode mode) {
+  public static boolean isSessionMode(ExecutionModeEnum mode) {
     return KUBERNETES_NATIVE_SESSION == mode || YARN_SESSION == mode;
   }
 
@@ -126,7 +126,7 @@ public static boolean isRemoteMode(Integer value) {
     return isRemoteMode(of(value));
   }
 
-  public static boolean isRemoteMode(ExecutionMode mode) {
+  public static boolean isRemoteMode(ExecutionModeEnum mode) {
     return REMOTE == mode;
   }
 }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedType.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedTypeEnum.java
similarity index 85%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedType.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedTypeEnum.java
index c87c5a3a6b..df65044532 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedType.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkK8sRestExposedTypeEnum.java
@@ -18,7 +18,7 @@
 package org.apache.streampark.common.enums;
 
 /** kubernetes.rest-service.exposed.type */
-public enum FlinkK8sRestExposedType {
+public enum FlinkK8sRestExposedTypeEnum {
 
   /** LoadBalancer */
   LOAD_BALANCER("LoadBalancer", 0),
@@ -33,13 +33,13 @@ public enum FlinkK8sRestExposedType {
 
   private final Integer type;
 
-  FlinkK8sRestExposedType(String name, Integer type) {
+  FlinkK8sRestExposedTypeEnum(String name, Integer type) {
     this.name = name;
     this.type = type;
   }
 
-  public static FlinkK8sRestExposedType of(Integer value) {
-    for (FlinkK8sRestExposedType order : values()) {
+  public static FlinkK8sRestExposedTypeEnum of(Integer value) {
+    for (FlinkK8sRestExposedTypeEnum order : values()) {
       if (order.type.equals(value)) {
         return order;
       }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedType.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedTypeEnum.java
similarity index 85%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedType.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedTypeEnum.java
index 6f9b8a067a..8c32a55b16 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedType.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/FlinkSqlValidationFailedTypeEnum.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.common.enums;
 
-public enum FlinkSqlValidationFailedType {
+public enum FlinkSqlValidationFailedTypeEnum {
 
   /** Basic test failed (such as null, etc.) */
   VERIFY_FAILED(1),
@@ -36,12 +36,12 @@ public enum FlinkSqlValidationFailedType {
 
   private final int failedType;
 
-  FlinkSqlValidationFailedType(int failedType) {
+  FlinkSqlValidationFailedTypeEnum(int failedType) {
     this.failedType = failedType;
   }
 
-  public static FlinkSqlValidationFailedType of(Integer value) {
-    for (FlinkSqlValidationFailedType type : values()) {
+  public static FlinkSqlValidationFailedTypeEnum of(Integer value) {
+    for (FlinkSqlValidationFailedTypeEnum type : values()) {
       if (type.failedType == value) {
         return type;
       }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrder.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrderEnum.java
similarity index 87%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrder.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrderEnum.java
index e69823e272..0ab2a8821f 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrder.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/ResolveOrderEnum.java
@@ -18,7 +18,7 @@
 package org.apache.streampark.common.enums;
 
 /** classloader.resolve-order */
-public enum ResolveOrder {
+public enum ResolveOrderEnum {
 
   /** parent-first */
   PARENT_FIRST("parent-first", 0),
@@ -30,13 +30,13 @@ public enum ResolveOrder {
 
   private final Integer order;
 
-  ResolveOrder(String name, Integer order) {
+  ResolveOrderEnum(String name, Integer order) {
     this.name = name;
     this.order = order;
   }
 
-  public static ResolveOrder of(Integer value) {
-    for (ResolveOrder order : values()) {
+  public static ResolveOrderEnum of(Integer value) {
+    for (ResolveOrderEnum order : values()) {
       if (order.order.equals(value)) {
         return order;
       }
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreMode.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreModeEnum.java
similarity index 88%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreMode.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreModeEnum.java
index e4a49e8fc1..bb48c1d04b 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreMode.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/RestoreModeEnum.java
@@ -19,7 +19,7 @@
 
 import java.util.Objects;
 
-public enum RestoreMode {
+public enum RestoreModeEnum {
 
   /**
    * In this mode Flink claims ownership of the snapshot and essentially treats it like a
@@ -51,7 +51,7 @@ public int get() {
     return this.mode;
   }
 
-  RestoreMode(int mode) {
+  RestoreModeEnum(int mode) {
     this.mode = mode;
   }
 
@@ -59,10 +59,10 @@ public String getName() {
     return this.toString();
   }
 
-  public static RestoreMode of(Integer value) {
-    for (RestoreMode restoreMode : values()) {
-      if (Objects.equals(restoreMode.mode, value)) {
-        return restoreMode;
+  public static RestoreModeEnum of(Integer value) {
+    for (RestoreModeEnum restoreModeEnum : values()) {
+      if (Objects.equals(restoreModeEnum.mode, value)) {
+        return restoreModeEnum;
       }
     }
     return null;
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/Semantic.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/SemanticEnum.java
similarity index 85%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/Semantic.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/SemanticEnum.java
index f405f28187..9f5ab9f47b 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/Semantic.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/SemanticEnum.java
@@ -18,7 +18,7 @@
 package org.apache.streampark.common.enums;
 
 /** Flink consistency semantics */
-public enum Semantic {
+public enum SemanticEnum {
 
   /**
    * Ensure that the counting results obtained after a fault are consistent with the correct values.
@@ -31,10 +31,10 @@ public enum Semantic {
   /** After the fault occurs, the counting results may be lost. */
   NONE;
 
-  public static Semantic of(String name) {
-    for (Semantic semantic : Semantic.values()) {
-      if (name.equals(semantic.name())) {
-        return semantic;
+  public static SemanticEnum of(String name) {
+    for (SemanticEnum semanticEnum : SemanticEnum.values()) {
+      if (name.equals(semanticEnum.name())) {
+        return semanticEnum;
       }
     }
     return null;
diff --git a/streampark-common/src/main/java/org/apache/streampark/common/enums/StorageType.java b/streampark-common/src/main/java/org/apache/streampark/common/enums/StorageTypeEnum.java
similarity index 88%
rename from streampark-common/src/main/java/org/apache/streampark/common/enums/StorageType.java
rename to streampark-common/src/main/java/org/apache/streampark/common/enums/StorageTypeEnum.java
index 11be2cbec5..8d49a5f980 100644
--- a/streampark-common/src/main/java/org/apache/streampark/common/enums/StorageType.java
+++ b/streampark-common/src/main/java/org/apache/streampark/common/enums/StorageTypeEnum.java
@@ -19,7 +19,7 @@
 
 import org.apache.commons.lang3.StringUtils;
 
-public enum StorageType {
+public enum StorageTypeEnum {
 
   /** hdfs */
   HDFS("hdfs"),
@@ -29,7 +29,7 @@ public enum StorageType {
 
   private final String type;
 
-  StorageType(String type) {
+  StorageTypeEnum(String type) {
     this.type = type;
   }
 
@@ -37,11 +37,11 @@ public String getType() {
     return type;
   }
 
-  public static StorageType of(String identifier) {
+  public static StorageTypeEnum of(String identifier) {
     if (StringUtils.isBlank(identifier)) {
       return LFS;
     }
-    for (StorageType type : values()) {
+    for (StorageTypeEnum type : values()) {
       if (type.type.equals(identifier)) {
         return type;
       }
diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/conf/Workspace.scala b/streampark-common/src/main/scala/org/apache/streampark/common/conf/Workspace.scala
index 8f3e7b10c0..d7d7ee13e1 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/conf/Workspace.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/conf/Workspace.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.streampark.common.conf
 
-import org.apache.streampark.common.enums.StorageType
+import org.apache.streampark.common.enums.StorageTypeEnum
 import org.apache.streampark.common.util.{HdfsUtils, SystemPropertyUtils}
 import org.apache.streampark.common.util.ImplicitsUtils._
 
@@ -24,11 +24,11 @@
 import java.net.URI
 
 object Workspace {
 
-  def of(storageType: StorageType): Workspace = Workspace(storageType)
+  def of(storageType: StorageTypeEnum): Workspace = Workspace(storageType)
 
-  lazy val local: Workspace = Workspace.of(StorageType.LFS)
+  lazy val local: Workspace = Workspace.of(StorageTypeEnum.LFS)
 
-  lazy val remote: Workspace = Workspace.of(StorageType.HDFS)
+  lazy val remote: Workspace = Workspace.of(StorageTypeEnum.HDFS)
 
   private[this] lazy val localWorkspace = local.WORKSPACE
@@ -52,7 +52,7 @@ object Workspace {
 
 }
 
-case class Workspace(storageType: StorageType) {
+case class Workspace(storageType: StorageTypeEnum) {
 
   private[this] def getConfigValue[T](option: InternalOption): T = {
     val s = SystemPropertyUtils.get(option.key)
@@ -68,11 +68,11 @@ case class Workspace(storageType: StorageType) {
 
   lazy val WORKSPACE: String = {
     storageType match {
-      case StorageType.LFS =>
+      case StorageTypeEnum.LFS =>
         val path: String = getConfigValue[String](CommonConfig.STREAMPARK_WORKSPACE_LOCAL)
         require(path != null, "[StreamPark] streampark.workspace.local must not be null")
         path
-      case StorageType.HDFS =>
+      case StorageTypeEnum.HDFS =>
         val path: String = getConfigValue[String](CommonConfig.STREAMPARK_WORKSPACE_REMOTE)
         path match {
           case p if p.isEmpty =>
diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiType.scala b/streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiTypeEnum.scala
similarity index 92%
rename from streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiType.scala
rename to streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiTypeEnum.scala
index d261dc6787..22578a37e4 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiType.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/enums/ApiTypeEnum.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.streampark.common.enums
 
-object ApiType extends Enumeration {
+object ApiTypeEnum extends Enumeration {
   type ApiType = Value
-  val java, scala = Value
+  val JAVA, SCALA = Value
 }
diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerType.scala b/streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerTypeEnum.scala
similarity index 92%
rename from streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerType.scala
rename to streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerTypeEnum.scala
index 9951f8a65b..2f4e0f006c 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerType.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/enums/PlannerTypeEnum.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.streampark.common.enums
 
-object PlannerType extends Enumeration {
+object PlannerTypeEnum extends Enumeration {
   type PlannerType = Value
-  val blink, old, any = Value
+  val BLINK, OLD, ANY = Value
 }
diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/enums/TableMode.scala b/streampark-common/src/main/scala/org/apache/streampark/common/enums/TableModeEnum.scala
similarity index 92%
rename from streampark-common/src/main/scala/org/apache/streampark/common/enums/TableMode.scala
rename to streampark-common/src/main/scala/org/apache/streampark/common/enums/TableModeEnum.scala
index 50bd9628d1..88a78188cc 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/enums/TableMode.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/enums/TableModeEnum.scala
@@ -16,7 +16,7 @@
  */
 package org.apache.streampark.common.enums
 
-object TableMode extends Enumeration {
+object TableModeEnum extends Enumeration {
   type TableMode = Value
-  val batch, streaming = Value
+  val BATCH, STREAMING = Value
 }
diff --git a/streampark-common/src/main/scala/org/apache/streampark/common/fs/FsOperator.scala b/streampark-common/src/main/scala/org/apache/streampark/common/fs/FsOperator.scala
index b7565371c2..d54014af44 100644
--- a/streampark-common/src/main/scala/org/apache/streampark/common/fs/FsOperator.scala
+++ b/streampark-common/src/main/scala/org/apache/streampark/common/fs/FsOperator.scala
@@ -17,18 +17,18 @@
 
 package org.apache.streampark.common.fs
 
-import org.apache.streampark.common.enums.StorageType
+import org.apache.streampark.common.enums.StorageTypeEnum
 
 object FsOperator {
 
-  lazy val lfs: FsOperator = FsOperator.of(StorageType.LFS)
+  lazy val lfs: FsOperator = FsOperator.of(StorageTypeEnum.LFS)
 
-  lazy val hdfs: FsOperator = FsOperator.of(StorageType.HDFS)
+  lazy val hdfs: FsOperator = FsOperator.of(StorageTypeEnum.HDFS)
 
-  def of(storageType: StorageType): FsOperator = {
+  def of(storageType: StorageTypeEnum): FsOperator = {
     storageType match {
-      case StorageType.HDFS => HdfsOperator
-      case StorageType.LFS => LfsOperator
+      case StorageTypeEnum.HDFS => HdfsOperator
+      case StorageTypeEnum.LFS => LfsOperator
       case _ => throw new UnsupportedOperationException(s"Unsupported storageType:$storageType")
     }
   }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
index e575c78241..2dde6014c5 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/interceptor/UploadFileTypeInterceptor.java
@@ -75,7 +75,7 @@ private boolean isPythonFileType(String contentType, InputStream input) {
     try {
       Metadata metadata = new Metadata();
       AutoDetectParser parser = new AutoDetectParser();
-      parser.parse(input, new DefaultHandler(), metadata, new ParseContext());
+      parser.parse(stream, new DefaultHandler(), metadata, new ParseContext());
       String mimeType = metadata.get(HttpHeaders.CONTENT_TYPE);
       return contentType.contains("text/x-python")
           && MediaType.TEXT_PLAIN.toString().equals(mimeType);
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/GitUtils.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/GitUtils.java
index 57531baa91..7aab84af65 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/GitUtils.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/base/util/GitUtils.java
@@ -20,7 +20,7 @@
 import org.apache.streampark.common.util.FileUtils;
 import org.apache.streampark.common.util.SystemPropertyUtils;
 import org.apache.streampark.console.core.entity.Project;
-import org.apache.streampark.console.core.enums.GitCredential;
+import org.apache.streampark.console.core.enums.GitCredentialEnum;
 
 import org.apache.commons.lang3.StringUtils;
 
@@ -79,8 +79,8 @@ public static List getBranchList(Project project) throws GitAPIException
   }
 
   private static void setCredentials(TransportCommand transportCommand, Project project) {
-    GitCredential gitCredential = GitCredential.of(project.getGitCredential());
-    switch (gitCredential) {
+    GitCredentialEnum gitCredentialEnum = GitCredentialEnum.of(project.getGitCredential());
+    switch (gitCredentialEnum) {
      case HTTPS:
        if (!StringUtils.isAllBlank(project.getUserName(), project.getPassword())) {
          UsernamePasswordCredentialsProvider credentialsProvider =
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/annotation/PermissionAction.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/annotation/PermissionAction.java
index 04d964c614..5c6a4ad7dc 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/annotation/PermissionAction.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/annotation/PermissionAction.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.annotation;
 
-import org.apache.streampark.console.core.enums.PermissionType;
+import org.apache.streampark.console.core.enums.PermissionTypeEnum;
 
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
@@ -29,5 +29,5 @@ public @interface PermissionAction {
 
   String id();
 
-  PermissionType type();
+  PermissionTypeEnum type();
 }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java
index a155833336..a16765d807 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/aspect/StreamParkAspect.java
@@ -24,8 +24,8 @@
 import org.apache.streampark.console.core.annotation.ApiAccess;
 import org.apache.streampark.console.core.annotation.PermissionAction;
 import org.apache.streampark.console.core.entity.Application;
-import org.apache.streampark.console.core.enums.PermissionType;
-import org.apache.streampark.console.core.enums.UserType;
+import org.apache.streampark.console.core.enums.PermissionTypeEnum;
+import org.apache.streampark.console.core.enums.UserTypeEnum;
 import org.apache.streampark.console.core.service.CommonService;
 import org.apache.streampark.console.core.service.application.ApplicationManageService;
 import org.apache.streampark.console.core.task.FlinkAppHttpWatcher;
@@ -109,13 +109,13 @@ public RestResponse permissionAction(ProceedingJoinPoint joinPoint) throws Throw
     User currentUser = commonService.getCurrentUser();
     ApiAlertException.throwIfNull(currentUser, "Permission denied, please login first.");
 
-    boolean isAdmin = currentUser.getUserType() == UserType.ADMIN;
+    boolean isAdmin = currentUser.getUserTypeEnum() == UserTypeEnum.ADMIN;
     if (!isAdmin) {
-      PermissionType permissionType = permissionAction.type();
+      PermissionTypeEnum permissionTypeEnum = permissionAction.type();
       Long paramId = getParamId(joinPoint, methodSignature, permissionAction.id());
 
-      switch (permissionType) {
+      switch (permissionTypeEnum) {
         case USER:
           ApiAlertException.throwIfTrue(
               !currentUser.getUserId().equals(paramId),
@@ -137,7 +137,7 @@ public RestResponse permissionAction(ProceedingJoinPoint joinPoint) throws Throw
           break;
         default:
           throw new IllegalArgumentException(
-              String.format("Permission type %s is not supported.", permissionType));
+              String.format("Permission type %s is not supported.", permissionTypeEnum));
       }
     }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertProbeMsg.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertProbeMsg.java
index f9bb316040..af6069993b 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertProbeMsg.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertProbeMsg.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.bean;
 
-import org.apache.streampark.console.core.enums.FlinkAppState;
+import org.apache.streampark.console.core.enums.FlinkAppStateEnum;
 
 import lombok.Data;
 import lombok.NoArgsConstructor;
@@ -40,7 +40,7 @@ public class AlertProbeMsg {
 
   private Integer cancelledJobs = 0;
 
-  public void compute(FlinkAppState state) {
+  public void compute(FlinkAppStateEnum state) {
     this.probeJobs++;
     switch (state) {
       case LOST:
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
index be407078a5..e05195637d 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/bean/AlertTemplate.java
@@ -17,14 +17,14 @@
 
 package org.apache.streampark.console.core.bean;
 
-import org.apache.streampark.common.enums.ClusterState;
-import org.apache.streampark.common.enums.ExecutionMode;
+import org.apache.streampark.common.enums.ClusterStateEnum;
+import org.apache.streampark.common.enums.ExecutionModeEnum;
 import org.apache.streampark.common.util.DateUtils;
 import org.apache.streampark.common.util.YarnUtils;
 import org.apache.streampark.console.core.entity.Application;
 import org.apache.streampark.console.core.entity.FlinkCluster;
-import org.apache.streampark.console.core.enums.CheckPointStatus;
-import org.apache.streampark.console.core.enums.FlinkAppState;
+import org.apache.streampark.console.core.enums.CheckPointStatusEnum;
+import org.apache.streampark.console.core.enums.FlinkAppStateEnum;
 
 import lombok.Data;
 
@@ -57,7 +57,7 @@ public class AlertTemplate implements Serializable {
   private Integer lostJobs;
   private Integer cancelledJobs;
 
-  public static AlertTemplate of(Application application, FlinkAppState appState) {
+  public static AlertTemplate of(Application application, FlinkAppStateEnum appState) {
     return new AlertTemplateBuilder()
         .setDuration(application.getStartTime(), application.getEndTime())
         .setJobName(application.getJobName())
@@ -74,7 +74,8 @@ public static AlertTemplate of(Application application, FlinkAppState appState)
         .build();
   }
 
-  public static AlertTemplate of(Application application, CheckPointStatus checkPointStatus) {
+  public static AlertTemplate of(
+      Application application, CheckPointStatusEnum checkPointStatusEnum) {
     return new AlertTemplateBuilder()
         .setDuration(application.getStartTime(), application.getEndTime())
         .setJobName(application.getJobName())
@@ -90,7 +91,7 @@ public static AlertTemplate of(Application application, CheckPointStatus checkPo
         .build();
   }
 
-  public static AlertTemplate of(FlinkCluster cluster, ClusterState clusterState) {
+  public static AlertTemplate of(FlinkCluster cluster, ClusterStateEnum clusterStateEnum) {
     return new AlertTemplateBuilder()
         .setDuration(cluster.getStartTime(), cluster.getEndTime())
         .setJobName(cluster.getClusterName())
@@ -98,10 +99,10 @@ public static AlertTemplate of(FlinkCluster cluster, ClusterState clusterState)
         .setStartTime(cluster.getStartTime())
         .setEndTime(cluster.getEndTime())
         .setType(3)
-        .setTitle(String.format("Notify: %s %s", cluster.getClusterName(), clusterState.name()))
+        .setTitle(String.format("Notify: %s %s", cluster.getClusterName(), clusterStateEnum.name()))
         .setSubject(
-            String.format("StreamPark Alert: %s %s", cluster.getClusterName(), clusterState))
-        .setStatus(clusterState.name())
+            String.format("StreamPark Alert: %s %s", cluster.getClusterName(), clusterStateEnum))
+        .setStatus(clusterStateEnum.name())
         .setAllJobs(cluster.getAllJobs())
         .setAffectedJobs(cluster.getAffectedJobs())
         .build();
@@ -186,8 +187,8 @@ public AlertTemplateBuilder setLink(String link) {
       return this;
     }
 
-    public AlertTemplateBuilder setLink(ExecutionMode mode, String appId) {
-      if (ExecutionMode.isYarnMode(mode)) {
+    public AlertTemplateBuilder setLink(ExecutionModeEnum mode, String appId) {
+      if (ExecutionModeEnum.isYarnMode(mode)) {
        String format = "%s/proxy/%s/";
        String url = String.format(format, YarnUtils.getRMWebAppURL(false), appId);
        alertTemplate.setLink(url);
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationBuildPipelineController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationBuildPipelineController.java
index 14dcf73266..a692e56fa8 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationBuildPipelineController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationBuildPipelineController.java
@@ -23,10 +23,10 @@
 import org.apache.streampark.console.core.annotation.PermissionAction;
 import org.apache.streampark.console.core.bean.AppBuildDockerResolvedDetail;
 import org.apache.streampark.console.core.entity.AppBuildPipeline;
-import org.apache.streampark.console.core.enums.PermissionType;
+import org.apache.streampark.console.core.enums.PermissionTypeEnum;
 import org.apache.streampark.console.core.service.AppBuildPipeService;
 import org.apache.streampark.flink.packer.pipeline.DockerResolvedSnapshot;
-import org.apache.streampark.flink.packer.pipeline.PipelineType;
+import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum;
 
 import org.apache.shiro.authz.annotation.RequiresPermissions;
 
@@ -75,7 +75,7 @@ public class ApplicationBuildPipelineController {
        schema = @Schema(defaultValue = "false", implementation = boolean.class))
   })
   @ApiAccess
-  @PermissionAction(id = "#appId", type = PermissionType.APP)
+  @PermissionAction(id = "#appId", type = PermissionTypeEnum.APP)
   @PostMapping(value = "build")
   @RequiresPermissions("app:create")
   public RestResponse buildApplication(Long appId, boolean forceBuild) {
@@ -103,7 +103,7 @@ public RestResponse getBuildProgressDetail(Long appId) {
     details.put("pipeline", pipeline.map(AppBuildPipeline::toView).orElse(null));
 
     if (pipeline.isPresent()
-        && PipelineType.FLINK_NATIVE_K8S_APPLICATION == pipeline.get().getPipeType()) {
+        && PipelineTypeEnum.FLINK_NATIVE_K8S_APPLICATION == pipeline.get().getPipeType()) {
       DockerResolvedSnapshot dockerProgress =
           appBuildPipeService.getDockerProgressDetailSnapshot(appId);
       details.put("docker", AppBuildDockerResolvedDetail.of(dockerProgress));
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java
index 048f95f2cc..dfe803bce3 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationController.java
@@ -29,8 +29,8 @@
 import org.apache.streampark.console.core.entity.Application;
 import org.apache.streampark.console.core.entity.ApplicationBackUp;
 import org.apache.streampark.console.core.entity.ApplicationLog;
-import org.apache.streampark.console.core.enums.AppExistsState;
-import org.apache.streampark.console.core.enums.PermissionType;
+import org.apache.streampark.console.core.enums.AppExistsStateEnum;
+import org.apache.streampark.console.core.enums.PermissionTypeEnum;
 import org.apache.streampark.console.core.service.ApplicationBackUpService;
 import org.apache.streampark.console.core.service.ApplicationLogService;
 import org.apache.streampark.console.core.service.ResourceService;
@@ -92,7 +92,7 @@ public RestResponse get(Application app) {
 
   @Operation(summary = "Create application")
   @ApiAccess
-  @PermissionAction(id = "#app.teamId", type = PermissionType.TEAM)
+  @PermissionAction(id = "#app.teamId", type = PermissionTypeEnum.TEAM)
   @PostMapping("create")
   @RequiresPermissions("app:create")
   public RestResponse create(Application app) throws IOException {
@@ -118,7 +118,7 @@ public RestResponse create(Application app) throws IOException {
     @Parameter(name = "args", description = "new application args", in = ParameterIn.QUERY)
   })
   @ApiAccess
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping(value = "copy")
   @RequiresPermissions("app:copy")
   public RestResponse copy(@Parameter(hidden = true) Application app) throws IOException {
@@ -128,7 +128,7 @@ public RestResponse copy(@Parameter(hidden = true) Application app) throws IOExc
 
   @Operation(summary = "Update application")
   @AppUpdated
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping("update")
   @RequiresPermissions("app:update")
   public RestResponse update(Application app) {
@@ -163,7 +163,7 @@ public RestResponse mapping(Application app) {
 
   @Operation(summary = "Revoke application")
   @AppUpdated
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping("revoke")
   @RequiresPermissions("app:release")
   public RestResponse revoke(Application app) {
@@ -202,7 +202,7 @@ public RestResponse revoke(Application app) {
        schema = @Schema(implementation = boolean.class, defaultValue = "false"))
   })
   @ApiAccess
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping(value = "start")
   @RequiresPermissions("app:start")
   public RestResponse start(@Parameter(hidden = true) Application app) {
@@ -253,7 +253,7 @@ public RestResponse start(@Parameter(hidden = true) Application app) {
        example = "false",
        schema = @Schema(implementation = boolean.class, defaultValue = "false"))
   })
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping(value = "cancel")
   @RequiresPermissions("app:cancel")
   public RestResponse cancel(@Parameter(hidden = true) Application app) throws Exception {
@@ -264,7 +264,7 @@ public RestResponse cancel(@Parameter(hidden = true) Application app) throws Exc
   @Operation(summary = "Clean application")
   @AppUpdated
   @ApiAccess
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping("clean")
   @RequiresPermissions("app:clean")
   public RestResponse clean(Application app) {
@@ -274,7 +274,7 @@ public RestResponse clean(Application app) {
 
   /** force stop(stop normal start or in progress) */
   @Operation(summary = "Force stop application")
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping("forcedStop")
   @RequiresPermissions("app:cancel")
   public RestResponse forcedStop(Application app) {
@@ -298,7 +298,7 @@ public RestResponse yarnName(Application app) {
 
   @Operation(summary = "Check the application exist status")
   @PostMapping("checkName")
   public RestResponse checkName(Application app) {
-    AppExistsState exists = applicationInfoService.checkExists(app);
+    AppExistsStateEnum exists = applicationInfoService.checkExists(app);
     return RestResponse.success(exists.get());
   }
 
@@ -331,7 +331,7 @@ public RestResponse optionlog(ApplicationLog applicationLog, RestRequest request
   }
 
   @Operation(summary = "Delete application operation log")
-  @PermissionAction(id = "#applicationLog.appId", type = PermissionType.APP)
+  @PermissionAction(id = "#applicationLog.appId", type = PermissionTypeEnum.APP)
   @PostMapping("deleteOperationLog")
   @RequiresPermissions("app:delete")
   public RestResponse deleteOperationLog(ApplicationLog applicationLog) {
@@ -340,7 +340,7 @@ public RestResponse deleteOperationLog(ApplicationLog applicationLog) {
   }
 
   @Operation(summary = "Delete application")
-  @PermissionAction(id = "#app.id", type = PermissionType.APP)
+  @PermissionAction(id = "#app.id", type = PermissionTypeEnum.APP)
   @PostMapping("delete")
   @RequiresPermissions("app:delete")
   public RestResponse delete(Application app) throws InternalException {
@@ -349,7 +349,7 @@ public RestResponse delete(Application app) throws InternalException {
   }
 
   @Operation(summary = "Backup application when deleted")
-  @PermissionAction(id = "#backUp.appId", type = PermissionType.APP)
+  @PermissionAction(id = "#backUp.appId", type = PermissionTypeEnum.APP)
   @PostMapping("deletebak")
   public RestResponse deleteBak(ApplicationBackUp backUp) throws InternalException {
     Boolean deleted = backUpService.delete(backUp.getId());
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java
index 324ca40660..4a09164dab 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ApplicationHistoryController.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.controller;
 
-import org.apache.streampark.common.enums.ExecutionMode;
+import org.apache.streampark.common.enums.ExecutionModeEnum;
 import org.apache.streampark.console.base.domain.RestResponse;
 import org.apache.streampark.console.core.service.application.ApplicationInfoService;
 
@@ -65,7 +65,7 @@ public RestResponse listK8sNamespace() {
   @RequiresPermissions("app:create")
   public RestResponse listSessionClusterId(int executionMode) {
     List clusterIds;
-    switch (ExecutionMode.of(executionMode)) {
+    switch (ExecutionModeEnum.of(executionMode)) {
      case KUBERNETES_NATIVE_SESSION:
      case YARN_SESSION:
      case REMOTE:
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkClusterController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkClusterController.java
index a2b0048c2d..64f2072146 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkClusterController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/FlinkClusterController.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.controller;
 
-import org.apache.streampark.common.enums.ClusterState;
+import org.apache.streampark.common.enums.ClusterStateEnum;
 import org.apache.streampark.console.base.domain.RestResponse;
 import org.apache.streampark.console.base.exception.InternalException;
 import org.apache.streampark.console.core.bean.ResponseResult;
@@ -93,7 +93,7 @@ public RestResponse get(Long id) throws InternalException {
   @Operation(summary = "Start flink cluster")
   @PostMapping("start")
   public RestResponse start(FlinkCluster cluster) {
-    flinkClusterService.updateClusterState(cluster.getId(), ClusterState.STARTING);
+    flinkClusterService.updateClusterState(cluster.getId(), ClusterStateEnum.STARTING);
     flinkClusterService.start(cluster);
     return RestResponse.success();
   }
@@ -102,7 +102,7 @@ public RestResponse start(FlinkCluster cluster) {
   @PostMapping("shutdown")
   public RestResponse shutdown(FlinkCluster cluster) {
     if (flinkClusterService.allowShutdownCluster(cluster)) {
-      flinkClusterService.updateClusterState(cluster.getId(), ClusterState.CANCELLING);
+      flinkClusterService.updateClusterState(cluster.getId(), ClusterStateEnum.CANCELLING);
       flinkClusterService.shutdown(cluster);
     }
     return RestResponse.success();
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/MessageController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/MessageController.java
index 87d763783d..7fc544c4de 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/MessageController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/MessageController.java
@@ -20,7 +20,7 @@
 import org.apache.streampark.console.base.domain.RestRequest;
 import org.apache.streampark.console.base.domain.RestResponse;
 import org.apache.streampark.console.core.entity.Message;
-import org.apache.streampark.console.core.enums.NoticeType;
+import org.apache.streampark.console.core.enums.NoticeTypeEnum;
 import org.apache.streampark.console.core.service.MessageService;
 
 import com.baomidou.mybatisplus.core.metadata.IPage;
@@ -45,8 +45,8 @@ public class MessageController {
   @Operation(summary = "List notices")
   @PostMapping("notice")
   public RestResponse notice(Integer type, RestRequest request) {
-    NoticeType noticeType = NoticeType.of(type);
-    IPage pages = messageService.getUnRead(noticeType, request);
+    NoticeTypeEnum noticeTypeEnum = NoticeTypeEnum.of(type);
+    IPage pages = messageService.getUnRead(noticeTypeEnum, request);
     return RestResponse.success(pages);
   }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
index 859261933a..57476dfed5 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/controller/ProjectController.java
@@ -22,7 +22,7 @@
 import org.apache.streampark.console.base.exception.ApiAlertException;
 import org.apache.streampark.console.core.annotation.AppUpdated;
 import org.apache.streampark.console.core.entity.Project;
-import org.apache.streampark.console.core.enums.GitAuthorizedError;
+import org.apache.streampark.console.core.enums.GitAuthorizedErrorEnum;
 import org.apache.streampark.console.core.service.ProjectService;
 
 import org.apache.shiro.authz.annotation.RequiresPermissions;
@@ -120,7 +120,7 @@ public RestResponse delete(Long id) {
 
   @Operation(summary = "Authenticate git project")
   @PostMapping("gitcheck")
   public RestResponse gitCheck(Project project) {
-    GitAuthorizedError error = project.gitCheck();
+    GitAuthorizedErrorEnum error = project.gitCheck();
     return RestResponse.success().data(error.getType());
   }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
index fddcd80a2a..75b5e386c3 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/AppBuildPipeline.java
@@ -23,9 +23,9 @@
 import org.apache.streampark.flink.packer.pipeline.BuildResult;
 import org.apache.streampark.flink.packer.pipeline.PipeError;
 import org.apache.streampark.flink.packer.pipeline.PipeSnapshot;
-import org.apache.streampark.flink.packer.pipeline.PipelineStatus;
-import org.apache.streampark.flink.packer.pipeline.PipelineStepStatus;
-import org.apache.streampark.flink.packer.pipeline.PipelineType;
+import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
+import org.apache.streampark.flink.packer.pipeline.PipelineStepStatusEnum;
+import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum;
 
 import org.apache.commons.lang3.StringUtils;
 
@@ -96,37 +96,37 @@ public class AppBuildPipeline {
 
   @Nonnull
   @JsonIgnore
-  public PipelineType getPipeType() {
-    return PipelineType.of(pipeTypeCode);
+  public PipelineTypeEnum getPipeType() {
+    return PipelineTypeEnum.of(pipeTypeCode);
   }
 
   @JsonIgnore
-  public AppBuildPipeline setPipeType(@Nonnull PipelineType pipeType) {
+  public AppBuildPipeline setPipeType(@Nonnull PipelineTypeEnum pipeType) {
     this.pipeTypeCode = pipeType.getCode();
     return this;
   }
 
   @Nonnull
   @JsonIgnore
-  public PipelineStatus getPipelineStatus() {
-    return PipelineStatus.of(pipeStatusCode);
+  public PipelineStatusEnum getPipelineStatus() {
+    return PipelineStatusEnum.of(pipeStatusCode);
   }
 
   @JsonIgnore
-  public AppBuildPipeline setPipeStatus(@Nonnull PipelineStatus pipeStatus) {
+  public AppBuildPipeline setPipeStatus(@Nonnull PipelineStatusEnum pipeStatus) {
     this.pipeStatusCode = pipeStatus.getCode();
     return this;
   }
 
   @Nonnull
   @JsonIgnore
-  public Map<Integer, PipelineStepStatus> getStepStatus() {
+  public Map<Integer, PipelineStepStatusEnum> getStepStatus() {
     if (StringUtils.isBlank(stepStatusJson)) {
       return Collections.emptyMap();
     }
     try {
       return JacksonUtils.read(
-          stepStatusJson, new TypeReference<Map<Integer, PipelineStepStatus>>() {});
+          stepStatusJson, new TypeReference<Map<Integer, PipelineStepStatusEnum>>() {});
     } catch (JsonProcessingException e) {
       log.error(
           "json parse error on ApplicationBuildPipeline, stepStatusJson={}", stepStatusJson, e);
@@ -135,7 +135,7 @@ public Map getStepStatus() {
   }
 
   @JsonIgnore
-  public AppBuildPipeline setStepStatus(@Nonnull Map<Integer, PipelineStepStatus> stepStatus) {
+  public AppBuildPipeline setStepStatus(@Nonnull Map<Integer, PipelineStepStatusEnum> stepStatus) {
     try {
       this.stepStatusJson = JacksonUtils.write(stepStatus);
     } catch (JsonProcessingException e) {
@@ -235,7 +235,7 @@ public long calCostSecond() {
   @Nullable
   @JsonIgnore
   public R getBuildResult() {
-    PipelineType pipeType = getPipeType();
+    PipelineTypeEnum pipeType = getPipeType();
     if (pipeType.isUnknown() || buildResultJson == null) {
       return null;
     }
@@ -292,7 +292,7 @@ public static class View {
     public static View of(@Nonnull AppBuildPipeline pipe) {
       // combine step info
       Map stepDesc = pipe.getPipeType().getSteps();
-      Map<Integer, PipelineStepStatus> stepStatus = pipe.getStepStatus();
+      Map<Integer, PipelineStepStatusEnum> stepStatus = pipe.getStepStatus();
       Map stepTs = pipe.getStepStatusTimestamp();
       List steps = new ArrayList<>(stepDesc.size());
       for (int i = 1; i <= pipe.getPipeType().getSteps().size(); i++) {
@@ -300,7 +300,7 @@ public static View of(@Nonnull AppBuildPipeline pipe) {
            new Step()
                .setSeq(i)
                .setDesc(stepDesc.getOrDefault(i, "unknown step"))
-                .setStatus(stepStatus.getOrDefault(i, PipelineStepStatus.unknown).getCode());
+                .setStatus(stepStatus.getOrDefault(i, PipelineStepStatusEnum.unknown).getCode());
        Long st = stepTs.get(i);
        if (st != null) {
          step.setTs(new Date(st));
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
index 2e8a06b61d..b92c3dd62b 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Application.java
@@ -20,18 +20,18 @@
 import org.apache.streampark.common.conf.ConfigConst;
 import org.apache.streampark.common.conf.K8sFlinkConfig;
 import org.apache.streampark.common.conf.Workspace;
-import org.apache.streampark.common.enums.ApplicationType;
-import org.apache.streampark.common.enums.DevelopmentMode;
-import org.apache.streampark.common.enums.ExecutionMode;
-import org.apache.streampark.common.enums.FlinkK8sRestExposedType;
-import org.apache.streampark.common.enums.StorageType;
+import org.apache.streampark.common.enums.ApplicationTypeEnum;
+import org.apache.streampark.common.enums.DevelopmentModeEnum;
+import org.apache.streampark.common.enums.ExecutionModeEnum;
+import org.apache.streampark.common.enums.FlinkK8sRestExposedTypeEnum;
+import org.apache.streampark.common.enums.StorageTypeEnum;
 import org.apache.streampark.common.fs.FsOperator;
 import org.apache.streampark.console.base.util.JacksonUtils;
 import org.apache.streampark.console.core.bean.AppControl;
 import org.apache.streampark.console.core.bean.Dependency;
-import org.apache.streampark.console.core.enums.FlinkAppState;
-import org.apache.streampark.console.core.enums.ReleaseState;
-import org.apache.streampark.console.core.enums.ResourceFrom;
+import org.apache.streampark.console.core.enums.FlinkAppStateEnum;
+import org.apache.streampark.console.core.enums.ReleaseStateEnum;
+import org.apache.streampark.console.core.enums.ResourceFromEnum;
 import org.apache.streampark.console.core.metrics.flink.JobsOverview;
 import org.apache.streampark.console.core.utils.YarnQueueLabelExpression;
 import org.apache.streampark.flink.kubernetes.model.K8sPodTemplates;
@@ -276,8 +276,8 @@ public void setState(Integer state) {
   }
 
   public void setYarnQueueByHotParams() {
-    if (!(ExecutionMode.YARN_APPLICATION == this.getExecutionModeEnum()
-        || ExecutionMode.YARN_PER_JOB == this.getExecutionModeEnum())) {
+    if (!(ExecutionModeEnum.YARN_APPLICATION == this.getExecutionModeEnum()
+        || ExecutionModeEnum.YARN_PER_JOB == this.getExecutionModeEnum())) {
       return;
     }
 
@@ -336,28 +336,28 @@ public boolean isCanBeStart() {
   }
 
   @JsonIgnore
-  public ReleaseState getReleaseState() {
-    return ReleaseState.of(release);
+  public ReleaseStateEnum getReleaseState() {
+    return ReleaseStateEnum.of(release);
   }
 
   @JsonIgnore
-  public DevelopmentMode getDevelopmentMode() {
-    return DevelopmentMode.of(jobType);
+  public DevelopmentModeEnum getDevelopmentMode() {
+    return DevelopmentModeEnum.of(jobType);
   }
 
   @JsonIgnore
-  public FlinkAppState getStateEnum() {
-    return FlinkAppState.of(state);
+  public FlinkAppStateEnum getStateEnum() {
+    return FlinkAppStateEnum.of(state);
   }
 
   @JsonIgnore
-  public FlinkK8sRestExposedType getK8sRestExposedTypeEnum() {
-    return FlinkK8sRestExposedType.of(this.k8sRestExposedType);
+  public FlinkK8sRestExposedTypeEnum getK8sRestExposedTypeEnum() {
+    return FlinkK8sRestExposedTypeEnum.of(this.k8sRestExposedType);
   }
 
   @JsonIgnore
-  public ExecutionMode getExecutionModeEnum() {
-    return ExecutionMode.of(executionMode);
+  public ExecutionModeEnum getExecutionModeEnum() {
+    return ExecutionModeEnum.of(executionMode);
   }
 
   public boolean cpFailedTrigger() {
@@ -423,8 +423,8 @@ public String getAppLib() {
   }
 
   @JsonIgnore
-  public ApplicationType getApplicationType() {
-    return ApplicationType.of(appType);
+  public ApplicationTypeEnum getApplicationType() {
+    return ApplicationTypeEnum.of(appType);
   }
 
   @JsonIgnore
@@ -441,40 +441,40 @@ public Map getOptionMap() {
 
   @JsonIgnore
   public boolean isFlinkSqlJob() {
-    return DevelopmentMode.FLINK_SQL.getMode().equals(this.getJobType());
+    return DevelopmentModeEnum.FLINK_SQL.getMode().equals(this.getJobType());
   }
 
   @JsonIgnore
   public boolean isFlinkSqlJobOrPyFlinkJob() {
-    return DevelopmentMode.FLINK_SQL.getMode().equals(this.getJobType())
-        || DevelopmentMode.PYFLINK.getMode().equals(this.getJobType());
+    return DevelopmentModeEnum.FLINK_SQL.getMode().equals(this.getJobType())
+        || DevelopmentModeEnum.PYFLINK.getMode().equals(this.getJobType());
   }
 
   @JsonIgnore
   public boolean isCustomCodeJob() {
-    return DevelopmentMode.CUSTOM_CODE.getMode().equals(this.getJobType());
+    return DevelopmentModeEnum.CUSTOM_CODE.getMode().equals(this.getJobType());
   }
 
   @JsonIgnore
   public boolean isCustomCodeOrPyFlinkJob() {
-    return DevelopmentMode.CUSTOM_CODE.getMode().equals(this.getJobType())
-        || DevelopmentMode.PYFLINK.getMode().equals(this.getJobType());
+    return DevelopmentModeEnum.CUSTOM_CODE.getMode().equals(this.getJobType())
+        || DevelopmentModeEnum.PYFLINK.getMode().equals(this.getJobType());
   }
 
   @JsonIgnore
   public boolean isUploadJob() {
     return isCustomCodeOrPyFlinkJob()
-        && ResourceFrom.UPLOAD.getValue().equals(this.getResourceFrom());
+        && ResourceFromEnum.UPLOAD.getValue().equals(this.getResourceFrom());
   }
 
   @JsonIgnore
   public boolean isCICDJob() {
     return isCustomCodeOrPyFlinkJob()
-        && ResourceFrom.CICD.getValue().equals(this.getResourceFrom());
+        && ResourceFromEnum.CICD.getValue().equals(this.getResourceFrom());
   }
 
   public boolean isStreamParkJob() {
-    return this.getAppType() == ApplicationType.STREAMPARK_FLINK.getType();
+    return this.getAppType() == ApplicationTypeEnum.STREAMPARK_FLINK.getType();
   }
 
   @JsonIgnore
@@ -490,12 +490,12 @@ public DependencyInfo getDependencyInfo() {
 
   @JsonIgnore
   public boolean isRunning() {
-    return FlinkAppState.RUNNING.getValue() == this.getState();
+    return FlinkAppStateEnum.RUNNING.getValue() == this.getState();
   }
 
   @JsonIgnore
   public boolean isNeedRollback() {
-    return ReleaseState.NEED_ROLLBACK.get() == this.getRelease();
+    return ReleaseStateEnum.NEED_ROLLBACK.get() == this.getRelease();
   }
 
   @JsonIgnore
@@ -507,23 +507,23 @@ public boolean isNeedRestartOnFailed() {
   }
 
   @JsonIgnore
-  public StorageType getStorageType() {
+  public StorageTypeEnum getStorageType() {
     return getStorageType(getExecutionMode());
   }
 
-  public static StorageType getStorageType(Integer execMode) {
-    ExecutionMode executionMode = ExecutionMode.of(execMode);
-    switch (Objects.requireNonNull(executionMode)) {
+  public static StorageTypeEnum getStorageType(Integer execMode) {
+    ExecutionModeEnum executionModeEnum = ExecutionModeEnum.of(execMode);
+    switch (Objects.requireNonNull(executionModeEnum)) {
       case YARN_APPLICATION:
-        return StorageType.HDFS;
+        return StorageTypeEnum.HDFS;
       case YARN_PER_JOB:
       case YARN_SESSION:
       case KUBERNETES_NATIVE_SESSION:
      case KUBERNETES_NATIVE_APPLICATION:
      case REMOTE:
-        return StorageType.LFS;
+        return StorageTypeEnum.LFS;
       default:
-        throw new UnsupportedOperationException("Unsupported ".concat(executionMode.getName()));
+        throw new UnsupportedOperationException("Unsupported ".concat(executionModeEnum.getName()));
     }
   }
 
@@ -559,7 +559,7 @@ public void updateHotParams(Application appParam) {
     if (appParam != this) {
       this.hotParams = null;
     }
-    ExecutionMode executionModeEnum = appParam.getExecutionModeEnum();
+    ExecutionModeEnum executionModeEnum = appParam.getExecutionModeEnum();
     Map hotParams = new HashMap<>(0);
     if (needFillYarnQueueLabel(executionModeEnum)) {
       hotParams.putAll(YarnQueueLabelExpression.getQueueLabelMap(appParam.getYarnQueue()));
@@ -569,8 +569,8 @@ public void updateHotParams(Application appParam) {
     }
   }
 
-  private boolean needFillYarnQueueLabel(ExecutionMode mode) {
-    return ExecutionMode.YARN_PER_JOB == mode || ExecutionMode.YARN_APPLICATION == mode;
+  private boolean needFillYarnQueueLabel(ExecutionModeEnum mode) {
+    return ExecutionModeEnum.YARN_PER_JOB == mode || ExecutionModeEnum.YARN_APPLICATION == mode;
   }
 
   @Override
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/ApplicationConfig.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/ApplicationConfig.java
index 5d9fba53dd..48a2f951eb 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/ApplicationConfig.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/ApplicationConfig.java
@@ -20,7 +20,7 @@
 import org.apache.streampark.common.conf.ConfigConst;
 import org.apache.streampark.common.util.DeflaterUtils;
 import org.apache.streampark.common.util.PropertiesUtils;
-import org.apache.streampark.console.core.enums.ConfigFileType;
+import org.apache.streampark.console.core.enums.ConfigFileTypeEnum;
 
 import com.baomidou.mybatisplus.annotation.FieldStrategy;
 import com.baomidou.mybatisplus.annotation.IdType;
@@ -75,7 +75,7 @@ public void setToApplication(Application application) {
   }
 
   public Map readConfig() {
-    ConfigFileType fileType = ConfigFileType.of(this.format);
+    ConfigFileTypeEnum fileType = ConfigFileTypeEnum.of(this.format);
     Map configs = null;
     if (fileType != null) {
       switch (fileType) {
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Effective.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Effective.java
index 35613076d2..d5befb9483 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Effective.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Effective.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.entity;
 
-import org.apache.streampark.console.core.enums.EffectiveType;
+import org.apache.streampark.console.core.enums.EffectiveTypeEnum;
 
 import com.baomidou.mybatisplus.annotation.IdType;
 import com.baomidou.mybatisplus.annotation.TableId;
@@ -45,11 +45,11 @@ public class Effective {
   private Long targetId;
   private Date createTime;
 
-  private transient EffectiveType effectiveType;
+  private transient EffectiveTypeEnum effectiveTypeEnum;
 
   public Effective() {}
 
-  public Effective(Long appId, EffectiveType type, Long targetId) {
+  public Effective(Long appId, EffectiveTypeEnum type, Long targetId) {
     this.appId = appId;
     this.targetType = type.getType();
     this.targetId = targetId;
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkCluster.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkCluster.java
index a09727e2c3..0c3ef209a2 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkCluster.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/FlinkCluster.java
@@ -18,10 +18,10 @@
 package org.apache.streampark.console.core.entity;
 
 import org.apache.streampark.common.conf.ConfigConst;
-import org.apache.streampark.common.enums.ClusterState;
-import org.apache.streampark.common.enums.ExecutionMode;
-import org.apache.streampark.common.enums.FlinkK8sRestExposedType;
-import org.apache.streampark.common.enums.ResolveOrder;
+import org.apache.streampark.common.enums.ClusterStateEnum;
+import org.apache.streampark.common.enums.ExecutionModeEnum;
+import org.apache.streampark.common.enums.FlinkK8sRestExposedTypeEnum;
+import org.apache.streampark.common.enums.ResolveOrderEnum;
 import org.apache.streampark.common.util.HttpClientUtils;
 import org.apache.streampark.common.util.PropertiesUtils;
 import org.apache.streampark.console.base.util.JacksonUtils;
@@ -118,16 +118,16 @@ public class FlinkCluster implements Serializable {
 
   private transient Integer affectedJobs = 0;
 
   @JsonIgnore
-  public FlinkK8sRestExposedType getK8sRestExposedTypeEnum() {
-    return FlinkK8sRestExposedType.of(this.k8sRestExposedType);
+  public FlinkK8sRestExposedTypeEnum getK8sRestExposedTypeEnum() {
+    return FlinkK8sRestExposedTypeEnum.of(this.k8sRestExposedType);
   }
 
-  public ExecutionMode getExecutionModeEnum() {
-    return ExecutionMode.of(this.executionMode);
+  public ExecutionModeEnum getExecutionModeEnum() {
+    return ExecutionModeEnum.of(this.executionMode);
   }
 
-  public ClusterState getClusterStateEnum() {
-    return ClusterState.of(this.clusterState);
+  public ClusterStateEnum getClusterStateEnum() {
+    return ClusterStateEnum.of(this.clusterState);
   }
 
   @JsonIgnore
@@ -137,7 +137,7 @@ public Map getOptionMap() {
       return Collections.emptyMap();
     }
     Map map = JacksonUtils.read(this.options, Map.class);
-    if (ExecutionMode.YARN_SESSION == getExecutionModeEnum()) {
+    if (ExecutionModeEnum.YARN_SESSION == getExecutionModeEnum()) {
       map.put(ConfigConst.KEY_YARN_APP_NAME(), this.clusterName);
       map.putAll(YarnQueueLabelExpression.getQueueLabelMap(yarnQueue));
     }
@@ -181,9 +181,9 @@ public Map getProperties() {
        PropertiesUtils.extractDynamicPropertiesAsJava(this.getDynamicProperties());
     map.putAll(this.getOptionMap());
     map.putAll(dynamicProperties);
-    ResolveOrder resolveOrder = ResolveOrder.of(this.getResolveOrder());
-    if (resolveOrder != null) {
-      map.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrder.getName());
+    ResolveOrderEnum resolveOrderEnum = ResolveOrderEnum.of(this.getResolveOrder());
+    if (resolveOrderEnum != null) {
+      map.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrderEnum.getName());
     }
     return map;
   }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Message.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Message.java
index ce0087637b..e56b86c8ad 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Message.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Message.java
@@ -17,7 +17,7 @@
 
 package org.apache.streampark.console.core.entity;
 
-import org.apache.streampark.console.core.enums.NoticeType;
+import org.apache.streampark.console.core.enums.NoticeTypeEnum;
 
 import com.baomidou.mybatisplus.annotation.IdType;
 import com.baomidou.mybatisplus.annotation.TableId;
@@ -50,12 +50,13 @@ public class Message {
 
   public Message() {}
 
-  public Message(Long userId, Long appId, String title, String context, NoticeType noticeType) {
+  public Message(
+      Long userId, Long appId, String title, String context, NoticeTypeEnum noticeTypeEnum) {
     this.userId = userId;
     this.appId = appId;
     this.title = title;
     this.context = context;
-    this.type = noticeType.get();
+    this.type = noticeTypeEnum.get();
     this.createTime = new Date();
     this.isRead = false;
   }
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Project.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Project.java
index 98b2ff7f9a..fabdfdcb30 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Project.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Project.java
@@ -25,7 +25,7 @@
 import org.apache.streampark.console.base.util.CommonUtils;
 import org.apache.streampark.console.base.util.GitUtils;
 import org.apache.streampark.console.base.util.WebUtils;
-import
org.apache.streampark.console.core.enums.GitAuthorizedError; +import org.apache.streampark.console.core.enums.GitAuthorizedErrorEnum; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; @@ -150,18 +150,18 @@ public List getAllBranches() { } } - public GitAuthorizedError gitCheck() { + public GitAuthorizedErrorEnum gitCheck() { try { GitUtils.getBranchList(this); - return GitAuthorizedError.SUCCESS; + return GitAuthorizedErrorEnum.SUCCESS; } catch (Exception e) { String err = e.getMessage(); if (err.contains("not authorized")) { - return GitAuthorizedError.ERROR; + return GitAuthorizedErrorEnum.ERROR; } else if (err.contains("Authentication is required")) { - return GitAuthorizedError.REQUIRED; + return GitAuthorizedErrorEnum.REQUIRED; } - return GitAuthorizedError.UNKNOW; + return GitAuthorizedErrorEnum.UNKNOW; } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java index e02661f1a6..87903a486b 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/entity/Resource.java @@ -17,8 +17,8 @@ package org.apache.streampark.console.core.entity; -import org.apache.streampark.console.core.enums.EngineType; -import org.apache.streampark.console.core.enums.ResourceType; +import org.apache.streampark.console.core.enums.EngineTypeEnum; +import org.apache.streampark.console.core.enums.ResourceTypeEnum; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableId; @@ -54,9 +54,9 @@ public class Resource implements Serializable { /** user id of creator */ private Long creatorId; - private ResourceType resourceType; + private ResourceTypeEnum resourceTypeEnum; - private EngineType engineType; + private EngineTypeEnum engineTypeEnum; // for flink app private String mainClass; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenStateEnum.java similarity index 93% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenState.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenStateEnum.java index b2a3735a78..8236844950 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenState.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AccessTokenStateEnum.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.enums; -public enum AccessTokenState { +public enum AccessTokenStateEnum { /** not added token */ NULL(0), @@ -30,7 +30,7 @@ public enum AccessTokenState { private final int value; - AccessTokenState(int value) { + AccessTokenStateEnum(int value) { this.value = value; } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertType.java 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertTypeEnum.java
similarity index 79%
rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertType.java
rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertTypeEnum.java
index b63c8495d0..6756b8aad3 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertType.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AlertTypeEnum.java
@@ -36,7 +36,7 @@
/** The AlertType enum represents different types of alerts that can be used for notifications. */
@Getter
-public enum AlertType {
+public enum AlertTypeEnum {
/** Email */
EMAIL(1, EmailAlertNotifyServiceImpl.class),
@@ -63,27 +63,27 @@ public enum AlertType {
private final Class clazz;
/** A cache map used to quickly get the alert type from an integer code */
- private static final Map<Integer, AlertType> CACHE_MAP = createCacheMap();
+ private static final Map<Integer, AlertTypeEnum> CACHE_MAP = createCacheMap();
- private static Map<Integer, AlertType> createCacheMap() {
- Map<Integer, AlertType> map = new HashMap<>();
- for (AlertType notifyType : AlertType.values()) {
+ private static Map<Integer, AlertTypeEnum> createCacheMap() {
+ Map<Integer, AlertTypeEnum> map = new HashMap<>();
+ for (AlertTypeEnum notifyType : AlertTypeEnum.values()) {
map.put(notifyType.code, notifyType);
}
return Collections.unmodifiableMap(map);
}
- AlertType(Integer code, Class clazz) {
+ AlertTypeEnum(Integer code, Class clazz) {
this.code = code;
this.clazz = clazz;
}
- public static List<AlertType> decode(Integer level) {
+ public static List<AlertTypeEnum> decode(Integer level) {
if (level == null) {
level = EMPTY_LEVEL;
}
- List<AlertType> result = new ArrayList<>(AlertType.values().length);
+ List<AlertTypeEnum> result = new ArrayList<>(AlertTypeEnum.values().length);
while (level != 0) {
int code = level & -level;
result.add(getByCode(code));
@@ -92,19 +92,19 @@ public static List<AlertType> decode(Integer level) {
return result;
}
- public static int encode(List<AlertType> alertTypes) {
- if (CollectionUtils.isEmpty(alertTypes)) {
+ public static int encode(List<AlertTypeEnum> alertTypeEnums) {
+ if (CollectionUtils.isEmpty(alertTypeEnums)) {
return EMPTY_LEVEL;
}
int result = 0;
- for (AlertType alertType : alertTypes) {
- result |= alertType.code;
+ for (AlertTypeEnum alertTypeEnum : alertTypeEnums) {
+ result |= alertTypeEnum.code;
}
return result;
}
- private static AlertType getByCode(Integer code) {
+ private static AlertTypeEnum getByCode(Integer code) {
return CACHE_MAP.get(code);
}
}
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsStateEnum.java
similarity index 94%
rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsState.java
rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsStateEnum.java
index 707a60ac69..cc6f570dd2 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsState.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/AppExistsStateEnum.java
@@ -17,7 +17,7 @@
package org.apache.streampark.console.core.enums;
-public enum AppExistsState {
+public
enum AppExistsStateEnum { /** no exists */ NO(0), @@ -36,7 +36,7 @@ public enum AppExistsState { private final int value; - AppExistsState(int value) { + AppExistsStateEnum(int value) { this.value = value; } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildStateEnum.java similarity index 92% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildState.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildStateEnum.java index 2229a8b36c..f1de52ac55 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildState.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/BuildStateEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum BuildState { +public enum BuildStateEnum { /** has changed, need rebuild */ NEED_REBUILD(-2), @@ -37,7 +37,7 @@ public enum BuildState { private final int value; - BuildState(int value) { + BuildStateEnum(int value) { this.value = value; } @@ -45,7 +45,7 @@ public int get() { return this.value; } - public static BuildState of(Integer state) { + public static BuildStateEnum of(Integer state) { return Arrays.stream(values()).filter((x) -> x.value == state).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateTypeEnum.java similarity index 91% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateTypeEnum.java index d4693d84d7..95cfed2419 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CandidateTypeEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum CandidateType { +public enum CandidateTypeEnum { /** non candidate */ NONE(0), @@ -32,7 +32,7 @@ public enum CandidateType { private final int value; - CandidateType(int value) { + CandidateTypeEnum(int value) { this.value = value; } @@ -40,7 +40,7 @@ public int get() { return this.value; } - public static CandidateType of(Integer value) { + public static CandidateTypeEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatus.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatusEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatus.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatusEnum.java index f065b864a0..fd11e9d943 100644 --- 
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatus.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointStatusEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum CheckPointStatus { +public enum CheckPointStatusEnum { /** IN_PROGRESS */ IN_PROGRESS(1), /** COMPLETED */ @@ -34,11 +34,11 @@ public int get() { return this.value; } - CheckPointStatus(int value) { + CheckPointStatusEnum(int value) { this.value = value; } - public static CheckPointStatus of(Integer value) { + public static CheckPointStatusEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointTypeEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointTypeEnum.java index e4a25e94c8..15e3448f96 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/CheckPointTypeEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum CheckPointType { +public enum CheckPointTypeEnum { /** CHECKPOINT */ CHECKPOINT(1), /** SAVEPOINT */ @@ -33,11 +33,11 @@ public int get() { return this.value; } - CheckPointType(int value) { + CheckPointTypeEnum(int value) { this.value = value; } - public static CheckPointType of(Integer value) { + public static CheckPointTypeEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileTypeEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileTypeEnum.java index 9cd62bf7fc..aeba51f18d 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ConfigFileTypeEnum.java @@ -23,7 +23,7 @@ /** configFile Type enum */ @Getter -public enum ConfigFileType { +public enum ConfigFileTypeEnum { YAML(1, "yaml"), PROPERTIES(2, "prop"), @@ -35,12 +35,12 @@ public enum ConfigFileType { private final int value; private final String typeName; - ConfigFileType(int value, String name) { + ConfigFileTypeEnum(int value, String name) { this.value = value; this.typeName = name; } - public static ConfigFileType of(Integer value) { + public static ConfigFileTypeEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == 
value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveTypeEnum.java similarity index 93% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveTypeEnum.java index 6de934c00f..bba8c9d8a6 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EffectiveTypeEnum.java @@ -20,7 +20,7 @@ import lombok.Getter; @Getter -public enum EffectiveType { +public enum EffectiveTypeEnum { /** config */ CONFIG(1), /** FLINKSQL */ @@ -28,7 +28,7 @@ public enum EffectiveType { private final int type; - EffectiveType(int value) { + EffectiveTypeEnum(int value) { this.type = value; } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineTypeEnum.java similarity index 92% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineTypeEnum.java index d0310e37b3..a2eecb3d1f 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/EngineTypeEnum.java @@ -24,7 +24,7 @@ /** Compute engine type. 
*/ @Getter -public enum EngineType { +public enum EngineTypeEnum { /** Apache Flink: activated by default */ FLINK(0), @@ -34,11 +34,11 @@ public enum EngineType { @EnumValue private final int code; - EngineType(int code) { + EngineTypeEnum(int code) { this.code = code; } - public static EngineType of(Integer code) { + public static EngineTypeEnum of(Integer code) { return Arrays.stream(values()).filter((x) -> x.code == code).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategy.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategyEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategy.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategyEnum.java index 9c4f8ad20f..a1f2be11af 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategy.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FailoverStrategyEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum FailoverStrategy { +public enum FailoverStrategyEnum { /** send alert */ ALERT(1), @@ -29,7 +29,7 @@ public enum FailoverStrategy { private final int value; - FailoverStrategy(int value) { + FailoverStrategyEnum(int value) { this.value = value; } @@ -37,7 +37,7 @@ public int get() { return this.value; } - public static FailoverStrategy of(Integer value) { + public static FailoverStrategyEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java similarity index 74% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppState.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java index 4875d5969f..77963c3211 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppState.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/FlinkAppStateEnum.java @@ -17,14 +17,14 @@ package org.apache.streampark.console.core.enums; -import org.apache.streampark.flink.kubernetes.enums.FlinkJobState; +import org.apache.streampark.flink.kubernetes.enums.FlinkJobStateEnum; import lombok.Getter; import scala.Enumeration; @Getter -public enum FlinkAppState { +public enum FlinkAppStateEnum { /** Added new job to database. 
*/ ADDED(0), @@ -107,42 +107,42 @@ public enum FlinkAppState { private final int value; - FlinkAppState(int value) { + FlinkAppStateEnum(int value) { this.value = value; } - public static FlinkAppState of(Integer state) { - for (FlinkAppState appState : values()) { + public static FlinkAppStateEnum of(Integer state) { + for (FlinkAppStateEnum appState : values()) { if (appState.value == state) { return appState; } } - return FlinkAppState.OTHER; + return FlinkAppStateEnum.OTHER; } - public static FlinkAppState of(String name) { - for (FlinkAppState appState : values()) { + public static FlinkAppStateEnum of(String name) { + for (FlinkAppStateEnum appState : values()) { if (appState.name().equals(name)) { return appState; } } - return FlinkAppState.OTHER; + return FlinkAppStateEnum.OTHER; } public static boolean isEndState(Integer appState) { - FlinkAppState flinkAppState = FlinkAppState.of(appState); - return FlinkAppState.CANCELED == flinkAppState - || FlinkAppState.FAILED == flinkAppState - || FlinkAppState.KILLED == flinkAppState - || FlinkAppState.FINISHED == flinkAppState - || FlinkAppState.SUCCEEDED == flinkAppState - || FlinkAppState.LOST == flinkAppState - || FlinkAppState.TERMINATED == flinkAppState; + FlinkAppStateEnum flinkAppStateEnum = FlinkAppStateEnum.of(appState); + return FlinkAppStateEnum.CANCELED == flinkAppStateEnum + || FlinkAppStateEnum.FAILED == flinkAppStateEnum + || FlinkAppStateEnum.KILLED == flinkAppStateEnum + || FlinkAppStateEnum.FINISHED == flinkAppStateEnum + || FlinkAppStateEnum.SUCCEEDED == flinkAppStateEnum + || FlinkAppStateEnum.LOST == flinkAppStateEnum + || FlinkAppStateEnum.TERMINATED == flinkAppStateEnum; } public static boolean isLost(Integer appState) { - FlinkAppState flinkAppState = FlinkAppState.of(appState); - return FlinkAppState.LOST == flinkAppState; + FlinkAppStateEnum flinkAppStateEnum = FlinkAppStateEnum.of(appState); + return FlinkAppStateEnum.LOST == flinkAppStateEnum; } /** @@ -152,8 +152,8 @@ public static boolean isLost(Integer appState) { @Deprecated public static class Bridge { /** covert from org.apache.streampark.flink.k8s.enums.FlinkJobState */ - public static FlinkAppState fromK8sFlinkJobState(Enumeration.Value flinkJobState) { - if (FlinkJobState.K8S_INITIALIZING() == flinkJobState) { + public static FlinkAppStateEnum fromK8sFlinkJobState(Enumeration.Value flinkJobState) { + if (FlinkJobStateEnum.K8S_INITIALIZING() == flinkJobState) { return INITIALIZING; } else { return of(flinkJobState.toString()); @@ -161,8 +161,8 @@ public static FlinkAppState fromK8sFlinkJobState(Enumeration.Value flinkJobState } /** covert to org.apache.streampark.flink.k8s.enums.FlinkJobState */ - public static Enumeration.Value toK8sFlinkJobState(FlinkAppState flinkAppState) { - return FlinkJobState.of(flinkAppState.name()); + public static Enumeration.Value toK8sFlinkJobState(FlinkAppStateEnum flinkAppStateEnum) { + return FlinkJobStateEnum.of(flinkAppStateEnum.name()); } } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedError.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedErrorEnum.java similarity index 86% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedError.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedErrorEnum.java index 
de45a74456..9ae2ddc05c 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedError.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitAuthorizedErrorEnum.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.enums; -public enum GitAuthorizedError { +public enum GitAuthorizedErrorEnum { /** Success. */ SUCCESS(0), @@ -33,12 +33,12 @@ public enum GitAuthorizedError { private final int value; - GitAuthorizedError(int value) { + GitAuthorizedErrorEnum(int value) { this.value = value; } - public static GitAuthorizedError of(Integer state) { - for (GitAuthorizedError error : values()) { + public static GitAuthorizedErrorEnum of(Integer state) { + for (GitAuthorizedErrorEnum error : values()) { if (error.value == state) { return error; } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredential.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredentialEnum.java similarity index 85% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredential.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredentialEnum.java index 748077ff33..07e8920431 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredential.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/GitCredentialEnum.java @@ -19,22 +19,22 @@ import java.util.Arrays; -public enum GitCredential { +public enum GitCredentialEnum { HTTPS(1), SSH(2); private final int value; - GitCredential(int value) { + GitCredentialEnum(int value) { this.value = value; } - public static GitCredential of(Integer value) { + public static GitCredentialEnum of(Integer value) { return Arrays.stream(values()).filter(x -> x.value == value).findFirst().orElse(null); } public static boolean isSSH(Integer gitCredential) { - return GitCredential.SSH == GitCredential.of(gitCredential); + return GitCredentialEnum.SSH == GitCredentialEnum.of(gitCredential); } public Integer getValue() { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginTypeEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginTypeEnum.java index 3b4f7c796f..20e2ae3606 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/LoginTypeEnum.java @@ -25,7 +25,7 @@ /** The user login type. 
*/ @Getter -public enum LoginType { +public enum LoginTypeEnum { /** sign in with password */ PASSWORD(0), @@ -38,15 +38,15 @@ public enum LoginType { @EnumValue private final int code; - LoginType(int code) { + LoginTypeEnum(int code) { this.code = code; } - public static LoginType of(Integer code) { + public static LoginTypeEnum of(Integer code) { return Arrays.stream(values()).filter((x) -> x.code == code).findFirst().orElse(null); } - public static LoginType of(String loginType) { + public static LoginTypeEnum of(String loginType) { return Arrays.stream(values()) .filter((x) -> Objects.equals(x.toString(), loginType)) .findFirst() diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeTypeEnum.java similarity index 91% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeTypeEnum.java index 29f67cc0be..5d6b228393 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/NoticeTypeEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum NoticeType { +public enum NoticeTypeEnum { /** exception */ EXCEPTION(1), /** message */ @@ -31,11 +31,11 @@ public int get() { return this.value; } - NoticeType(int value) { + NoticeTypeEnum(int value) { this.value = value; } - public static NoticeType of(Integer value) { + public static NoticeTypeEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/Operation.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OperationEnum.java similarity index 91% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/Operation.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OperationEnum.java index ba6cee3dd1..fa559eb4a9 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/Operation.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OperationEnum.java @@ -22,7 +22,7 @@ import java.util.Arrays; @Getter -public enum Operation { +public enum OperationEnum { RELEASE(0), START(1), SAVEPOINT(2), @@ -30,11 +30,11 @@ public enum Operation { private final int value; - Operation(int value) { + OperationEnum(int value) { this.value = value; } - public static Operation of(Integer option) { + public static OperationEnum of(Integer option) { return Arrays.stream(values()).filter((x) -> x.value == option).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionStateEnum.java similarity index 92% rename from 
streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionState.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionStateEnum.java index 500aefe4de..9a6b8ca1a3 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionState.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/OptionStateEnum.java @@ -22,7 +22,7 @@ import java.util.Arrays; @Getter -public enum OptionState { +public enum OptionStateEnum { /** Application which is currently action: none. */ NONE(0), @@ -39,11 +39,11 @@ public enum OptionState { private final int value; - OptionState(int value) { + OptionStateEnum(int value) { this.value = value; } - public static OptionState of(Integer state) { + public static OptionStateEnum of(Integer state) { return Arrays.stream(values()).filter((x) -> x.value == state).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionTypeEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionTypeEnum.java index 2609e9f5fe..989fd5baa8 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PermissionTypeEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum PermissionType { +public enum PermissionTypeEnum { USER(1), TEAM(2), APP(3); @@ -30,11 +30,11 @@ public int get() { return this.value; } - PermissionType(int value) { + PermissionTypeEnum(int value) { this.value = value; } - public static PermissionType of(Integer value) { + public static PermissionTypeEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value == value).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderTypeEnum.java similarity index 93% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderTypeEnum.java index b3804124a7..971e391a2b 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/PlaceholderTypeEnum.java @@ -18,7 +18,7 @@ package org.apache.streampark.console.core.enums; /** configFile Type enum */ -public enum PlaceholderType { +public enum PlaceholderTypeEnum { JOB_ID("job_id"), JOB_NAME("job_name"), @@ -27,7 +27,7 @@ public enum PlaceholderType { private final String name; - PlaceholderType(String name) { + 
PlaceholderTypeEnum(String name) { this.name = name; } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseState.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseStateEnum.java similarity index 92% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseState.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseStateEnum.java index ad47e1fa0b..db092156c7 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseState.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ReleaseStateEnum.java @@ -19,7 +19,7 @@ import java.util.Arrays; -public enum ReleaseState { +public enum ReleaseStateEnum { /** release failed */ FAILED(-1), @@ -46,7 +46,7 @@ public enum ReleaseState { private final int value; - ReleaseState(int value) { + ReleaseStateEnum(int value) { this.value = value; } @@ -54,7 +54,7 @@ public int get() { return this.value; } - public static ReleaseState of(Integer state) { + public static ReleaseStateEnum of(Integer state) { return Arrays.stream(values()).filter((x) -> x.value == state).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFrom.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFromEnum.java similarity index 90% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFrom.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFromEnum.java index d2e08addb6..03ce3787e1 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFrom.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceFromEnum.java @@ -22,7 +22,7 @@ import java.util.Arrays; @Getter -public enum ResourceFrom { +public enum ResourceFromEnum { /** cicd(build from cvs) */ CICD(1), @@ -32,11 +32,11 @@ public enum ResourceFrom { private final Integer value; - ResourceFrom(Integer value) { + ResourceFromEnum(Integer value) { this.value = value; } - public static ResourceFrom of(Integer value) { + public static ResourceFromEnum of(Integer value) { return Arrays.stream(values()).filter((x) -> x.value.equals(value)).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceTypeEnum.java similarity index 92% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceTypeEnum.java index 2a000a448b..cd8a20b812 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceType.java +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/ResourceTypeEnum.java @@ -24,7 +24,7 @@ /** The resource type. */ @Getter -public enum ResourceType { +public enum ResourceTypeEnum { /** Flink application */ FLINK_APP(0), @@ -43,11 +43,11 @@ public enum ResourceType { @EnumValue private final int code; - ResourceType(int code) { + ResourceTypeEnum(int code) { this.code = code; } - public static ResourceType of(Integer code) { + public static ResourceTypeEnum of(Integer code) { return Arrays.stream(values()).filter((x) -> x.code == code).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFrom.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFromEnum.java similarity index 97% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFrom.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFromEnum.java index 45e99a7b73..a77f131078 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFrom.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/StopFromEnum.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.enums; -public enum StopFrom { +public enum StopFromEnum { /** None */ NONE, /** StreamPark */ diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserType.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserTypeEnum.java similarity index 92% rename from streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserType.java rename to streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserTypeEnum.java index 73e8a07b19..29cfe74a29 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserType.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/enums/UserTypeEnum.java @@ -24,7 +24,7 @@ /** The user type. */ @Getter -public enum UserType { +public enum UserTypeEnum { /** The admin of StreamPark. 
*/ ADMIN(1), @@ -34,11 +34,11 @@ public enum UserType { @EnumValue private final int code; - UserType(int code) { + UserTypeEnum(int code) { this.code = code; } - public static UserType of(Integer code) { + public static UserTypeEnum of(Integer code) { return Arrays.stream(values()).filter((x) -> x.code == code).findFirst().orElse(null); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/metrics/flink/CheckPoints.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/metrics/flink/CheckPoints.java index b92fc40596..812d84ea31 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/metrics/flink/CheckPoints.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/metrics/flink/CheckPoints.java @@ -17,8 +17,8 @@ package org.apache.streampark.console.core.metrics.flink; -import org.apache.streampark.console.core.enums.CheckPointStatus; -import org.apache.streampark.console.core.enums.CheckPointType; +import org.apache.streampark.console.core.enums.CheckPointStatusEnum; +import org.apache.streampark.console.core.enums.CheckPointTypeEnum; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; @@ -73,17 +73,17 @@ public static class CheckPoint implements Serializable { private Boolean discarded; - public CheckPointStatus getCheckPointStatus() { - return CheckPointStatus.valueOf(this.status); + public CheckPointStatusEnum getCheckPointStatus() { + return CheckPointStatusEnum.valueOf(this.status); } - public CheckPointType getCheckPointType() { + public CheckPointTypeEnum getCheckPointType() { if ("CHECKPOINT".equals(this.checkpointType)) { - return CheckPointType.CHECKPOINT; + return CheckPointTypeEnum.CHECKPOINT; } else if ("SAVEPOINT".equals(this.checkpointType)) { - return CheckPointType.SAVEPOINT; + return CheckPointTypeEnum.SAVEPOINT; } - return CheckPointType.SYNC_SAVEPOINT; + return CheckPointTypeEnum.SYNC_SAVEPOINT; } public String getPath() { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java index c4cb68a27e..e3f685a2fc 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/runner/EnvInitializer.java @@ -22,7 +22,7 @@ import org.apache.streampark.common.conf.InternalConfigHolder; import org.apache.streampark.common.conf.InternalOption; import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.StorageType; +import org.apache.streampark.common.enums.StorageTypeEnum; import org.apache.streampark.common.fs.FsOperator; import org.apache.streampark.common.util.SystemPropertyUtils; import org.apache.streampark.common.util.Utils; @@ -53,7 +53,7 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import static org.apache.streampark.common.enums.StorageType.LFS; +import static org.apache.streampark.common.enums.StorageTypeEnum.LFS; @Order(1) @Slf4j @@ -64,7 +64,7 @@ public class EnvInitializer implements ApplicationRunner { @Autowired private SettingService settingService; - private final Set initialized = 
new HashSet<>(2);
+ private final Set initialized = new HashSet<>(2);
private final FileFilter fileFilter = p -> !".gitkeep".equals(p.getName());
@@ -126,17 +126,17 @@ private void overrideSystemProp(String key, String defaultValue) {
SystemPropertyUtils.set(key, value);
}
- public synchronized void storageInitialize(StorageType storageType) {
+ public synchronized void storageInitialize(StorageTypeEnum storageTypeEnum) {
- if (initialized.contains(storageType)) {
+ if (initialized.contains(storageTypeEnum)) {
return;
}
- FsOperator fsOperator = FsOperator.of(storageType);
- Workspace workspace = Workspace.of(storageType);
+ FsOperator fsOperator = FsOperator.of(storageTypeEnum);
+ Workspace workspace = Workspace.of(storageTypeEnum);
// 1. prepare workspace dir
- if (LFS == storageType) {
+ if (LFS == storageTypeEnum) {
fsOperator.mkdirsIfNotExists(Workspace.APP_LOCAL_DIST());
}
Arrays.asList(
@@ -198,18 +198,18 @@ public synchronized void storageInitialize(StorageType storageType) {
FsOperator.lfs().mkdirs(localMavenRepo);
}
- initialized.add(storageType);
+ initialized.add(storageTypeEnum);
}
- public void checkFlinkEnv(StorageType storageType, FlinkEnv flinkEnv) throws IOException {
+ public void checkFlinkEnv(StorageTypeEnum storageTypeEnum, FlinkEnv flinkEnv) throws IOException {
String flinkLocalHome = flinkEnv.getFlinkHome();
if (flinkLocalHome == null) {
throw new ExceptionInInitializerError(
"[StreamPark] FLINK_HOME is undefined,Make sure that Flink is installed.");
}
- Workspace workspace = Workspace.of(storageType);
+ Workspace workspace = Workspace.of(storageTypeEnum);
String appFlink = workspace.APP_FLINK();
- FsOperator fsOperator = FsOperator.of(storageType);
+ FsOperator fsOperator = FsOperator.of(storageTypeEnum);
if (!fsOperator.exists(appFlink)) {
log.info("checkFlinkEnv, now mkdir [{}] starting ...", appFlink);
fsOperator.mkdirs(appFlink);
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/AppBuildPipeService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/AppBuildPipeService.java
index 4b455b5c5c..531626a396 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/AppBuildPipeService.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/AppBuildPipeService.java
@@ -19,7 +19,7 @@
import org.apache.streampark.console.core.entity.AppBuildPipeline;
import org.apache.streampark.flink.packer.pipeline.DockerResolvedSnapshot;
-import org.apache.streampark.flink.packer.pipeline.PipelineStatus;
+import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum;
import com.baomidou.mybatisplus.extension.service.IService;
@@ -55,7 +55,7 @@ public interface AppBuildPipeService extends IService<AppBuildPipeline> {
boolean allowToBuildNow(@Nonnull Long appId);
/** list pipeline status on application id list */
- Map<Long, PipelineStatus> listPipelineStatus(List<Long> appIds);
+ Map<Long, PipelineStatusEnum> listPipelineStatus(List<Long> appIds);
/**
* delete appBuildPipeline By application
diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/EffectiveService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/EffectiveService.java
index c003526f74..7488be8670 100644
--- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/EffectiveService.java
+++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/EffectiveService.java @@ -18,17 +18,17 @@ package org.apache.streampark.console.core.service; import org.apache.streampark.console.core.entity.Effective; -import org.apache.streampark.console.core.enums.EffectiveType; +import org.apache.streampark.console.core.enums.EffectiveTypeEnum; import com.baomidou.mybatisplus.extension.service.IService; public interface EffectiveService extends IService { - void delete(Long appId, EffectiveType config); + void delete(Long appId, EffectiveTypeEnum config); - Effective get(Long appId, EffectiveType config); + Effective get(Long appId, EffectiveTypeEnum config); - void saveOrUpdate(Long appId, EffectiveType type, Long id); + void saveOrUpdate(Long appId, EffectiveTypeEnum type, Long id); void removeApp(Long appId); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkClusterService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkClusterService.java index 18393545aa..2c8ed7ff29 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkClusterService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkClusterService.java @@ -17,8 +17,8 @@ package org.apache.streampark.console.core.service; -import org.apache.streampark.common.enums.ClusterState; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ClusterStateEnum; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.console.core.bean.ResponseResult; import org.apache.streampark.console.core.entity.FlinkCluster; @@ -49,7 +49,7 @@ public interface FlinkClusterService extends IService { Boolean existsByFlinkEnvId(Long id); - List getByExecutionModes(Collection executionModes); + List getByExecutionModes(Collection executionModeEnums); - void updateClusterState(Long id, ClusterState state); + void updateClusterState(Long id, ClusterStateEnum state); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkSqlService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkSqlService.java index 0a836821aa..88b3f93c36 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkSqlService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/FlinkSqlService.java @@ -20,7 +20,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.FlinkSql; -import org.apache.streampark.console.core.enums.CandidateType; +import org.apache.streampark.console.core.enums.CandidateTypeEnum; import org.apache.streampark.flink.core.FlinkSqlValidationResult; import com.baomidou.mybatisplus.core.metadata.IPage; @@ -32,7 +32,7 @@ public interface FlinkSqlService extends IService { void create(FlinkSql flinkSql); - void setCandidate(CandidateType candidateType, Long appId, Long sqlId); + void setCandidate(CandidateTypeEnum candidateTypeEnum, Long appId, Long sqlId); FlinkSql getEffective(Long appId, boolean decode); @@ -40,7 
+40,7 @@ public interface FlinkSqlService extends IService { List history(Application application); - FlinkSql getCandidate(Long appId, CandidateType type); + FlinkSql getCandidate(Long appId, CandidateTypeEnum type); void toEffective(Long appId, Long sqlId); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/MessageService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/MessageService.java index a26fd8be72..0d33ff2ba1 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/MessageService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/MessageService.java @@ -19,7 +19,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.core.entity.Message; -import org.apache.streampark.console.core.enums.NoticeType; +import org.apache.streampark.console.core.enums.NoticeTypeEnum; import com.baomidou.mybatisplus.core.metadata.IPage; import com.baomidou.mybatisplus.extension.service.IService; @@ -28,5 +28,5 @@ public interface MessageService extends IService { void push(Message message); - IPage getUnRead(NoticeType noticeType, RestRequest request); + IPage getUnRead(NoticeTypeEnum noticeTypeEnum, RestRequest request); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/YarnQueueService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/YarnQueueService.java index 6f92456c44..eaaafd649b 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/YarnQueueService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/YarnQueueService.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.service; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.core.bean.ResponseResult; import org.apache.streampark.console.core.entity.YarnQueue; @@ -37,7 +37,7 @@ public interface YarnQueueService extends IService { void deleteYarnQueue(YarnQueue yarnQueue); - void checkQueueLabel(ExecutionMode executionMode, String queueLabel); + void checkQueueLabel(ExecutionModeEnum executionModeEnum, String queueLabel); boolean isDefaultQueue(String queueLabel); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertServiceImpl.java index dfaac9873a..1175096e8a 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/alert/impl/AlertServiceImpl.java @@ -22,7 +22,7 @@ import org.apache.streampark.console.core.bean.AlertConfigParams; import org.apache.streampark.console.core.bean.AlertTemplate; import org.apache.streampark.console.core.entity.AlertConfig; -import org.apache.streampark.console.core.enums.AlertType; +import 
org.apache.streampark.console.core.enums.AlertTypeEnum; import org.apache.streampark.console.core.service.alert.AlertConfigService; import org.apache.streampark.console.core.service.alert.AlertService; @@ -52,18 +52,18 @@ public boolean alert(Long alertConfigId, AlertTemplate alertTemplate) { AlertConfig alertConfig = alertConfigService.getById(alertConfigId); try { AlertConfigParams params = AlertConfigParams.of(alertConfig); - List alertTypes = AlertType.decode(params.getAlertType()); - if (CollectionUtils.isEmpty(alertTypes)) { + List alertTypeEnums = AlertTypeEnum.decode(params.getAlertType()); + if (CollectionUtils.isEmpty(alertTypeEnums)) { return true; } // No use thread pool, ensure that the alarm can be sent successfully Tuple2 reduce = - alertTypes.stream() + alertTypeEnums.stream() .map( - alertType -> { + alertTypeEnum -> { try { boolean alertRes = - SpringContextUtils.getBean(alertType.getClazz()) + SpringContextUtils.getBean(alertTypeEnum.getClazz()) .doAlert(params, alertTemplate); return new Tuple2(alertRes, null); } catch (AlertException e) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java index 57fbdef59d..5dc9349a73 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationInfoService.java @@ -19,7 +19,7 @@ import org.apache.streampark.console.base.exception.ApplicationException; import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.enums.AppExistsState; +import org.apache.streampark.console.core.enums.AppExistsStateEnum; import com.baomidou.mybatisplus.extension.service.IService; @@ -140,7 +140,7 @@ public interface ApplicationInfoService extends IService { * @param appParam The application to check for existence. * @return AppExistsState indicating the existence state of the application. */ - AppExistsState checkExists(Application appParam); + AppExistsStateEnum checkExists(Application appParam); /** * Persists the metrics of the given application. diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java index c472b0cc36..4ea3708e7e 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/ApplicationManageService.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.service.application; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.core.entity.Application; @@ -143,12 +143,12 @@ public interface ApplicationManageService extends IService { * Retrieves a list of applications by team ID and execution modes. 
* * @param teamId The ID of the team to filter by - * @param executionModes The collection of execution modes to filter by + * @param executionModeEnums The collection of execution modes to filter by * @return A list of applications that belong to the specified team and have the specified * execution modes */ List getByTeamIdAndExecutionModes( - Long teamId, Collection executionModes); + Long teamId, Collection executionModeEnums); /** * Retrieves a list of applications be probing or need to probe. diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java index a364c1db92..7b5c63900d 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationActionServiceImpl.java @@ -20,10 +20,10 @@ import org.apache.streampark.common.conf.ConfigConst; import org.apache.streampark.common.conf.K8sFlinkConfig; import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.DevelopmentMode; -import org.apache.streampark.common.enums.ExecutionMode; -import org.apache.streampark.common.enums.ResolveOrder; -import org.apache.streampark.common.enums.RestoreMode; +import org.apache.streampark.common.enums.DevelopmentModeEnum; +import org.apache.streampark.common.enums.ExecutionModeEnum; +import org.apache.streampark.common.enums.ResolveOrderEnum; +import org.apache.streampark.common.enums.RestoreModeEnum; import org.apache.streampark.common.fs.FsOperator; import org.apache.streampark.common.util.CompletableFutureUtils; import org.apache.streampark.common.util.DeflaterUtils; @@ -43,12 +43,12 @@ import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.entity.Resource; import org.apache.streampark.console.core.entity.SavePoint; -import org.apache.streampark.console.core.enums.CheckPointType; -import org.apache.streampark.console.core.enums.ConfigFileType; -import org.apache.streampark.console.core.enums.FlinkAppState; -import org.apache.streampark.console.core.enums.Operation; -import org.apache.streampark.console.core.enums.OptionState; -import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.enums.CheckPointTypeEnum; +import org.apache.streampark.console.core.enums.ConfigFileTypeEnum; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; +import org.apache.streampark.console.core.enums.OperationEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; import org.apache.streampark.console.core.mapper.ApplicationMapper; import org.apache.streampark.console.core.service.AppBuildPipeService; import org.apache.streampark.console.core.service.ApplicationBackUpService; @@ -186,12 +186,12 @@ public void revoke(Application appParam) throws ApplicationException { LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); updateWrapper.eq(Application::getId, application.getId()); if (application.isFlinkSqlJob()) { - updateWrapper.set(Application::getRelease, ReleaseState.FAILED.get()); + 
updateWrapper.set(Application::getRelease, ReleaseStateEnum.FAILED.get()); } else { - updateWrapper.set(Application::getRelease, ReleaseState.NEED_RELEASE.get()); + updateWrapper.set(Application::getRelease, ReleaseStateEnum.NEED_RELEASE.get()); } if (!application.isRunning()) { - updateWrapper.set(Application::getState, FlinkAppState.REVOKED.getValue()); + updateWrapper.set(Application::getState, FlinkAppStateEnum.REVOKED.getValue()); } baseMapper.update(null, updateWrapper); } @@ -228,12 +228,12 @@ public void forcedStop(Application appParam) { @Override public void cancel(Application appParam) throws Exception { - FlinkAppHttpWatcher.setOptionState(appParam.getId(), OptionState.CANCELLING); + FlinkAppHttpWatcher.setOptionState(appParam.getId(), OptionStateEnum.CANCELLING); Application application = getById(appParam.getId()); - application.setState(FlinkAppState.CANCELLING.getValue()); + application.setState(FlinkAppStateEnum.CANCELLING.getValue()); ApplicationLog applicationLog = new ApplicationLog(); - applicationLog.setOptionName(Operation.CANCEL.getValue()); + applicationLog.setOptionName(OperationEnum.CANCEL.getValue()); applicationLog.setAppId(application.getId()); applicationLog.setJobManagerUrl(application.getJobManagerUrl()); applicationLog.setOptionTime(new Date()); @@ -241,9 +241,9 @@ public void cancel(Application appParam) throws Exception { if (appParam.getSavePointed()) { FlinkAppHttpWatcher.addSavepoint(application.getId()); - application.setOptionState(OptionState.SAVEPOINTING.getValue()); + application.setOptionState(OptionStateEnum.SAVEPOINTING.getValue()); } else { - application.setOptionState(OptionState.CANCELLING.getValue()); + application.setOptionState(OptionStateEnum.CANCELLING.getValue()); } application.setOptionTime(new Date()); @@ -266,10 +266,10 @@ public void cancel(Application appParam) throws Exception { } String clusterId = null; - if (ExecutionMode.isKubernetesMode(application.getExecutionMode())) { + if (ExecutionModeEnum.isKubernetesMode(application.getExecutionMode())) { clusterId = application.getClusterId(); - } else if (ExecutionMode.isYarnMode(application.getExecutionMode())) { - if (ExecutionMode.YARN_SESSION == application.getExecutionModeEnum()) { + } else if (ExecutionModeEnum.isYarnMode(application.getExecutionMode())) { + if (ExecutionModeEnum.YARN_SESSION == application.getExecutionModeEnum()) { FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); ApiAlertException.throwIfNull( cluster, @@ -284,7 +284,7 @@ public void cancel(Application appParam) throws Exception { Map properties = new HashMap<>(); - if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { + if (ExecutionModeEnum.isRemoteMode(application.getExecutionModeEnum())) { FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); ApiAlertException.throwIfNull( cluster, @@ -301,7 +301,7 @@ public void cancel(Application appParam) throws Exception { new CancelRequest( application.getId(), flinkEnv.getFlinkVersion(), - ExecutionMode.of(application.getExecutionMode()), + ExecutionModeEnum.of(application.getExecutionMode()), properties, clusterId, application.getJobId(), @@ -330,7 +330,7 @@ public void cancel(Application appParam) throws Exception { savePoint.setPath(savePointDir); savePoint.setAppId(application.getId()); savePoint.setLatest(true); - savePoint.setType(CheckPointType.SAVEPOINT.get()); + savePoint.setType(CheckPointTypeEnum.SAVEPOINT.get()); savePoint.setCreateTime(new Date()); 
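
A caller-side sketch of the cancel path shown above. ApplicationActionService, setId and setSavePointed are assumed names used only for illustration (the hunks show the impl class and the matching getters); the option-state mapping itself comes from this hunk.

    import org.apache.streampark.console.core.entity.Application;
    import org.apache.streampark.console.core.service.application.ApplicationActionService; // assumed interface of the impl above

    // savePointed == true  -> cancel() records OptionStateEnum.SAVEPOINTING and triggers a savepoint first
    // savePointed == false -> cancel() records OptionStateEnum.CANCELLING and stops the job directly
    void cancelWithSavepoint(ApplicationActionService actionService, Long appId) throws Exception {
      Application appParam = new Application();
      appParam.setId(appId);          // assumed setter; cancel() re-loads the entity via getById(appParam.getId())
      appParam.setSavePointed(true);  // assumed setter mirroring the getSavePointed() accessor used above
      actionService.cancel(appParam);
    }
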
savePoint.setTriggerTime(triggerTime); savePointService.save(savePoint); @@ -344,8 +344,8 @@ public void cancel(Application appParam) throws Exception { updateToStopped(application); } else { log.error("stop flink job fail.", e); - application.setOptionState(OptionState.NONE.getValue()); - application.setState(FlinkAppState.FAILED.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); + application.setState(FlinkAppStateEnum.FAILED.getValue()); updateById(application); if (appParam.getSavePointed()) { @@ -407,7 +407,7 @@ public void start(Application appParam, boolean auto) throws Exception { String jobId = new JobID().toHexString(); ApplicationLog applicationLog = new ApplicationLog(); - applicationLog.setOptionName(Operation.START.getValue()); + applicationLog.setOptionName(OperationEnum.START.getValue()); applicationLog.setAppId(application.getId()); applicationLog.setOptionTime(new Date()); @@ -439,7 +439,7 @@ public void start(Application appParam, boolean auto) throws Exception { String appConf = userJarAndAppConf.f1; BuildResult buildResult = buildPipeline.getBuildResult(); - if (ExecutionMode.YARN_APPLICATION == application.getExecutionModeEnum()) { + if (ExecutionModeEnum.YARN_APPLICATION == application.getExecutionModeEnum()) { buildResult = new ShadedBuildResponse(null, flinkUserJar, true); } @@ -450,17 +450,19 @@ public void start(Application appParam, boolean auto) throws Exception { SubmitRequest submitRequest = new SubmitRequest( flinkEnv.getFlinkVersion(), - ExecutionMode.of(application.getExecutionMode()), + ExecutionModeEnum.of(application.getExecutionMode()), getProperties(application), flinkEnv.getFlinkConf(), - DevelopmentMode.of(application.getJobType()), + DevelopmentModeEnum.of(application.getJobType()), application.getId(), jobId, application.getJobName(), appConf, application.getApplicationType(), getSavePointed(appParam), - appParam.getRestoreMode() == null ? null : RestoreMode.of(appParam.getRestoreMode()), + appParam.getRestoreMode() == null + ? 
null + : RestoreModeEnum.of(appParam.getRestoreMode()), applicationArgs, buildResult, kubernetesSubmitParam, @@ -501,7 +503,7 @@ public void start(Application appParam, boolean auto) throws Exception { application.setStartTime(new Date()); application.setEndTime(null); if (isKubernetesApp(application)) { - application.setRelease(ReleaseState.DONE.get()); + application.setRelease(ReleaseStateEnum.DONE.get()); } updateById(application); @@ -509,7 +511,7 @@ public void start(Application appParam, boolean auto) throws Exception { if (isKubernetesApp(application)) { k8SFlinkTrackMonitor.doWatching(toTrackId(application)); } else { - FlinkAppHttpWatcher.setOptionState(appParam.getId(), OptionState.STARTING); + FlinkAppHttpWatcher.setOptionState(appParam.getId(), OptionStateEnum.STARTING); FlinkAppHttpWatcher.doWatching(application); } @@ -525,8 +527,8 @@ public void start(Application appParam, boolean auto) throws Exception { applicationLog.setException(exception); applicationLog.setSuccess(false); Application app = getById(appParam.getId()); - app.setState(FlinkAppState.FAILED.getValue()); - app.setOptionState(OptionState.NONE.getValue()); + app.setState(FlinkAppStateEnum.FAILED.getValue()); + app.setOptionState(OptionStateEnum.NONE.getValue()); updateById(app); if (isKubernetesApp(app)) { k8SFlinkTrackMonitor.unWatching(toTrackId(app)); @@ -538,7 +540,8 @@ public void start(Application appParam, boolean auto) throws Exception { .whenComplete( (t, e) -> { if (!K8sFlinkConfig.isV2Enabled() - && ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { + && ExecutionModeEnum.isKubernetesApplicationMode( + application.getExecutionMode())) { String domainName = settingService.getIngressModeDefault(); if (StringUtils.isNotBlank(domainName)) { try { @@ -551,8 +554,8 @@ public void start(Application appParam, boolean auto) throws Exception { applicationLog.setException(e.getMessage()); applicationLog.setSuccess(false); applicationLogService.save(applicationLog); - application.setState(FlinkAppState.FAILED.getValue()); - application.setOptionState(OptionState.NONE.getValue()); + application.setState(FlinkAppStateEnum.FAILED.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); updateById(application); return; } @@ -565,17 +568,17 @@ public void start(Application appParam, boolean auto) throws Exception { } private void starting(Application application) { - application.setState(FlinkAppState.STARTING.getValue()); + application.setState(FlinkAppStateEnum.STARTING.getValue()); application.setOptionTime(new Date()); updateById(application); } private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Application application) { - ExecutionMode executionMode = application.getExecutionModeEnum(); + ExecutionModeEnum executionModeEnum = application.getExecutionModeEnum(); ApplicationConfig applicationConfig = configService.getEffective(application.getId()); ApiAlertException.throwIfNull( - executionMode, "ExecutionMode can't be null, start application failed."); + executionModeEnum, "ExecutionMode can't be null, start application failed."); String flinkUserJar = null; String appConf = null; @@ -592,7 +595,7 @@ private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati ? 
null : String.format("yaml://%s", applicationConfig.getContent()); // 3) client - if (ExecutionMode.YARN_APPLICATION == executionMode) { + if (ExecutionModeEnum.YARN_APPLICATION == executionModeEnum) { String clientPath = Workspace.remote().APP_CLIENT(); flinkUserJar = String.format("%s/%s", clientPath, sqlDistJar); } @@ -624,8 +627,8 @@ private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati } else { switch (application.getApplicationType()) { case STREAMPARK_FLINK: - ConfigFileType fileType = ConfigFileType.of(applicationConfig.getFormat()); - if (fileType != null && ConfigFileType.UNKNOWN != fileType) { + ConfigFileTypeEnum fileType = ConfigFileTypeEnum.of(applicationConfig.getFormat()); + if (fileType != null && ConfigFileTypeEnum.UNKNOWN != fileType) { appConf = String.format( "%s://%s", fileType.getTypeName(), applicationConfig.getContent()); @@ -646,7 +649,7 @@ private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati } } - if (ExecutionMode.YARN_APPLICATION == executionMode) { + if (ExecutionModeEnum.YARN_APPLICATION == executionModeEnum) { switch (application.getApplicationType()) { case STREAMPARK_FLINK: flinkUserJar = @@ -679,7 +682,7 @@ private Tuple2 getUserJarAndAppConf(FlinkEnv flinkEnv, Applicati private Map getProperties(Application application) { Map properties = new HashMap<>(application.getOptionMap()); - if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { + if (ExecutionModeEnum.isRemoteMode(application.getExecutionModeEnum())) { FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); ApiAlertException.throwIfNull( cluster, @@ -690,8 +693,8 @@ private Map getProperties(Application application) { URI activeAddress = cluster.getRemoteURI(); properties.put(RestOptions.ADDRESS.key(), activeAddress.getHost()); properties.put(RestOptions.PORT.key(), activeAddress.getPort()); - } else if (ExecutionMode.isYarnMode(application.getExecutionModeEnum())) { - if (ExecutionMode.YARN_SESSION == application.getExecutionModeEnum()) { + } else if (ExecutionModeEnum.isYarnMode(application.getExecutionModeEnum())) { + if (ExecutionModeEnum.YARN_SESSION == application.getExecutionModeEnum()) { FlinkCluster cluster = flinkClusterService.getById(application.getFlinkClusterId()); ApiAlertException.throwIfNull( cluster, @@ -710,11 +713,11 @@ private Map getProperties(Application application) { Optional.ofNullable(yarnLabelExpr) .ifPresent(yLabel -> properties.put(ConfigConst.KEY_YARN_APP_NODE_LABEL(), yLabel)); } - } else if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { + } else if (ExecutionModeEnum.isKubernetesMode(application.getExecutionModeEnum())) { properties.put(ConfigConst.KEY_K8S_IMAGE_PULL_POLICY(), "Always"); } - if (ExecutionMode.isKubernetesApplicationMode(application.getExecutionMode())) { + if (ExecutionModeEnum.isKubernetesApplicationMode(application.getExecutionMode())) { try { HadoopUtils.yarnClient(); properties.put(JobManagerOptions.ARCHIVE_DIR.key(), Workspace.ARCHIVES_FILE_PATH()); @@ -730,9 +733,9 @@ private Map getProperties(Application application) { Map dynamicProperties = PropertiesUtils.extractDynamicPropertiesAsJava(application.getDynamicProperties()); properties.putAll(dynamicProperties); - ResolveOrder resolveOrder = ResolveOrder.of(application.getResolveOrder()); - if (resolveOrder != null) { - properties.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrder.getName()); + ResolveOrderEnum resolveOrderEnum = ResolveOrderEnum.of(application.getResolveOrder()); + if 
(resolveOrderEnum != null) { + properties.put(CoreOptions.CLASSLOADER_RESOLVE_ORDER.key(), resolveOrderEnum.getName()); } return properties; @@ -740,8 +743,8 @@ private Map getProperties(Application application) { private void updateToStopped(Application app) { Application application = getById(app); - application.setOptionState(OptionState.NONE.getValue()); - application.setState(FlinkAppState.CANCELED.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); + application.setState(FlinkAppStateEnum.CANCELED.getValue()); application.setOptionTime(new Date()); updateById(application); savePointService.expire(application.getId()); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java index 528f31169f..1be928ea65 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationInfoServiceImpl.java @@ -19,7 +19,7 @@ import org.apache.streampark.common.conf.K8sFlinkConfig; import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.fs.LfsOperator; import org.apache.streampark.common.util.ExceptionUtils; import org.apache.streampark.common.util.Utils; @@ -32,8 +32,8 @@ import org.apache.streampark.console.core.entity.FlinkCluster; import org.apache.streampark.console.core.entity.FlinkEnv; import org.apache.streampark.console.core.entity.Project; -import org.apache.streampark.console.core.enums.AppExistsState; -import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.enums.AppExistsStateEnum; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; import org.apache.streampark.console.core.mapper.ApplicationMapper; import org.apache.streampark.console.core.metrics.flink.JobsOverview; import org.apache.streampark.console.core.runner.EnvInitializer; @@ -73,7 +73,7 @@ import java.util.regex.Pattern; import java.util.stream.Collectors; -import static org.apache.streampark.common.enums.StorageType.LFS; +import static org.apache.streampark.common.enums.StorageTypeEnum.LFS; import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; @@ -135,7 +135,7 @@ public Map dashboard(Long teamId) { if (app.getAvailableSlot() != null) { availableSlot += app.getAvailableSlot(); } - if (app.getState() == FlinkAppState.RUNNING.getValue()) { + if (app.getState() == FlinkAppStateEnum.RUNNING.getValue()) { runningJob++; } JobsOverview.Task task = app.getOverview(); @@ -203,8 +203,8 @@ public boolean checkEnv(Application appParam) throws ApplicationException { envInitializer.checkFlinkEnv(application.getStorageType(), flinkEnv); envInitializer.storageInitialize(application.getStorageType()); - if (ExecutionMode.YARN_SESSION == application.getExecutionModeEnum() - || ExecutionMode.REMOTE == application.getExecutionModeEnum()) { + if (ExecutionModeEnum.YARN_SESSION == 
application.getExecutionModeEnum() + || ExecutionModeEnum.REMOTE == application.getExecutionModeEnum()) { FlinkCluster flinkCluster = flinkClusterService.getById(application.getFlinkClusterId()); boolean conned = flinkClusterWatcher.verifyClusterConnection(flinkCluster); if (!conned) { @@ -221,7 +221,7 @@ public boolean checkEnv(Application appParam) throws ApplicationException { @Override public boolean checkAlter(Application appParam) { Long appId = appParam.getId(); - if (FlinkAppState.CANCELED != appParam.getStateEnum()) { + if (FlinkAppStateEnum.CANCELED != appParam.getStateEnum()) { return false; } long cancelUserId = FlinkAppHttpWatcher.getCanceledJobUserId(appId); @@ -248,7 +248,7 @@ public boolean existsRunningByClusterId(Long clusterId) { .anyMatch( application -> clusterId.equals(application.getFlinkClusterId()) - && FlinkAppState.RUNNING == application.getStateEnum()); + && FlinkAppStateEnum.RUNNING == application.getStateEnum()); } @Override @@ -322,7 +322,7 @@ public String k8sStartLog(Long id, Integer offset, Integer limit) throws Excepti Application application = getById(id); ApiAlertException.throwIfNull( application, String.format("The application id=%s can't be found.", id)); - if (ExecutionMode.isKubernetesMode(application.getExecutionModeEnum())) { + if (ExecutionModeEnum.isKubernetesMode(application.getExecutionModeEnum())) { CompletableFuture future = CompletableFuture.supplyAsync( () -> @@ -378,10 +378,10 @@ public String getYarnName(Application appParam) { * @return The state of the application's existence. */ @Override - public AppExistsState checkExists(Application appParam) { + public AppExistsStateEnum checkExists(Application appParam) { if (!checkJobName(appParam.getJobName())) { - return AppExistsState.INVALID; + return AppExistsStateEnum.INVALID; } boolean existsByJobName = this.existsByJobName(appParam.getJobName()); @@ -389,43 +389,43 @@ public AppExistsState checkExists(Application appParam) { if (appParam.getId() != null) { Application app = getById(appParam.getId()); if (app.getJobName().equals(appParam.getJobName())) { - return AppExistsState.NO; + return AppExistsStateEnum.NO; } if (existsByJobName) { - return AppExistsState.IN_DB; + return AppExistsStateEnum.IN_DB; } // has stopped status - if (FlinkAppState.isEndState(app.getState())) { + if (FlinkAppStateEnum.isEndState(app.getState())) { // check whether jobName exists on yarn - if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) + if (ExecutionModeEnum.isYarnMode(appParam.getExecutionMode()) && YarnUtils.isContains(appParam.getJobName())) { - return AppExistsState.IN_YARN; + return AppExistsStateEnum.IN_YARN; } // check whether clusterId, namespace, jobId on kubernetes - else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) + else if (ExecutionModeEnum.isKubernetesMode(appParam.getExecutionMode()) && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { - return AppExistsState.IN_KUBERNETES; + return AppExistsStateEnum.IN_KUBERNETES; } } } else { if (existsByJobName) { - return AppExistsState.IN_DB; + return AppExistsStateEnum.IN_DB; } // check whether jobName exists on yarn - if (ExecutionMode.isYarnMode(appParam.getExecutionMode()) + if (ExecutionModeEnum.isYarnMode(appParam.getExecutionMode()) && YarnUtils.isContains(appParam.getJobName())) { - return AppExistsState.IN_YARN; + return AppExistsStateEnum.IN_YARN; } // check whether clusterId, namespace, jobId on kubernetes - else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode()) + else if 
(ExecutionModeEnum.isKubernetesMode(appParam.getExecutionMode()) && k8SFlinkTrackMonitor.checkIsInRemoteCluster(toTrackId(appParam))) { - return AppExistsState.IN_KUBERNETES; + return AppExistsStateEnum.IN_KUBERNETES; } } - return AppExistsState.NO; + return AppExistsStateEnum.NO; } private boolean existsByJobName(String jobName) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java index 4a46462ff0..48d3638edd 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/application/impl/ApplicationManageServiceImpl.java @@ -19,8 +19,8 @@ import org.apache.streampark.common.conf.K8sFlinkConfig; import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.ExecutionMode; -import org.apache.streampark.common.enums.StorageType; +import org.apache.streampark.common.enums.ExecutionModeEnum; +import org.apache.streampark.common.enums.StorageTypeEnum; import org.apache.streampark.common.fs.HdfsOperator; import org.apache.streampark.common.util.DeflaterUtils; import org.apache.streampark.console.base.domain.RestRequest; @@ -34,11 +34,11 @@ import org.apache.streampark.console.core.entity.ApplicationConfig; import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.entity.Resource; -import org.apache.streampark.console.core.enums.CandidateType; +import org.apache.streampark.console.core.enums.CandidateTypeEnum; import org.apache.streampark.console.core.enums.ChangeTypeEnum; -import org.apache.streampark.console.core.enums.FlinkAppState; -import org.apache.streampark.console.core.enums.OptionState; -import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; import org.apache.streampark.console.core.mapper.ApplicationMapper; import org.apache.streampark.console.core.service.AppBuildPipeService; import org.apache.streampark.console.core.service.ApplicationBackUpService; @@ -57,7 +57,7 @@ import org.apache.streampark.console.core.task.FlinkK8sObserverStub; import org.apache.streampark.console.core.utils.FlinkK8sDataTypeConverterStub; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; -import org.apache.streampark.flink.packer.pipeline.PipelineStatus; +import org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum; import org.apache.commons.lang3.StringUtils; @@ -203,7 +203,7 @@ private void removeApp(Application application) { .delete(application.getWorkspace().APP_WORKSPACE().concat("/").concat(appId.toString())); // try to delete yarn-application, and leave no trouble. 
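
A caller-side sketch of the renamed checkExists(...) contract above; the AppExistsStateEnum constants are the ones used in this file, while the injected ApplicationInfoService reference and the helper method name are only assumed for illustration.

    import org.apache.streampark.console.core.entity.Application;
    import org.apache.streampark.console.core.enums.AppExistsStateEnum;
    import org.apache.streampark.console.core.service.application.ApplicationInfoService;

    boolean canUseJobName(ApplicationInfoService infoService, Application appParam) {
      AppExistsStateEnum state = infoService.checkExists(appParam);
      switch (state) {
        case NO:             // job name is free in the DB, on YARN and on Kubernetes
          return true;
        case IN_DB:          // another record already uses this job name
        case IN_YARN:        // a YARN application with this name is still alive
        case IN_KUBERNETES:  // the clusterId/namespace/jobId is still tracked in a remote cluster
        case INVALID:        // the job name failed validation
        default:
          return false;
      }
    }
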
String path = - Workspace.of(StorageType.HDFS).APP_WORKSPACE().concat("/").concat(appId.toString()); + Workspace.of(StorageTypeEnum.HDFS).APP_WORKSPACE().concat("/").concat(appId.toString()); if (HdfsOperator.exists(path)) { HdfsOperator.delete(path); } @@ -220,12 +220,12 @@ public IPage page(Application appParam, RestRequest request) { Page page = new MybatisPager().getDefaultPage(request); if (CommonUtils.notEmpty(appParam.getStateArray())) { if (Arrays.stream(appParam.getStateArray()) - .anyMatch(x -> x == FlinkAppState.FINISHED.getValue())) { + .anyMatch(x -> x == FlinkAppStateEnum.FINISHED.getValue())) { Integer[] newArray = CommonUtils.arrayInsertIndex( appParam.getStateArray(), appParam.getStateArray().length, - FlinkAppState.POS_TERMINATED.getValue()); + FlinkAppStateEnum.POS_TERMINATED.getValue()); appParam.setStateArray(newArray); } } @@ -234,7 +234,7 @@ public IPage page(Application appParam, RestRequest request) { long now = System.currentTimeMillis(); List appIds = records.stream().map(Application::getId).collect(Collectors.toList()); - Map pipeStates = appBuildPipeService.listPipelineStatus(appIds); + Map pipeStates = appBuildPipeService.listPipelineStatus(appIds); List newRecords = records.stream() @@ -260,12 +260,12 @@ record -> { new AppControl() .setAllowBuild( record.getBuildStatus() == null - || !PipelineStatus.running + || !PipelineStatusEnum.running .getCode() .equals(record.getBuildStatus())) .setAllowStart( !record.shouldTracking() - && PipelineStatus.success + && PipelineStatusEnum.success .getCode() .equals(record.getBuildStatus())) .setAllowStop(record.isRunning()); @@ -292,9 +292,9 @@ public boolean create(Application appParam) { ApiAlertException.throwIfNull( appParam.getTeamId(), "The teamId can't be null. Create application failed."); appParam.setUserId(commonService.getUserId()); - appParam.setState(FlinkAppState.ADDED.getValue()); - appParam.setRelease(ReleaseState.NEED_RELEASE.get()); - appParam.setOptionState(OptionState.NONE.getValue()); + appParam.setState(FlinkAppStateEnum.ADDED.getValue()); + appParam.setRelease(ReleaseStateEnum.NEED_RELEASE.get()); + appParam.setOptionState(OptionStateEnum.NONE.getValue()); appParam.setCreateTime(new Date()); appParam.setDefaultModeIngress(settingService.getIngressModeDefault()); @@ -345,7 +345,8 @@ public boolean create(Application appParam) { } private boolean shouldHandleK8sName(Application app) { - return K8sFlinkConfig.isV2Enabled() && ExecutionMode.isKubernetesMode(app.getExecutionMode()); + return K8sFlinkConfig.isV2Enabled() + && ExecutionModeEnum.isKubernetesMode(app.getExecutionMode()); } private boolean existsByJobName(String jobName) { @@ -369,7 +370,7 @@ public Long copy(Application appParam) { newApp.setJobName(jobName); newApp.setClusterId( - ExecutionMode.isSessionMode(oldApp.getExecutionModeEnum()) + ExecutionModeEnum.isSessionMode(oldApp.getExecutionModeEnum()) ? oldApp.getClusterId() : jobName); newApp.setArgs(appParam.getArgs() != null ? 
appParam.getArgs() : oldApp.getArgs()); @@ -400,9 +401,9 @@ public Long copy(Application appParam) { newApp.setProjectId(oldApp.getProjectId()); newApp.setModule(oldApp.getModule()); newApp.setUserId(commonService.getUserId()); - newApp.setState(FlinkAppState.ADDED.getValue()); - newApp.setRelease(ReleaseState.NEED_RELEASE.get()); - newApp.setOptionState(OptionState.NONE.getValue()); + newApp.setState(FlinkAppStateEnum.ADDED.getValue()); + newApp.setRelease(ReleaseStateEnum.NEED_RELEASE.get()); + newApp.setOptionState(OptionStateEnum.NONE.getValue()); newApp.setCreateTime(new Date()); newApp.setHotParams(oldApp.getHotParams()); @@ -449,7 +450,7 @@ public boolean update(Application appParam) { success, String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId())); - application.setRelease(ReleaseState.NEED_RELEASE.get()); + application.setRelease(ReleaseStateEnum.NEED_RELEASE.get()); // 1) jar job jar file changed if (application.isUploadJob()) { @@ -472,7 +473,7 @@ public boolean update(Application appParam) { } // 2) k8s podTemplate changed. - if (application.getBuild() && ExecutionMode.isKubernetesMode(appParam.getExecutionMode())) { + if (application.getBuild() && ExecutionModeEnum.isKubernetesMode(appParam.getExecutionMode())) { if (ObjectUtils.trimNoEquals( application.getK8sRestExposedType(), appParam.getK8sRestExposedType()) || ObjectUtils.trimNoEquals( @@ -497,8 +498,8 @@ public boolean update(Application appParam) { // 4) yarn application mode change if (!application.getBuild()) { if (!application.getExecutionMode().equals(appParam.getExecutionMode())) { - if (ExecutionMode.YARN_APPLICATION == appParam.getExecutionModeEnum() - || ExecutionMode.YARN_APPLICATION == application.getExecutionModeEnum()) { + if (ExecutionModeEnum.YARN_APPLICATION == appParam.getExecutionModeEnum() + || ExecutionModeEnum.YARN_APPLICATION == application.getExecutionModeEnum()) { application.setBuild(true); } } @@ -590,7 +591,7 @@ public boolean update(Application appParam) { private void updateFlinkSqlJob(Application application, Application appParam) { FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(application.getId(), true); if (effectiveFlinkSql == null) { - effectiveFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + effectiveFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateTypeEnum.NEW); flinkSqlService.removeById(effectiveFlinkSql.getId()); FlinkSql sql = new FlinkSql(appParam); flinkSqlService.create(sql); @@ -613,7 +614,8 @@ private void updateFlinkSqlJob(Application application, Application appParam) { // if has been changed if (changeTypeEnum.hasChanged()) { // check if there is a candidate version for the newly added record - FlinkSql newFlinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + FlinkSql newFlinkSql = + flinkSqlService.getCandidate(application.getId(), CandidateTypeEnum.NEW); // If the candidate version of the new record exists, it will be deleted directly, // and only one candidate version will be retained. 
If the new candidate version is not // effective, @@ -624,7 +626,7 @@ private void updateFlinkSqlJob(Application application, Application appParam) { flinkSqlService.removeById(newFlinkSql.getId()); } FlinkSql historyFlinkSql = - flinkSqlService.getCandidate(application.getId(), CandidateType.HISTORY); + flinkSqlService.getCandidate(application.getId(), CandidateTypeEnum.HISTORY); // remove candidate flags that already exist but are set as candidates if (historyFlinkSql != null) { flinkSqlService.cleanCandidate(historyFlinkSql.getId()); @@ -639,9 +641,9 @@ private void updateFlinkSqlJob(Application application, Application appParam) { boolean versionChanged = !effectiveFlinkSql.getId().equals(appParam.getSqlId()); if (versionChanged) { // sql and dependency not changed, but version changed, means that rollback to the version - CandidateType type = CandidateType.HISTORY; + CandidateTypeEnum type = CandidateTypeEnum.HISTORY; flinkSqlService.setCandidate(type, appParam.getId(), appParam.getSqlId()); - application.setRelease(ReleaseState.NEED_ROLLBACK.get()); + application.setRelease(ReleaseStateEnum.NEED_ROLLBACK.get()); application.setBuild(true); } } @@ -674,15 +676,15 @@ public List getByTeamId(Long teamId) { @Override public List getByTeamIdAndExecutionModes( - Long teamId, @Nonnull Collection executionModes) { + Long teamId, @Nonnull Collection executionModeEnums) { return getBaseMapper() .selectList( new LambdaQueryWrapper() .eq((SFunction) Application::getTeamId, teamId) .in( Application::getExecutionMode, - executionModes.stream() - .map(ExecutionMode::getMode) + executionModeEnums.stream() + .map(ExecutionModeEnum::getMode) .collect(Collectors.toSet()))); } @@ -697,16 +699,17 @@ public boolean checkBuildAndUpdate(Application appParam) { LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); updateWrapper.eq(Application::getId, appParam.getId()); if (appParam.isRunning()) { - updateWrapper.set(Application::getRelease, ReleaseState.NEED_RESTART.get()); + updateWrapper.set(Application::getRelease, ReleaseStateEnum.NEED_RESTART.get()); } else { - updateWrapper.set(Application::getRelease, ReleaseState.DONE.get()); - updateWrapper.set(Application::getOptionState, OptionState.NONE.getValue()); + updateWrapper.set(Application::getRelease, ReleaseStateEnum.DONE.get()); + updateWrapper.set(Application::getOptionState, OptionStateEnum.NONE.getValue()); } this.update(updateWrapper); // backup if (appParam.isFlinkSqlJob()) { - FlinkSql newFlinkSql = flinkSqlService.getCandidate(appParam.getId(), CandidateType.NEW); + FlinkSql newFlinkSql = + flinkSqlService.getCandidate(appParam.getId(), CandidateTypeEnum.NEW); if (!appParam.isNeedRollback() && newFlinkSql != null) { backUpService.backup(appParam, newFlinkSql); } @@ -724,7 +727,7 @@ public boolean checkBuildAndUpdate(Application appParam) { @Override public void clean(Application appParam) { - appParam.setRelease(ReleaseState.DONE.get()); + appParam.setRelease(ReleaseStateEnum.DONE.get()); this.updateRelease(appParam); } @@ -739,7 +742,7 @@ public Application getApp(Application appParam) { if (application.isFlinkSqlJob()) { FlinkSql flinkSql = flinkSqlService.getEffective(application.getId(), true); if (flinkSql == null) { - flinkSql = flinkSqlService.getCandidate(application.getId(), CandidateType.NEW); + flinkSql = flinkSqlService.getCandidate(application.getId(), CandidateTypeEnum.NEW); flinkSql.setSql(DeflaterUtils.unzipString(flinkSql.getSql())); } flinkSql.setToApplication(application); @@ -799,7 +802,7 @@ public boolean 
validateQueueIfNeeded(Application oldApp, Application newApp) { } oldApp.setYarnQueueByHotParams(); - if (ExecutionMode.isYarnPerJobOrAppMode(newApp.getExecutionModeEnum()) + if (ExecutionModeEnum.isYarnPerJobOrAppMode(newApp.getExecutionModeEnum()) && StringUtils.equals(oldApp.getYarnQueue(), newApp.getYarnQueue())) { return true; } @@ -815,7 +818,7 @@ public boolean validateQueueIfNeeded(Application oldApp, Application newApp) { * (empty or default), return true, false else. */ private boolean isYarnNotDefaultQueue(Application application) { - return ExecutionMode.isYarnPerJobOrAppMode(application.getExecutionModeEnum()) + return ExecutionModeEnum.isYarnPerJobOrAppMode(application.getExecutionModeEnum()) && !yarnQueueService.isDefaultQueue(application.getYarnQueue()); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java index 8c3120638c..c4716db2de 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/AppBuildPipeServiceImpl.java @@ -20,9 +20,9 @@ import org.apache.streampark.common.conf.ConfigConst; import org.apache.streampark.common.conf.K8sFlinkConfig; import org.apache.streampark.common.conf.Workspace; -import org.apache.streampark.common.enums.ApplicationType; -import org.apache.streampark.common.enums.DevelopmentMode; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ApplicationTypeEnum; +import org.apache.streampark.common.enums.DevelopmentModeEnum; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.fs.FsOperator; import org.apache.streampark.common.util.ExceptionUtils; import org.apache.streampark.common.util.FileUtils; @@ -41,11 +41,11 @@ import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.entity.Message; import org.apache.streampark.console.core.entity.Resource; -import org.apache.streampark.console.core.enums.CandidateType; -import org.apache.streampark.console.core.enums.NoticeType; -import org.apache.streampark.console.core.enums.OptionState; -import org.apache.streampark.console.core.enums.ReleaseState; -import org.apache.streampark.console.core.enums.ResourceType; +import org.apache.streampark.console.core.enums.CandidateTypeEnum; +import org.apache.streampark.console.core.enums.NoticeTypeEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; +import org.apache.streampark.console.core.enums.ResourceTypeEnum; import org.apache.streampark.console.core.mapper.ApplicationBuildPipelineMapper; import org.apache.streampark.console.core.service.AppBuildPipeService; import org.apache.streampark.console.core.service.ApplicationBackUpService; @@ -77,8 +77,8 @@ import org.apache.streampark.flink.packer.pipeline.FlinkYarnApplicationBuildRequest; import org.apache.streampark.flink.packer.pipeline.PipeSnapshot; import org.apache.streampark.flink.packer.pipeline.PipeWatcher; -import org.apache.streampark.flink.packer.pipeline.PipelineStatus; -import org.apache.streampark.flink.packer.pipeline.PipelineType; +import 
org.apache.streampark.flink.packer.pipeline.PipelineStatusEnum; +import org.apache.streampark.flink.packer.pipeline.PipelineTypeEnum; import org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sApplicationBuildPipeline; import org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sApplicationBuildPipelineV2; import org.apache.streampark.flink.packer.pipeline.impl.FlinkK8sSessionBuildPipeline; @@ -115,7 +115,7 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; -import static org.apache.streampark.console.core.enums.Operation.RELEASE; +import static org.apache.streampark.console.core.enums.OperationEnum.RELEASE; @Service @Slf4j @@ -200,7 +200,7 @@ public boolean buildApplication(Long appId, boolean forceBuild) { } // 1) flink sql setDependency - FlinkSql newFlinkSql = flinkSqlService.getCandidate(app.getId(), CandidateType.NEW); + FlinkSql newFlinkSql = flinkSqlService.getCandidate(app.getId(), CandidateTypeEnum.NEW); FlinkSql effectiveFlinkSql = flinkSqlService.getEffective(app.getId(), false); if (app.isFlinkSqlJobOrPyFlinkJob()) { FlinkSql flinkSql = newFlinkSql == null ? effectiveFlinkSql : newFlinkSql; @@ -224,7 +224,7 @@ public void onStart(PipeSnapshot snapshot) { AppBuildPipeline.fromPipeSnapshot(snapshot).setAppId(app.getId()); saveEntity(buildPipeline); - app.setRelease(ReleaseState.RELEASING.get()); + app.setRelease(ReleaseStateEnum.RELEASING.get()); applicationManageService.updateRelease(app); if (flinkAppHttpWatcher.isWatchingApp(app.getId())) { @@ -313,10 +313,10 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) { if (result.pass()) { // running job ... if (app.isRunning()) { - app.setRelease(ReleaseState.NEED_RESTART.get()); + app.setRelease(ReleaseStateEnum.NEED_RESTART.get()); } else { - app.setOptionState(OptionState.NONE.getValue()); - app.setRelease(ReleaseState.DONE.get()); + app.setOptionState(OptionStateEnum.NONE.getValue()); + app.setRelease(ReleaseStateEnum.DONE.get()); // If the current task is not running, or the task has just been added, directly set // the candidate version to the official version if (app.isFlinkSqlJob()) { @@ -349,10 +349,10 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) { app.getId(), app.getJobName().concat(" release failed"), ExceptionUtils.stringifyException(snapshot.error().exception()), - NoticeType.EXCEPTION); + NoticeTypeEnum.EXCEPTION); messageService.push(message); - app.setRelease(ReleaseState.FAILED.get()); - app.setOptionState(OptionState.NONE.getValue()); + app.setRelease(ReleaseStateEnum.FAILED.get()); + app.setOptionState(OptionStateEnum.NONE.getValue()); app.setBuild(true); applicationLog.setException( ExceptionUtils.stringifyException(snapshot.error().exception())); @@ -366,7 +366,7 @@ public void onFinish(PipeSnapshot snapshot, BuildResult result) { } }); // save docker resolve progress detail to cache, only for flink-k8s application mode. 
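
A condensed sketch of the release-state bookkeeping performed by the pipeline callbacks above; this helper does not exist in the codebase and only restates the onStart/onFinish branches from this hunk.

    import org.apache.streampark.console.core.enums.ReleaseStateEnum;

    // onStart                               -> ReleaseStateEnum.RELEASING
    // onFinish, build passed, job running   -> ReleaseStateEnum.NEED_RESTART
    // onFinish, build passed, job idle      -> ReleaseStateEnum.DONE (option state reset to OptionStateEnum.NONE)
    // onFinish, build failed                -> ReleaseStateEnum.FAILED (a NoticeTypeEnum.EXCEPTION message is pushed)
    static ReleaseStateEnum releaseStateAfterBuild(boolean buildPassed, boolean appRunning) {
      if (!buildPassed) {
        return ReleaseStateEnum.FAILED;
      }
      return appRunning ? ReleaseStateEnum.NEED_RESTART : ReleaseStateEnum.DONE;
    }
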
- if (PipelineType.FLINK_NATIVE_K8S_APPLICATION == pipeline.pipeType()) { + if (PipelineTypeEnum.FLINK_NATIVE_K8S_APPLICATION == pipeline.pipeType()) { pipeline .as(FlinkK8sApplicationBuildPipeline.class) .registerDockerProgressWatcher( @@ -439,14 +439,14 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) { } } - ExecutionMode executionMode = app.getExecutionModeEnum(); + ExecutionModeEnum executionModeEnum = app.getExecutionModeEnum(); String mainClass = ConfigConst.STREAMPARK_FLINKSQL_CLIENT_CLASS(); - switch (executionMode) { + switch (executionModeEnum) { case YARN_APPLICATION: String yarnProvidedPath = app.getAppLib(); String localWorkspace = app.getLocalAppHome().concat("/lib"); - if (DevelopmentMode.CUSTOM_CODE == app.getDevelopmentMode() - && ApplicationType.APACHE_FLINK == app.getApplicationType()) { + if (DevelopmentModeEnum.CUSTOM_CODE == app.getDevelopmentMode() + && ApplicationTypeEnum.APACHE_FLINK == app.getApplicationType()) { yarnProvidedPath = app.getAppHome(); localWorkspace = app.getLocalAppHome(); } @@ -544,7 +544,7 @@ private String retrieveFlinkUserJar(FlinkEnv flinkEnv, Application app) { return String.format("%s/%s", app.getAppHome(), app.getJar()); case FLINK_SQL: String sqlDistJar = commonService.getSqlClientJar(flinkEnv); - if (app.getExecutionModeEnum() == ExecutionMode.YARN_APPLICATION) { + if (app.getExecutionModeEnum() == ExecutionModeEnum.YARN_APPLICATION) { String clientPath = Workspace.remote().APP_CLIENT(); return String.format("%s/%s", clientPath, sqlDistJar); } @@ -571,12 +571,12 @@ public DockerResolvedSnapshot getDockerProgressDetailSnapshot(@Nonnull Long appI @Override public boolean allowToBuildNow(@Nonnull Long appId) { return getCurrentBuildPipeline(appId) - .map(pipeline -> PipelineStatus.running != pipeline.getPipelineStatus()) + .map(pipeline -> PipelineStatusEnum.running != pipeline.getPipelineStatus()) .orElse(true); } @Override - public Map listPipelineStatus(List appIds) { + public Map listPipelineStatus(List appIds) { if (CollectionUtils.isEmpty(appIds)) { return Collections.emptyMap(); } @@ -635,7 +635,7 @@ private DependencyInfo getMergedDependencyInfo(Application application) { resourceId -> { Resource resource = resourceService.getById(resourceId); - if (resource.getResourceType() != ResourceType.GROUP) { + if (resource.getResourceTypeEnum() != ResourceTypeEnum.GROUP) { mergeDependency(application, mvnArtifacts, jarLibs, resource); } else { try { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java index 6afe5b45ec..d7ae18d6e5 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationBackUpServiceImpl.java @@ -26,8 +26,8 @@ import org.apache.streampark.console.core.entity.ApplicationBackUp; import org.apache.streampark.console.core.entity.ApplicationConfig; import org.apache.streampark.console.core.entity.FlinkSql; -import org.apache.streampark.console.core.enums.EffectiveType; -import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.enums.EffectiveTypeEnum; +import 
org.apache.streampark.console.core.enums.ReleaseStateEnum; import org.apache.streampark.console.core.mapper.ApplicationBackUpMapper; import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.ApplicationConfigService; @@ -103,11 +103,12 @@ public void rollback(ApplicationBackUp bakParam) { // rollback to back up config configService.setLatestOrEffective(true, bakParam.getId(), bakParam.getAppId()); } else { - effectiveService.saveOrUpdate(bakParam.getAppId(), EffectiveType.CONFIG, bakParam.getId()); + effectiveService.saveOrUpdate( + bakParam.getAppId(), EffectiveTypeEnum.CONFIG, bakParam.getId()); // if flink sql task, will be rollback sql and dependencies if (application.isFlinkSqlJob()) { effectiveService.saveOrUpdate( - bakParam.getAppId(), EffectiveType.FLINKSQL, bakParam.getSqlId()); + bakParam.getAppId(), EffectiveTypeEnum.FLINKSQL, bakParam.getSqlId()); } } @@ -123,7 +124,7 @@ public void rollback(ApplicationBackUp bakParam) { new UpdateWrapper() .lambda() .eq(Application::getId, application.getId()) - .set(Application::getRelease, ReleaseState.NEED_RESTART.get())); + .set(Application::getRelease, ReleaseStateEnum.NEED_RESTART.get())); } @Override @@ -173,8 +174,8 @@ public void rollbackFlinkSql(Application appParam, FlinkSql flinkSqlParam) { ApiAlertException.throwIfNull( backUp, "Application backup can't be null. Rollback flink sql failed."); // rollback config and sql - effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveType.CONFIG, backUp.getId()); - effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveType.FLINKSQL, backUp.getSqlId()); + effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveTypeEnum.CONFIG, backUp.getId()); + effectiveService.saveOrUpdate(backUp.getAppId(), EffectiveTypeEnum.FLINKSQL, backUp.getSqlId()); } @Override diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java index 56f778b1c5..d5f5da1f24 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ApplicationConfigServiceImpl.java @@ -25,8 +25,8 @@ import org.apache.streampark.console.base.mybatis.pager.MybatisPager; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.ApplicationConfig; -import org.apache.streampark.console.core.enums.ConfigFileType; -import org.apache.streampark.console.core.enums.EffectiveType; +import org.apache.streampark.console.core.enums.ConfigFileTypeEnum; +import org.apache.streampark.console.core.enums.EffectiveTypeEnum; import org.apache.streampark.console.core.mapper.ApplicationConfigMapper; import org.apache.streampark.console.core.service.ApplicationConfigService; import org.apache.streampark.console.core.service.EffectiveService; @@ -73,8 +73,8 @@ public synchronized void create(Application application, Boolean latest) { applicationConfig.setAppId(application.getId()); if (application.getFormat() != null) { - ConfigFileType fileType = ConfigFileType.of(application.getFormat()); - if (fileType == null || ConfigFileType.UNKNOWN == fileType) { + ConfigFileTypeEnum fileType = 
ConfigFileTypeEnum.of(application.getFormat()); + if (fileType == null || ConfigFileTypeEnum.UNKNOWN == fileType) { throw new ApiAlertException( "application' config error. must be (.properties|.yaml|.yml |.conf)"); } @@ -110,7 +110,7 @@ public synchronized void update(Application application, Boolean latest) { ApplicationConfig effectiveConfig = getEffective(application.getId()); if (Utils.isEmpty(application.getConfig())) { if (effectiveConfig != null) { - effectiveService.delete(application.getId(), EffectiveType.CONFIG); + effectiveService.delete(application.getId(), EffectiveTypeEnum.CONFIG); } } else { // there was no configuration before, is a new configuration @@ -180,7 +180,7 @@ public void toEffective(Long appId, Long configId) { LambdaUpdateWrapper updateWrapper = Wrappers.lambdaUpdate(); updateWrapper.eq(ApplicationConfig::getAppId, appId).set(ApplicationConfig::getLatest, false); this.update(updateWrapper); - effectiveService.saveOrUpdate(appId, EffectiveType.CONFIG, configId); + effectiveService.saveOrUpdate(appId, EffectiveTypeEnum.CONFIG, configId); } @Override diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/EffectiveServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/EffectiveServiceImpl.java index f721693eac..12afa4ec84 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/EffectiveServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/EffectiveServiceImpl.java @@ -18,7 +18,7 @@ package org.apache.streampark.console.core.service.impl; import org.apache.streampark.console.core.entity.Effective; -import org.apache.streampark.console.core.enums.EffectiveType; +import org.apache.streampark.console.core.enums.EffectiveTypeEnum; import org.apache.streampark.console.core.mapper.EffectiveMapper; import org.apache.streampark.console.core.service.EffectiveService; @@ -39,25 +39,25 @@ public class EffectiveServiceImpl extends ServiceImpl queryWrapper = new LambdaQueryWrapper() .eq(Effective::getAppId, appId) - .eq(Effective::getTargetType, effectiveType.getType()); + .eq(Effective::getTargetType, effectiveTypeEnum.getType()); baseMapper.delete(queryWrapper); } @Override - public Effective get(Long appId, EffectiveType effectiveType) { + public Effective get(Long appId, EffectiveTypeEnum effectiveTypeEnum) { LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper() .eq(Effective::getAppId, appId) - .eq(Effective::getTargetType, effectiveType.getType()); + .eq(Effective::getTargetType, effectiveTypeEnum.getType()); return this.getOne(queryWrapper); } @Override - public void saveOrUpdate(Long appId, EffectiveType type, Long id) { + public void saveOrUpdate(Long appId, EffectiveTypeEnum type, Long id) { LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper() .eq(Effective::getAppId, appId) diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java index 4fc211d496..2d78edd460 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java +++ 
b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ExternalLinkServiceImpl.java @@ -20,7 +20,7 @@ import org.apache.streampark.common.util.Utils; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.ExternalLink; -import org.apache.streampark.console.core.enums.PlaceholderType; +import org.apache.streampark.console.core.enums.PlaceholderTypeEnum; import org.apache.streampark.console.core.mapper.ExternalLinkMapper; import org.apache.streampark.console.core.service.ExternalLinkService; import org.apache.streampark.console.core.service.application.ApplicationManageService; @@ -86,9 +86,9 @@ public List render(Long appId) { private void renderLinkUrl(ExternalLink link, Application app) { HashMap map = new HashMap(); - map.put(PlaceholderType.JOB_ID.get(), app.getJobId()); - map.put(PlaceholderType.JOB_NAME.get(), app.getJobName()); - map.put(PlaceholderType.YARN_ID.get(), app.getAppId()); + map.put(PlaceholderTypeEnum.JOB_ID.get(), app.getJobId()); + map.put(PlaceholderTypeEnum.JOB_NAME.get(), app.getJobName()); + map.put(PlaceholderTypeEnum.YARN_ID.get(), app.getAppId()); PropertyPlaceholderHelper propertyPlaceholderHelper = new PropertyPlaceholderHelper("{", "}"); link.setRenderedLinkUrl( propertyPlaceholderHelper.replacePlaceholders(link.getLinkUrl().trim(), map::get)); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java index d7f62ab083..affe3c2463 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkClusterServiceImpl.java @@ -18,8 +18,8 @@ package org.apache.streampark.console.core.service.impl; import org.apache.streampark.common.conf.K8sFlinkConfig; -import org.apache.streampark.common.enums.ClusterState; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ClusterStateEnum; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.ThreadUtils; import org.apache.streampark.common.util.YarnUtils; import org.apache.streampark.console.base.exception.ApiAlertException; @@ -123,13 +123,13 @@ public ResponseResult check(FlinkCluster cluster) { } // 3) Check connection - if (ExecutionMode.isRemoteMode(cluster.getExecutionModeEnum()) + if (ExecutionModeEnum.isRemoteMode(cluster.getExecutionModeEnum()) && !flinkClusterWatcher.verifyClusterConnection(cluster)) { result.setMsg("The remote cluster connection failed, please check!"); result.setStatus(3); return result; } - if (ExecutionMode.isYarnMode(cluster.getExecutionModeEnum()) + if (ExecutionModeEnum.isYarnMode(cluster.getExecutionModeEnum()) && cluster.getClusterId() != null && !flinkClusterWatcher.verifyClusterConnection(cluster)) { result.setMsg("The flink cluster connection failed, please check!"); @@ -152,15 +152,15 @@ public boolean internalCreate(FlinkCluster flinkCluster) { ApiAlertException.throwIfFalse( successful, String.format(ERROR_CLUSTER_QUEUE_HINT, flinkCluster.getYarnQueue())); flinkCluster.setCreateTime(new Date()); - if 
(ExecutionMode.isRemoteMode(flinkCluster.getExecutionModeEnum())) { - flinkCluster.setClusterState(ClusterState.RUNNING.getState()); + if (ExecutionModeEnum.isRemoteMode(flinkCluster.getExecutionModeEnum())) { + flinkCluster.setClusterState(ClusterStateEnum.RUNNING.getState()); flinkCluster.setStartTime(new Date()); flinkCluster.setEndTime(null); } else { - flinkCluster.setClusterState(ClusterState.CREATED.getState()); + flinkCluster.setClusterState(ClusterStateEnum.CREATED.getState()); } boolean ret = save(flinkCluster); - if (ret && ExecutionMode.isRemoteMode(flinkCluster.getExecutionMode())) { + if (ret && ExecutionModeEnum.isRemoteMode(flinkCluster.getExecutionMode())) { FlinkClusterWatcher.addWatching(flinkCluster); } if (shouldWatchForK8s(flinkCluster)) { @@ -178,7 +178,7 @@ public void start(FlinkCluster cluster) { ApiAlertException.throwIfNull( deployResponse, "Deploy cluster failed, unknown reason,please check you params or StreamPark error log"); - if (ExecutionMode.isYarnSessionMode(flinkCluster.getExecutionModeEnum())) { + if (ExecutionModeEnum.isYarnSessionMode(flinkCluster.getExecutionModeEnum())) { String address = String.format( "%s/proxy/%s/", YarnUtils.getRMWebAppURL(true), deployResponse.clusterId()); @@ -188,7 +188,7 @@ public void start(FlinkCluster cluster) { flinkCluster.setAddress(deployResponse.address()); } flinkCluster.setClusterId(deployResponse.clusterId()); - flinkCluster.setClusterState(ClusterState.RUNNING.getState()); + flinkCluster.setClusterState(ClusterStateEnum.RUNNING.getState()); flinkCluster.setException(null); flinkCluster.setEndTime(null); updateById(flinkCluster); @@ -198,7 +198,7 @@ public void start(FlinkCluster cluster) { } } catch (Exception e) { log.error(e.getMessage(), e); - flinkCluster.setClusterState(ClusterState.FAILED.getState()); + flinkCluster.setClusterState(ClusterStateEnum.FAILED.getState()); flinkCluster.setException(e.toString()); updateById(flinkCluster); throw new ApiDetailException(e); @@ -215,9 +215,9 @@ public void update(FlinkCluster paramOfCluster) { flinkCluster.setClusterName(paramOfCluster.getClusterName()); flinkCluster.setAlertId(paramOfCluster.getAlertId()); flinkCluster.setDescription(paramOfCluster.getDescription()); - if (ExecutionMode.isRemoteMode(flinkCluster.getExecutionModeEnum())) { + if (ExecutionModeEnum.isRemoteMode(flinkCluster.getExecutionModeEnum())) { flinkCluster.setAddress(paramOfCluster.getAddress()); - flinkCluster.setClusterState(ClusterState.RUNNING.getState()); + flinkCluster.setClusterState(ClusterStateEnum.RUNNING.getState()); flinkCluster.setStartTime(new Date()); flinkCluster.setEndTime(null); FlinkClusterWatcher.addWatching(flinkCluster); @@ -249,7 +249,7 @@ public void shutdown(FlinkCluster cluster) { ShutDownResponse shutDownResponse = shutdownInternal(flinkCluster, flinkCluster.getClusterId()); ApiAlertException.throwIfNull(shutDownResponse, "Get shutdown response failed"); - flinkCluster.setClusterState(ClusterState.CANCELED.getState()); + flinkCluster.setClusterState(ClusterStateEnum.CANCELED.getState()); flinkCluster.setEndTime(new Date()); updateById(flinkCluster); FlinkClusterWatcher.unWatching(flinkCluster); @@ -305,19 +305,19 @@ public Boolean existsByFlinkEnvId(Long flinkEnvId) { } @Override - public List getByExecutionModes(Collection executionModes) { + public List getByExecutionModes(Collection executionModeEnums) { return getBaseMapper() .selectList( new LambdaQueryWrapper() .in( FlinkCluster::getExecutionMode, - executionModes.stream() - .map(ExecutionMode::getMode) + 
executionModeEnums.stream() + .map(ExecutionModeEnum::getMode) .collect(Collectors.toSet()))); } @Override - public void updateClusterState(Long id, ClusterState state) { + public void updateClusterState(Long id, ClusterStateEnum state) { LambdaUpdateWrapper updateWrapper = new LambdaUpdateWrapper() .eq(FlinkCluster::getId, id) @@ -347,10 +347,10 @@ public void delete(FlinkCluster cluster) { FlinkCluster flinkCluster = getById(id); ApiAlertException.throwIfNull(flinkCluster, "Flink cluster not exist, please check."); - if (ExecutionMode.isYarnSessionMode(flinkCluster.getExecutionModeEnum()) - || ExecutionMode.isKubernetesSessionMode(flinkCluster.getExecutionMode())) { + if (ExecutionModeEnum.isYarnSessionMode(flinkCluster.getExecutionModeEnum()) + || ExecutionModeEnum.isKubernetesSessionMode(flinkCluster.getExecutionMode())) { ApiAlertException.throwIfTrue( - ClusterState.isRunning(flinkCluster.getClusterStateEnum()), + ClusterStateEnum.isRunning(flinkCluster.getClusterStateEnum()), "Flink cluster is running, cannot be delete, please check."); } if (shouldWatchForK8s(flinkCluster)) { @@ -393,7 +393,7 @@ public boolean validateQueueIfNeeded(FlinkCluster oldCluster, FlinkCluster newCl return true; } - if (ExecutionMode.isYarnSessionMode(newCluster.getExecutionModeEnum()) + if (ExecutionModeEnum.isYarnSessionMode(newCluster.getExecutionModeEnum()) && StringUtils.equals(oldCluster.getYarnQueue(), newCluster.getYarnQueue())) { return true; } @@ -409,7 +409,7 @@ public boolean validateQueueIfNeeded(FlinkCluster oldCluster, FlinkCluster newCl * default), return true, false else. */ private boolean isYarnNotDefaultQueue(FlinkCluster cluster) { - return ExecutionMode.isYarnSessionMode(cluster.getExecutionModeEnum()) + return ExecutionModeEnum.isYarnSessionMode(cluster.getExecutionModeEnum()) && !yarnQueueService.isDefaultQueue(cluster.getYarnQueue()); } @@ -442,12 +442,12 @@ private DeployResponse deployInternal(FlinkCluster flinkCluster) } private void checkActiveIfNeeded(FlinkCluster flinkCluster) { - if (ExecutionMode.isYarnSessionMode(flinkCluster.getExecutionModeEnum())) { + if (ExecutionModeEnum.isYarnSessionMode(flinkCluster.getExecutionModeEnum())) { ApiAlertException.throwIfFalse( - ClusterState.isRunning(flinkCluster.getClusterStateEnum()), + ClusterStateEnum.isRunning(flinkCluster.getClusterStateEnum()), "Current cluster is not active, please check!"); if (!flinkClusterWatcher.verifyClusterConnection(flinkCluster)) { - flinkCluster.setClusterState(ClusterState.LOST.getState()); + flinkCluster.setClusterState(ClusterStateEnum.LOST.getState()); updateById(flinkCluster); throw new ApiAlertException("Current cluster is not active, please check!"); } @@ -457,7 +457,7 @@ private void checkActiveIfNeeded(FlinkCluster flinkCluster) { @Nullable private KubernetesDeployParam getKubernetesDeployDesc( @Nonnull FlinkCluster flinkCluster, String action) { - ExecutionMode executionModeEnum = flinkCluster.getExecutionModeEnum(); + ExecutionModeEnum executionModeEnum = flinkCluster.getExecutionModeEnum(); switch (executionModeEnum) { case YARN_SESSION: break; @@ -479,6 +479,6 @@ private KubernetesDeployParam getKubernetesDeployDesc( private boolean shouldWatchForK8s(FlinkCluster flinkCluster) { return K8sFlinkConfig.isV2Enabled() - && ExecutionMode.isKubernetesSessionMode(flinkCluster.getExecutionMode()); + && ExecutionModeEnum.isKubernetesSessionMode(flinkCluster.getExecutionMode()); } } diff --git 
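Note (not part of the patch): FlinkClusterServiceImpl above now exposes getByExecutionModes(...) and updateClusterState(Long, ClusterStateEnum). A small sketch of how a caller could combine them; the sweep scenario and class name are invented, the FlinkClusterService import path is assumed to match the other services in this patch, and the element types are assumed to be ExecutionModeEnum / FlinkCluster because the generics were stripped from this rendering of the diff.

import java.util.Collections;
import java.util.List;

import org.apache.streampark.common.enums.ClusterStateEnum;
import org.apache.streampark.common.enums.ExecutionModeEnum;
import org.apache.streampark.console.core.entity.FlinkCluster;
import org.apache.streampark.console.core.service.FlinkClusterService;

class ClusterStateSweepSketch {

  /** Mark every registered REMOTE cluster as LOST, e.g. after a failed connectivity sweep. */
  static void markRemoteClustersLost(FlinkClusterService clusterService) {
    List<FlinkCluster> remotes =
        clusterService.getByExecutionModes(Collections.singleton(ExecutionModeEnum.REMOTE));
    for (FlinkCluster cluster : remotes) {
      clusterService.updateClusterState(cluster.getId(), ClusterStateEnum.LOST);
    }
  }
}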
a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkSqlServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkSqlServiceImpl.java index 0c4abe1955..ded40a9a16 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkSqlServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/FlinkSqlServiceImpl.java @@ -26,8 +26,8 @@ import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.FlinkEnv; import org.apache.streampark.console.core.entity.FlinkSql; -import org.apache.streampark.console.core.enums.CandidateType; -import org.apache.streampark.console.core.enums.EffectiveType; +import org.apache.streampark.console.core.enums.CandidateTypeEnum; +import org.apache.streampark.console.core.enums.EffectiveTypeEnum; import org.apache.streampark.console.core.mapper.FlinkSqlMapper; import org.apache.streampark.console.core.service.ApplicationBackUpService; import org.apache.streampark.console.core.service.EffectiveService; @@ -102,20 +102,20 @@ public void create(FlinkSql flinkSql) { String sql = DeflaterUtils.zipString(flinkSql.getSql()); flinkSql.setSql(sql); this.save(flinkSql); - this.setCandidate(CandidateType.NEW, flinkSql.getAppId(), flinkSql.getId()); + this.setCandidate(CandidateTypeEnum.NEW, flinkSql.getAppId(), flinkSql.getId()); } @Override - public void setCandidate(CandidateType candidateType, Long appId, Long sqlId) { + public void setCandidate(CandidateTypeEnum candidateTypeEnum, Long appId, Long sqlId) { this.update( new LambdaUpdateWrapper() .eq(FlinkSql::getAppId, appId) - .set(FlinkSql::getCandidate, CandidateType.NONE.get())); + .set(FlinkSql::getCandidate, CandidateTypeEnum.NONE.get())); this.update( new LambdaUpdateWrapper() .eq(FlinkSql::getId, sqlId) - .set(FlinkSql::getCandidate, candidateType.get())); + .set(FlinkSql::getCandidate, candidateTypeEnum.get())); } @Override @@ -139,20 +139,20 @@ public List history(Application application) { } @Override - public FlinkSql getCandidate(Long appId, CandidateType candidateType) { + public FlinkSql getCandidate(Long appId, CandidateTypeEnum candidateTypeEnum) { LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper().eq(FlinkSql::getAppId, appId); - if (candidateType == null) { - queryWrapper.gt(FlinkSql::getCandidate, CandidateType.NONE.get()); + if (candidateTypeEnum == null) { + queryWrapper.gt(FlinkSql::getCandidate, CandidateTypeEnum.NONE.get()); } else { - queryWrapper.eq(FlinkSql::getCandidate, candidateType.get()); + queryWrapper.eq(FlinkSql::getCandidate, candidateTypeEnum.get()); } return baseMapper.selectOne(queryWrapper); } @Override public void toEffective(Long appId, Long sqlId) { - effectiveService.saveOrUpdate(appId, EffectiveType.FLINKSQL, sqlId); + effectiveService.saveOrUpdate(appId, EffectiveTypeEnum.FLINKSQL, sqlId); } @Override @@ -160,7 +160,7 @@ public void cleanCandidate(Long id) { this.update( new LambdaUpdateWrapper() .eq(FlinkSql::getId, id) - .set(FlinkSql::getCandidate, CandidateType.NONE.get())); + .set(FlinkSql::getCandidate, CandidateTypeEnum.NONE.get())); } @Override @@ -173,7 +173,7 @@ public void removeApp(Long appId) { @Override @Transactional(propagation = Propagation.REQUIRES_NEW, rollbackFor = Exception.class) public void rollback(Application application) { - FlinkSql 
sql = getCandidate(application.getId(), CandidateType.HISTORY); + FlinkSql sql = getCandidate(application.getId(), CandidateTypeEnum.HISTORY); Utils.notNull(sql); try { // check and backup current job diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/MessageServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/MessageServiceImpl.java index 5a60fd9686..128c625a58 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/MessageServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/MessageServiceImpl.java @@ -20,7 +20,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.mybatis.pager.MybatisPager; import org.apache.streampark.console.core.entity.Message; -import org.apache.streampark.console.core.enums.NoticeType; +import org.apache.streampark.console.core.enums.NoticeTypeEnum; import org.apache.streampark.console.core.mapper.MessageMapper; import org.apache.streampark.console.core.service.MessageService; import org.apache.streampark.console.core.websocket.WebSocketEndpoint; @@ -47,13 +47,13 @@ public void push(Message message) { } @Override - public IPage getUnRead(NoticeType noticeType, RestRequest request) { + public IPage getUnRead(NoticeTypeEnum noticeTypeEnum, RestRequest request) { Page page = new MybatisPager().getDefaultPage(request); LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper() .eq(Message::getIsRead, false) .orderByDesc(Message::getCreateTime) - .eq(Message::getType, noticeType.get()); + .eq(Message::getType, noticeTypeEnum.get()); return this.baseMapper.selectPage(page, queryWrapper); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java index 416e162b86..0970b769d9 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ProjectServiceImpl.java @@ -32,9 +32,9 @@ import org.apache.streampark.console.base.util.GZipUtils; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.Project; -import org.apache.streampark.console.core.enums.BuildState; -import org.apache.streampark.console.core.enums.GitCredential; -import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.enums.BuildStateEnum; +import org.apache.streampark.console.core.enums.GitCredentialEnum; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; import org.apache.streampark.console.core.mapper.ProjectMapper; import org.apache.streampark.console.core.service.ProjectService; import org.apache.streampark.console.core.service.application.ApplicationManageService; @@ -122,7 +122,7 @@ public boolean update(Project projectParam) { project.getTeamId().equals(projectParam.getTeamId()), "TeamId can't be changed, update project failed."); ApiAlertException.throwIfFalse( - !project.getBuildState().equals(BuildState.BUILDING.get()), + 
!project.getBuildState().equals(BuildStateEnum.BUILDING.get()), "The project is being built, update project failed."); project.setName(projectParam.getName()); project.setUrl(projectParam.getUrl()); @@ -134,21 +134,21 @@ public boolean update(Project projectParam) { project.setPom(projectParam.getPom()); project.setDescription(projectParam.getDescription()); project.setBuildArgs(projectParam.getBuildArgs()); - if (GitCredential.isSSH(project.getGitCredential())) { + if (GitCredentialEnum.isSSH(project.getGitCredential())) { project.setUserName(null); } else { project.setPrvkeyPath(null); } if (projectParam.getBuildState() != null) { project.setBuildState(projectParam.getBuildState()); - if (BuildState.NEED_REBUILD == BuildState.of(projectParam.getBuildState())) { + if (BuildStateEnum.NEED_REBUILD == BuildStateEnum.of(projectParam.getBuildState())) { List applications = getApplications(project); // Update deployment status applications.forEach( (app) -> { log.info( "update deploy by project: {}, appName:{}", project.getName(), app.getJobName()); - app.setRelease(ReleaseState.NEED_CHECK.get()); + app.setRelease(ReleaseStateEnum.NEED_CHECK.get()); applicationManageService.updateRelease(app); }); } @@ -196,15 +196,15 @@ public List findByTeamId(Long teamId) { @Override public void build(Long id) throws Exception { Project project = getById(id); - this.baseMapper.updateBuildState(project.getId(), BuildState.BUILDING.get()); + this.baseMapper.updateBuildState(project.getId(), BuildStateEnum.BUILDING.get()); String logPath = getBuildLogPath(id); ProjectBuildTask projectBuildTask = new ProjectBuildTask( logPath, project, - buildState -> { - baseMapper.updateBuildState(id, buildState.get()); - if (buildState == BuildState.SUCCESSFUL) { + buildStateEnum -> { + baseMapper.updateBuildState(id, buildStateEnum.get()); + if (buildStateEnum == BuildStateEnum.SUCCESSFUL) { baseMapper.updateBuildTime(id); } flinkAppHttpWatcher.init(); @@ -218,7 +218,7 @@ public void build(Long id) throws Exception { "update deploy by project: {}, appName:{}", project.getName(), app.getJobName()); - app.setRelease(ReleaseState.NEED_RELEASE.get()); + app.setRelease(ReleaseStateEnum.NEED_RELEASE.get()); app.setBuild(true); this.applicationManageService.updateRelease(app); }); @@ -235,7 +235,7 @@ public List modules(Long id) { Project project = getById(id); Utils.notNull(project); - if (BuildState.SUCCESSFUL != BuildState.of(project.getBuildState()) + if (BuildStateEnum.SUCCESSFUL != BuildStateEnum.of(project.getBuildState()) || !project.getDistHome().exists()) { return Collections.emptyList(); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java index 30fb4fdf89..3a5e885ff3 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/ResourceServiceImpl.java @@ -35,7 +35,7 @@ import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.entity.Resource; -import org.apache.streampark.console.core.enums.ResourceType; +import org.apache.streampark.console.core.enums.ResourceTypeEnum; import 
org.apache.streampark.console.core.mapper.ResourceMapper; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.FlinkSqlService; @@ -131,13 +131,13 @@ public void addResource(Resource resource) throws Exception { jars.isEmpty() && poms.isEmpty(), "Please add pom or jar resource."); ApiAlertException.throwIfTrue( - resource.getResourceType() == ResourceType.FLINK_APP && jars.isEmpty(), + resource.getResourceTypeEnum() == ResourceTypeEnum.FLINK_APP && jars.isEmpty(), "Please upload jar for Flink_App resource"); ApiAlertException.throwIfTrue( jars.size() + poms.size() > 1, "Please do not add multi dependency at one time."); - if (resource.getResourceType() != ResourceType.CONNECTOR) { + if (resource.getResourceTypeEnum() != ResourceTypeEnum.CONNECTOR) { ApiAlertException.throwIfNull(resource.getResourceName(), "The resourceName is required."); } else { String connector = resource.getConnector(); @@ -273,7 +273,7 @@ public String upload(MultipartFile file) throws IOException { @Override public RestResponse checkResource(Resource resourceParam) throws JsonProcessingException { - ResourceType type = resourceParam.getResourceType(); + ResourceTypeEnum type = resourceParam.getResourceTypeEnum(); Map resp = new HashMap<>(0); resp.put("state", 0); switch (type) { @@ -305,7 +305,7 @@ public RestResponse checkResource(Resource resourceParam) throws JsonProcessingE FlinkConnector connectorResource; ApiAlertException.throwIfFalse( - ResourceType.CONNECTOR == resourceParam.getResourceType(), + ResourceTypeEnum.CONNECTOR == resourceParam.getResourceTypeEnum(), "getConnectorId method error, resource not flink connector."); List jars; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java index 526ef2565c..d8aa71a9c1 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SavePointServiceImpl.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.service.impl; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.CompletableFutureUtils; import org.apache.streampark.common.util.ExceptionUtils; import org.apache.streampark.common.util.ThreadUtils; @@ -34,9 +34,9 @@ import org.apache.streampark.console.core.entity.FlinkCluster; import org.apache.streampark.console.core.entity.FlinkEnv; import org.apache.streampark.console.core.entity.SavePoint; -import org.apache.streampark.console.core.enums.CheckPointType; -import org.apache.streampark.console.core.enums.Operation; -import org.apache.streampark.console.core.enums.OptionState; +import org.apache.streampark.console.core.enums.CheckPointTypeEnum; +import org.apache.streampark.console.core.enums.OperationEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; import org.apache.streampark.console.core.mapper.SavePointMapper; import org.apache.streampark.console.core.service.ApplicationConfigService; import org.apache.streampark.console.core.service.ApplicationLogService; @@ -84,7 +84,7 @@ import static 
org.apache.flink.configuration.CheckpointingOptions.MAX_RETAINED_CHECKPOINTS; import static org.apache.flink.configuration.CheckpointingOptions.SAVEPOINT_DIRECTORY; import static org.apache.streampark.common.util.PropertiesUtils.extractDynamicPropertiesAsJava; -import static org.apache.streampark.console.core.enums.CheckPointType.CHECKPOINT; +import static org.apache.streampark.console.core.enums.CheckPointTypeEnum.CHECKPOINT; @Slf4j @Service @@ -171,7 +171,7 @@ public void trigger(Long appId, @Nullable String savepointPath, @Nullable Boolea Application application = applicationManageService.getById(appId); ApplicationLog applicationLog = new ApplicationLog(); - applicationLog.setOptionName(Operation.SAVEPOINT.getValue()); + applicationLog.setOptionName(OperationEnum.SAVEPOINT.getValue()); applicationLog.setAppId(application.getId()); applicationLog.setJobManagerUrl(application.getJobManagerUrl()); applicationLog.setOptionTime(new Date()); @@ -179,7 +179,7 @@ public void trigger(Long appId, @Nullable String savepointPath, @Nullable Boolea FlinkAppHttpWatcher.addSavepoint(application.getId()); - application.setOptionState(OptionState.SAVEPOINTING.getValue()); + application.setOptionState(OptionStateEnum.SAVEPOINTING.getValue()); application.setOptionTime(new Date()); this.applicationManageService.updateById(application); flinkAppHttpWatcher.init(); @@ -264,7 +264,7 @@ private void handleSavepointResponseFuture( .whenComplete( (t, e) -> { applicationLogService.save(applicationLog); - application.setOptionState(OptionState.NONE.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); application.setOptionTime(new Date()); applicationManageService.update(application); flinkAppHttpWatcher.init(); @@ -290,7 +290,7 @@ private String getFinalSavepointDir(@Nullable String savepointPath, Application private Map tryGetRestProps(Application application, FlinkCluster cluster) { Map properties = new HashMap<>(); - if (ExecutionMode.isRemoteMode(application.getExecutionModeEnum())) { + if (ExecutionModeEnum.isRemoteMode(application.getExecutionModeEnum())) { Utils.notNull( cluster, String.format( @@ -304,10 +304,10 @@ private Map tryGetRestProps(Application application, FlinkCluste } private String getClusterId(Application application, FlinkCluster cluster) { - if (ExecutionMode.isKubernetesMode(application.getExecutionMode())) { + if (ExecutionModeEnum.isKubernetesMode(application.getExecutionMode())) { return application.getClusterId(); - } else if (ExecutionMode.isYarnMode(application.getExecutionMode())) { - if (ExecutionMode.YARN_SESSION == application.getExecutionModeEnum()) { + } else if (ExecutionModeEnum.isYarnMode(application.getExecutionMode())) { + if (ExecutionModeEnum.YARN_SESSION == application.getExecutionModeEnum()) { Utils.notNull( cluster, String.format( @@ -368,7 +368,7 @@ public String getSavepointFromAppCfgIfStreamParkOrSQLJob(Application application public String getSavepointFromDeployLayer(Application application) throws JsonProcessingException { // At the yarn or k8s mode, then read the savepoint in flink-conf.yml in the bound flink - if (!ExecutionMode.isRemoteMode(application.getExecutionMode())) { + if (!ExecutionModeEnum.isRemoteMode(application.getExecutionMode())) { FlinkEnv flinkEnv = flinkEnvService.getById(application.getVersionId()); return flinkEnv.convertFlinkYamlAsMap().get(SAVEPOINT_DIRECTORY.key()); } @@ -446,7 +446,8 @@ private void expire(SavePoint entity) { int cpThreshold = 
tryGetChkNumRetainedFromDynamicProps(application.getDynamicProperties()) .orElse(getChkNumRetainedFromFlinkEnv(flinkEnv, application)); - cpThreshold = CHECKPOINT == CheckPointType.of(entity.getType()) ? cpThreshold - 1 : cpThreshold; + cpThreshold = + CHECKPOINT == CheckPointTypeEnum.of(entity.getType()) ? cpThreshold - 1 : cpThreshold; if (cpThreshold == 0) { LambdaQueryWrapper queryWrapper = diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SqlWorkBenchServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SqlWorkBenchServiceImpl.java index 3b7be04aeb..79ac65ca15 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SqlWorkBenchServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/SqlWorkBenchServiceImpl.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.service.impl; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.HadoopConfigUtils; import org.apache.streampark.console.core.entity.FlinkCluster; import org.apache.streampark.console.core.entity.FlinkEnv; @@ -27,7 +27,7 @@ import org.apache.streampark.console.core.service.FlinkGateWayService; import org.apache.streampark.console.core.service.SqlWorkBenchService; import org.apache.streampark.flink.kubernetes.KubernetesRetriever; -import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteMode; +import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteModeEnum; import org.apache.streampark.flink.kubernetes.ingress.IngressController; import org.apache.streampark.gateway.OperationHandle; import org.apache.streampark.gateway.factories.FactoryUtil; @@ -55,10 +55,6 @@ import java.util.Objects; import java.util.UUID; -import static org.apache.streampark.common.enums.ExecutionMode.KUBERNETES_NATIVE_SESSION; -import static org.apache.streampark.common.enums.ExecutionMode.REMOTE; -import static org.apache.streampark.common.enums.ExecutionMode.YARN_SESSION; - @Slf4j @Service @RequiredArgsConstructor @@ -103,13 +99,13 @@ public SessionHandle openSession(Long flinkGatewayId, Long flinkClusterId) { String port = String.valueOf(remoteURI.getPort()); String clusterId = flinkCluster.getClusterId(); - ExecutionMode executionMode = ExecutionMode.of(flinkCluster.getExecutionMode()); - if (executionMode == null) { + ExecutionModeEnum executionModeEnum = ExecutionModeEnum.of(flinkCluster.getExecutionMode()); + if (executionModeEnum == null) { throw new IllegalArgumentException("executionMode is null"); } - streamParkConf.put("execution.target", executionMode.getName()); - switch (Objects.requireNonNull(executionMode)) { + streamParkConf.put("execution.target", executionModeEnum.getName()); + switch (Objects.requireNonNull(executionModeEnum)) { case REMOTE: streamParkConf.put("rest.address", host); streamParkConf.put("rest.port", port); @@ -125,7 +121,7 @@ public SessionHandle openSession(Long flinkGatewayId, Long flinkClusterId) { try (ClusterClient clusterClient = (ClusterClient) KubernetesRetriever.newFinkClusterClient( - clusterId, k8sNamespace, FlinkK8sExecuteMode.of(executionMode))) { + clusterId, k8sNamespace, FlinkK8sExecuteModeEnum.of(executionModeEnum))) { restAddress = IngressController.ingressUrlAddress(k8sNamespace, 
clusterId, clusterClient); } catch (Exception e) { throw new IllegalArgumentException("get k8s rest address error", e); @@ -137,7 +133,7 @@ public SessionHandle openSession(Long flinkGatewayId, Long flinkClusterId) { streamParkConf.put("rest.address", restAddress); break; default: - throw new IllegalArgumentException("Unsupported execution mode: " + executionMode); + throw new IllegalArgumentException("Unsupported execution mode: " + executionModeEnum); } return sqlGateWayService.openSession( diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java index 830fb3de16..e4de5d58f6 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/VariableServiceImpl.java @@ -24,7 +24,7 @@ import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.FlinkSql; import org.apache.streampark.console.core.entity.Variable; -import org.apache.streampark.console.core.enums.ReleaseState; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; import org.apache.streampark.console.core.mapper.VariableMapper; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.FlinkSqlService; @@ -142,7 +142,7 @@ public void updateVariable(Variable variable) { .in( Application::getId, applications.stream().map(Application::getId).collect(Collectors.toList())) - .set(Application::getRelease, ReleaseState.NEED_RESTART.get())); + .set(Application::getRelease, ReleaseStateEnum.NEED_RESTART.get())); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java index ef9dd4f189..1eed756256 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/service/impl/YarnQueueServiceImpl.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.service.impl; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.Utils; import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.exception.ApiAlertException; @@ -172,12 +172,12 @@ public void deleteYarnQueue(YarnQueue yarnQueue) { * Only check the validation of queue-labelExpression when using yarn application or yarn-session * mode or yarn-perjob mode. * - * @param executionMode execution mode. + * @param executionModeEnum execution mode. * @param queueLabel queueLabel expression. 
*/ @Override - public void checkQueueLabel(ExecutionMode executionMode, String queueLabel) { - if (ExecutionMode.isYarnMode(executionMode)) { + public void checkQueueLabel(ExecutionModeEnum executionModeEnum, String queueLabel) { + if (ExecutionModeEnum.isYarnMode(executionModeEnum)) { ApiAlertException.throwIfFalse(isValid(queueLabel, true), ERR_FORMAT_HINTS); } } @@ -217,7 +217,7 @@ public YarnQueue getYarnQueueByIdWithPreconditions(YarnQueue yarnQueue) { public void checkNotReferencedByFlinkClusters( @Nonnull String queueLabel, @Nonnull String operation) { List clustersReferenceYarnQueueLabel = - flinkClusterService.getByExecutionModes(Sets.newHashSet(ExecutionMode.YARN_SESSION)) + flinkClusterService.getByExecutionModes(Sets.newHashSet(ExecutionModeEnum.YARN_SESSION)) .stream() .filter(flinkCluster -> StringUtils.equals(flinkCluster.getYarnQueue(), queueLabel)) .collect(Collectors.toList()); @@ -232,7 +232,8 @@ public void checkNotReferencedByApplications( List appsReferenceQueueLabel = applicationManageService .getByTeamIdAndExecutionModes( - teamId, Sets.newHashSet(ExecutionMode.YARN_APPLICATION, ExecutionMode.YARN_PER_JOB)) + teamId, + Sets.newHashSet(ExecutionModeEnum.YARN_APPLICATION, ExecutionModeEnum.YARN_PER_JOB)) .stream() .filter( application -> { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppHttpWatcher.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppHttpWatcher.java index ce034f4048..ff334915db 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppHttpWatcher.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppHttpWatcher.java @@ -17,7 +17,7 @@ package org.apache.streampark.console.core.task; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.HttpClientUtils; import org.apache.streampark.common.util.ThreadUtils; import org.apache.streampark.common.util.YarnUtils; @@ -25,10 +25,10 @@ import org.apache.streampark.console.core.bean.AlertTemplate; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.FlinkCluster; -import org.apache.streampark.console.core.enums.FlinkAppState; -import org.apache.streampark.console.core.enums.OptionState; -import org.apache.streampark.console.core.enums.ReleaseState; -import org.apache.streampark.console.core.enums.StopFrom; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; +import org.apache.streampark.console.core.enums.ReleaseStateEnum; +import org.apache.streampark.console.core.enums.StopFromEnum; import org.apache.streampark.console.core.metrics.flink.CheckPoints; import org.apache.streampark.console.core.metrics.flink.JobsOverview; import org.apache.streampark.console.core.metrics.flink.Overview; @@ -132,7 +132,7 @@ public class FlinkAppHttpWatcher { * and needs to be manually specified when started again. 
* */ - private static final Map STOP_FROM_MAP = new ConcurrentHashMap<>(0); + private static final Map STOP_FROM_MAP = new ConcurrentHashMap<>(0); /** * Cancelling tasks are placed in this cache with an expiration time of 10 seconds (the time of 2 @@ -148,7 +148,7 @@ public class FlinkAppHttpWatcher { private static final Map FLINK_CLUSTER_MAP = new ConcurrentHashMap<>(0); - private static final Map OPTIONING = new ConcurrentHashMap<>(0); + private static final Map OPTIONING = new ConcurrentHashMap<>(0); private Long lastWatchTime = 0L; @@ -172,8 +172,8 @@ public void init() { applicationManageService.list( new LambdaQueryWrapper() .eq(Application::getTracking, 1) - .ne(Application::getState, FlinkAppState.LOST.getValue()) - .notIn(Application::getExecutionMode, ExecutionMode.getKubernetesMode())); + .ne(Application::getState, FlinkAppStateEnum.LOST.getValue()) + .notIn(Application::getExecutionMode, ExecutionModeEnum.getKubernetesMode())); applications.forEach( (app) -> { WATCHING_APPS.put(app.getId(), app); @@ -209,7 +209,7 @@ public void start() { } @VisibleForTesting - public @Nullable FlinkAppState tryQueryFlinkAppState(@Nonnull Long appId) { + public @Nullable FlinkAppStateEnum tryQueryFlinkAppState(@Nonnull Long appId) { Application app = WATCHING_APPS.get(appId); return (app == null || app.getState() == null) ? null : app.getStateEnum(); } @@ -231,8 +231,8 @@ private void watch(Long id, Application application) { }); } - private StopFrom getAppStopFrom(Long appId) { - return STOP_FROM_MAP.getOrDefault(appId, StopFrom.NONE); + private StopFromEnum getAppStopFrom(Long appId) { + return STOP_FROM_MAP.getOrDefault(appId, StopFromEnum.NONE); } /** @@ -244,8 +244,9 @@ private StopFrom getAppStopFrom(Long appId) { private void getStateFromFlink(Application application) throws Exception { JobsOverview jobsOverview = httpJobsOverview(application); Optional optional; - ExecutionMode execMode = application.getExecutionModeEnum(); - if (ExecutionMode.YARN_APPLICATION == execMode || ExecutionMode.YARN_PER_JOB == execMode) { + ExecutionModeEnum execMode = application.getExecutionModeEnum(); + if (ExecutionModeEnum.YARN_APPLICATION == execMode + || ExecutionModeEnum.YARN_PER_JOB == execMode) { optional = !jobsOverview.getJobs().isEmpty() ? 
jobsOverview.getJobs().stream() @@ -261,9 +262,9 @@ private void getStateFromFlink(Application application) throws Exception { if (optional.isPresent()) { JobsOverview.Job jobOverview = optional.get(); - FlinkAppState currentState = FlinkAppState.of(jobOverview.getState()); + FlinkAppStateEnum currentState = FlinkAppStateEnum.of(jobOverview.getState()); - if (FlinkAppState.OTHER != currentState) { + if (FlinkAppStateEnum.OTHER != currentState) { try { // 1) set info from JobOverview handleJobOverview(application, jobOverview); @@ -277,11 +278,11 @@ private void getStateFromFlink(Application application) throws Exception { log.error("get flink jobOverview error: {}", e.getMessage(), e); } // 3) savePoint obsolete check and NEED_START check - OptionState optionState = OPTIONING.get(application.getId()); - if (FlinkAppState.RUNNING == currentState) { - handleRunningState(application, optionState, currentState); + OptionStateEnum optionStateEnum = OPTIONING.get(application.getId()); + if (FlinkAppStateEnum.RUNNING == currentState) { + handleRunningState(application, optionStateEnum, currentState); } else { - handleNotRunState(application, optionState, currentState); + handleNotRunState(application, optionStateEnum, currentState); } } } @@ -294,68 +295,68 @@ private void getStateFromFlink(Application application) throws Exception { * @param application application */ private void getStateFromYarn(Application application) throws Exception { - OptionState optionState = OPTIONING.get(application.getId()); + OptionStateEnum optionStateEnum = OPTIONING.get(application.getId()); /* If the status of the last time is CANCELING (flink rest server is not closed at the time of getting information) and the status is not obtained this time (flink rest server is closed), the task is considered CANCELED */ Byte flag = CANCELING_CACHE.getIfPresent(application.getId()); - StopFrom stopFrom = getAppStopFrom(application.getId()); + StopFromEnum stopFromEnum = getAppStopFrom(application.getId()); if (flag != null) { log.info("FlinkAppHttpWatcher previous state: canceling."); - if (stopFrom.isNone()) { + if (stopFromEnum.isNone()) { log.error( "FlinkAppHttpWatcher query previous state was canceling and stopFrom NotFound,savePoint expired!"); savePointService.expire(application.getId()); } - application.setState(FlinkAppState.CANCELED.getValue()); + application.setState(FlinkAppStateEnum.CANCELED.getValue()); cleanSavepoint(application); - cleanOptioning(optionState, application.getId()); + cleanOptioning(optionStateEnum, application.getId()); doPersistMetrics(application, true); } else { // query the status from the yarn rest Api YarnAppInfo yarnAppInfo = httpYarnAppInfo(application); if (yarnAppInfo == null) { - if (ExecutionMode.REMOTE != application.getExecutionModeEnum()) { + if (ExecutionModeEnum.REMOTE != application.getExecutionModeEnum()) { throw new RuntimeException("FlinkAppHttpWatcher getStateFromYarn failed "); } } else { try { String state = yarnAppInfo.getApp().getFinalStatus(); - FlinkAppState flinkAppState = FlinkAppState.of(state); - if (FlinkAppState.OTHER == flinkAppState) { + FlinkAppStateEnum flinkAppStateEnum = FlinkAppStateEnum.of(state); + if (FlinkAppStateEnum.OTHER == flinkAppStateEnum) { return; } - if (FlinkAppState.KILLED == flinkAppState) { - if (stopFrom.isNone()) { + if (FlinkAppStateEnum.KILLED == flinkAppStateEnum) { + if (stopFromEnum.isNone()) { log.error( "FlinkAppHttpWatcher getStateFromYarn,job was killed and stopFrom NotFound,savePoint expired!"); 
savePointService.expire(application.getId()); } - flinkAppState = FlinkAppState.CANCELED; + flinkAppStateEnum = FlinkAppStateEnum.CANCELED; cleanSavepoint(application); application.setEndTime(new Date()); } - if (FlinkAppState.SUCCEEDED == flinkAppState) { - flinkAppState = FlinkAppState.FINISHED; + if (FlinkAppStateEnum.SUCCEEDED == flinkAppStateEnum) { + flinkAppStateEnum = FlinkAppStateEnum.FINISHED; } - application.setState(flinkAppState.getValue()); - cleanOptioning(optionState, application.getId()); + application.setState(flinkAppStateEnum.getValue()); + cleanOptioning(optionStateEnum, application.getId()); doPersistMetrics(application, true); - if (FlinkAppState.FAILED == flinkAppState - || FlinkAppState.LOST == flinkAppState - || (FlinkAppState.CANCELED == flinkAppState && stopFrom.isNone()) + if (FlinkAppStateEnum.FAILED == flinkAppStateEnum + || FlinkAppStateEnum.LOST == flinkAppStateEnum + || (FlinkAppStateEnum.CANCELED == flinkAppStateEnum && stopFromEnum.isNone()) || applicationInfoService.checkAlter(application)) { - doAlert(application, flinkAppState); + doAlert(application, flinkAppStateEnum); stopCanceledJob(application.getId()); - if (FlinkAppState.FAILED == flinkAppState) { + if (FlinkAppStateEnum.FAILED == flinkAppStateEnum) { applicationActionService.start(application, true); } } } catch (Exception e) { - if (ExecutionMode.REMOTE != application.getExecutionModeEnum()) { + if (ExecutionModeEnum.REMOTE != application.getExecutionModeEnum()) { throw new RuntimeException("FlinkAppHttpWatcher getStateFromYarn error,", e); } } @@ -368,19 +369,19 @@ private void doStateFailed(Application application) { Query from flink's restAPI and yarn's restAPI both failed. In this case, it is necessary to decide whether to return to the final state depending on the state being operated */ - final OptionState optionState = OPTIONING.get(application.getId()); - if (OptionState.STARTING != optionState) { + final OptionStateEnum optionStateEnum = OPTIONING.get(application.getId()); + if (OptionStateEnum.STARTING != optionStateEnum) { // non-mapping - if (application.getStateEnum() != FlinkAppState.MAPPING) { + if (application.getStateEnum() != FlinkAppStateEnum.MAPPING) { log.error( "FlinkAppHttpWatcher getStateFromFlink and getStateFromYARN error,job failed, savePoint expired!"); - StopFrom stopFrom = getAppStopFrom(application.getId()); - if (stopFrom.isNone()) { + StopFromEnum stopFromEnum = getAppStopFrom(application.getId()); + if (stopFromEnum.isNone()) { savePointService.expire(application.getId()); - application.setState(FlinkAppState.LOST.getValue()); - doAlert(application, FlinkAppState.LOST); + application.setState(FlinkAppStateEnum.LOST.getValue()); + doAlert(application, FlinkAppStateEnum.LOST); } else { - application.setState(FlinkAppState.CANCELED.getValue()); + application.setState(FlinkAppStateEnum.CANCELED.getValue()); } } /* @@ -390,12 +391,12 @@ private void doStateFailed(Application application) { */ application.setEndTime(new Date()); cleanSavepoint(application); - cleanOptioning(optionState, application.getId()); + cleanOptioning(optionStateEnum, application.getId()); doPersistMetrics(application, true); - FlinkAppState appState = application.getStateEnum(); - if (FlinkAppState.FAILED == appState || FlinkAppState.LOST == appState) { + FlinkAppStateEnum appState = application.getStateEnum(); + if (FlinkAppStateEnum.FAILED == appState || FlinkAppStateEnum.LOST == appState) { doAlert(application, application.getStateEnum()); - if (FlinkAppState.FAILED == appState) { + if 
(FlinkAppStateEnum.FAILED == appState) { try { applicationActionService.start(application, true); } catch (Exception e) { @@ -455,11 +456,11 @@ private void handleCheckPoints(Application application) throws Exception { * Handle running task * * @param application application - * @param optionState optionState + * @param optionStateEnum optionState * @param currentState currentState */ private void handleRunningState( - Application application, OptionState optionState, FlinkAppState currentState) { + Application application, OptionStateEnum optionStateEnum, FlinkAppStateEnum currentState) { /* if the last recorded state is STARTING and the latest state obtained this time is RUNNING, which means it is the first tracking after restart. @@ -469,16 +470,16 @@ private void handleRunningState( NEED_RESTART_AFTER_ROLLBACK (Need to restart after rollback) NEED_RESTART_AFTER_DEPLOY (Need to rollback after deploy) */ - if (OptionState.STARTING == optionState) { + if (OptionStateEnum.STARTING == optionStateEnum) { Application latestApp = WATCHING_APPS.get(application.getId()); - ReleaseState releaseState = latestApp.getReleaseState(); - switch (releaseState) { + ReleaseStateEnum releaseStateEnum = latestApp.getReleaseState(); + switch (releaseStateEnum) { case NEED_RESTART: case NEED_ROLLBACK: LambdaUpdateWrapper updateWrapper = new LambdaUpdateWrapper() .eq(Application::getId, application.getId()) - .set(Application::getRelease, ReleaseState.DONE.get()); + .set(Application::getRelease, ReleaseStateEnum.DONE.get()); applicationManageService.update(updateWrapper); break; default: @@ -489,17 +490,17 @@ private void handleRunningState( // The current state is running, and there is a current task in the savePointCache, // indicating that the task is doing savepoint if (SAVEPOINT_CACHE.getIfPresent(application.getId()) != null) { - application.setOptionState(OptionState.SAVEPOINTING.getValue()); + application.setOptionState(OptionStateEnum.SAVEPOINTING.getValue()); } else { - application.setOptionState(OptionState.NONE.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); } application.setState(currentState.getValue()); doPersistMetrics(application, false); - cleanOptioning(optionState, application.getId()); + cleanOptioning(optionStateEnum, application.getId()); } private void doPersistMetrics(Application application, boolean stopWatch) { - if (FlinkAppState.isEndState(application.getState())) { + if (FlinkAppStateEnum.isEndState(application.getState())) { application.setOverview(null); application.setTotalTM(null); application.setTotalSlot(null); @@ -520,11 +521,11 @@ private void doPersistMetrics(Application application, boolean stopWatch) { * Handle not running task * * @param application application - * @param optionState optionState + * @param optionStateEnum optionState * @param currentState currentState */ private void handleNotRunState( - Application application, OptionState optionState, FlinkAppState currentState) + Application application, OptionStateEnum optionStateEnum, FlinkAppStateEnum currentState) throws Exception { switch (currentState) { @@ -540,26 +541,26 @@ private void handleNotRunState( currentState.name()); cleanSavepoint(application); application.setState(currentState.getValue()); - StopFrom stopFrom = getAppStopFrom(application.getId()); - if (stopFrom.isNone() || applicationInfoService.checkAlter(application)) { - if (stopFrom.isNone()) { + StopFromEnum stopFromEnum = getAppStopFrom(application.getId()); + if (stopFromEnum.isNone() || 
applicationInfoService.checkAlter(application)) { + if (stopFromEnum.isNone()) { log.info( "FlinkAppHttpWatcher getFromFlinkRestApi, job cancel is not form StreamPark,savePoint expired!"); savePointService.expire(application.getId()); } stopCanceledJob(application.getId()); - doAlert(application, FlinkAppState.CANCELED); + doAlert(application, FlinkAppStateEnum.CANCELED); } STOP_FROM_MAP.remove(application.getId()); doPersistMetrics(application, true); - cleanOptioning(optionState, application.getId()); + cleanOptioning(optionStateEnum, application.getId()); break; case FAILED: cleanSavepoint(application); STOP_FROM_MAP.remove(application.getId()); - application.setState(FlinkAppState.FAILED.getValue()); + application.setState(FlinkAppStateEnum.FAILED.getValue()); doPersistMetrics(application, true); - doAlert(application, FlinkAppState.FAILED); + doAlert(application, FlinkAppStateEnum.FAILED); applicationActionService.start(application, true); break; case RESTARTING: @@ -574,8 +575,8 @@ private void handleNotRunState( } } - private void cleanOptioning(OptionState optionState, Long key) { - if (optionState != null) { + private void cleanOptioning(OptionStateEnum optionStateEnum, Long key) { + if (optionStateEnum != null) { lastOptionTime = System.currentTimeMillis(); OPTIONING.remove(key); } @@ -583,18 +584,18 @@ private void cleanOptioning(OptionState optionState, Long key) { public void cleanSavepoint(Application application) { SAVEPOINT_CACHE.invalidate(application.getId()); - application.setOptionState(OptionState.NONE.getValue()); + application.setOptionState(OptionStateEnum.NONE.getValue()); } /** set current option state */ - public static void setOptionState(Long appId, OptionState state) { + public static void setOptionState(Long appId, OptionStateEnum state) { if (isKubernetesApp(appId)) { return; } log.info("FlinkAppHttpWatcher setOptioning"); OPTIONING.put(appId, state); - if (OptionState.CANCELLING == state) { - STOP_FROM_MAP.put(appId, StopFrom.STREAMPARK); + if (OptionStateEnum.CANCELLING == state) { + STOP_FROM_MAP.put(appId, StopFromEnum.STREAMPARK); } } @@ -661,8 +662,8 @@ private YarnAppInfo httpYarnAppInfo(Application application) throws Exception { private Overview httpOverview(Application application) throws IOException { String appId = application.getAppId(); if (appId != null) { - if (ExecutionMode.YARN_APPLICATION == application.getExecutionModeEnum() - || ExecutionMode.YARN_PER_JOB == application.getExecutionModeEnum()) { + if (ExecutionModeEnum.YARN_APPLICATION == application.getExecutionModeEnum() + || ExecutionModeEnum.YARN_PER_JOB == application.getExecutionModeEnum()) { String reqURL; if (StringUtils.isBlank(application.getJobManagerUrl())) { String format = "proxy/%s/overview"; @@ -679,8 +680,8 @@ private Overview httpOverview(Application application) throws IOException { private JobsOverview httpJobsOverview(Application application) throws Exception { final String flinkUrl = "jobs/overview"; - ExecutionMode execMode = application.getExecutionModeEnum(); - if (ExecutionMode.isYarnMode(execMode)) { + ExecutionModeEnum execMode = application.getExecutionModeEnum(); + if (ExecutionModeEnum.isYarnMode(execMode)) { String reqURL; if (StringUtils.isBlank(application.getJobManagerUrl())) { String format = "proxy/%s/" + flinkUrl; @@ -692,7 +693,7 @@ private JobsOverview httpJobsOverview(Application application) throws Exception return yarnRestRequest(reqURL, JobsOverview.class); } - if (application.getJobId() != null && ExecutionMode.isRemoteMode(execMode)) { + if 
(application.getJobId() != null && ExecutionModeEnum.isRemoteMode(execMode)) { return httpRemoteCluster( application.getFlinkClusterId(), cluster -> { @@ -713,8 +714,8 @@ private JobsOverview httpJobsOverview(Application application) throws Exception private CheckPoints httpCheckpoints(Application application) throws Exception { final String flinkUrl = "jobs/%s/checkpoints"; - ExecutionMode execMode = application.getExecutionModeEnum(); - if (ExecutionMode.isYarnMode(execMode)) { + ExecutionModeEnum execMode = application.getExecutionModeEnum(); + if (ExecutionModeEnum.isYarnMode(execMode)) { String reqURL; if (StringUtils.isBlank(application.getJobManagerUrl())) { String format = "proxy/%s/" + flinkUrl; @@ -726,7 +727,7 @@ private CheckPoints httpCheckpoints(Application application) throws Exception { return yarnRestRequest(reqURL, CheckPoints.class); } - if (application.getJobId() != null && ExecutionMode.isRemoteMode(execMode)) { + if (application.getJobId() != null && ExecutionModeEnum.isRemoteMode(execMode)) { return httpRemoteCluster( application.getFlinkClusterId(), cluster -> { @@ -805,7 +806,7 @@ interface Callback { * @param app application * @param appState application state */ - private void doAlert(Application app, FlinkAppState appState) { + private void doAlert(Application app, FlinkAppStateEnum appState) { if (app.getProbing()) { log.info("application with id {} is probing, don't send alert", app.getId()); return; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppLostWatcher.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppLostWatcher.java index ebb3ab76ca..b0c0e29bbc 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppLostWatcher.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkAppLostWatcher.java @@ -20,7 +20,7 @@ import org.apache.streampark.console.core.bean.AlertProbeMsg; import org.apache.streampark.console.core.bean.AlertTemplate; import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; import org.apache.streampark.console.core.service.alert.AlertService; import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; @@ -39,7 +39,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; -import static org.apache.streampark.console.core.enums.FlinkAppState.LOST; +import static org.apache.streampark.console.core.enums.FlinkAppStateEnum.LOST; import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.Bridge.toTrackId; import static org.apache.streampark.console.core.task.FlinkK8sWatcherWrapper.isKubernetesApp; @@ -95,7 +95,7 @@ public void watch(List applications) { isProbing.set(true); probeApplication = probeApplication.stream() - .filter(application -> FlinkAppState.isLost(application.getState())) + .filter(application -> FlinkAppStateEnum.isLost(application.getState())) .collect(Collectors.toList()); updateState(probeApplication); probeApplication.stream().forEach(this::monitorApplication); @@ -103,10 +103,10 @@ public void watch(List applications) { private void updateState(List applications) { applications.stream() - 
.filter(application -> FlinkAppState.isLost(application.getState())) + .filter(application -> FlinkAppStateEnum.isLost(application.getState())) .forEach( application -> { - application.setState(FlinkAppState.PROBING.getValue()); + application.setState(FlinkAppStateEnum.PROBING.getValue()); application.setProbing(true); }); applicationManageService.updateBatchById(applications); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkCheckpointProcessor.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkCheckpointProcessor.java index 440170c426..4d134b27ae 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkCheckpointProcessor.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkCheckpointProcessor.java @@ -20,8 +20,8 @@ import org.apache.streampark.console.core.bean.AlertTemplate; import org.apache.streampark.console.core.entity.Application; import org.apache.streampark.console.core.entity.SavePoint; -import org.apache.streampark.console.core.enums.CheckPointStatus; -import org.apache.streampark.console.core.enums.FailoverStrategy; +import org.apache.streampark.console.core.enums.CheckPointStatusEnum; +import org.apache.streampark.console.core.enums.FailoverStrategyEnum; import org.apache.streampark.console.core.metrics.flink.CheckPoints; import org.apache.streampark.console.core.service.SavePointService; import org.apache.streampark.console.core.service.alert.AlertService; @@ -79,10 +79,10 @@ public void process(Application application, @Nonnull CheckPoints checkPoints) { private void process(Application application, @Nonnull CheckPoints.CheckPoint checkPoint) { String jobID = application.getJobId(); Long appId = application.getId(); - CheckPointStatus status = checkPoint.getCheckPointStatus(); + CheckPointStatusEnum status = checkPoint.getCheckPointStatus(); CheckPointKey checkPointKey = new CheckPointKey(appId, jobID, checkPoint.getId()); - if (CheckPointStatus.COMPLETED == status) { + if (CheckPointStatusEnum.COMPLETED == status) { if (shouldStoreAsSavepoint(checkPointKey, checkPoint)) { savepointedCache.put(checkPointKey.getSavePointId(), DEFAULT_FLAG_BYTE); saveSavepoint(checkPoint, application.getId()); @@ -104,15 +104,17 @@ private void process(Application application, @Nonnull CheckPoints.CheckPoint ch if (minute <= application.getCpFailureRateInterval() && counter.getCount() >= application.getCpMaxFailureInterval()) { checkPointFailedCache.remove(appId); - FailoverStrategy failoverStrategy = FailoverStrategy.of(application.getCpFailureAction()); - if (failoverStrategy == null) { + FailoverStrategyEnum failoverStrategyEnum = + FailoverStrategyEnum.of(application.getCpFailureAction()); + if (failoverStrategyEnum == null) { throw new IllegalArgumentException( "Unexpected cpFailureAction: " + application.getCpFailureAction()); } - switch (failoverStrategy) { + switch (failoverStrategyEnum) { case ALERT: alertService.alert( - application.getAlertId(), AlertTemplate.of(application, CheckPointStatus.FAILED)); + application.getAlertId(), + AlertTemplate.of(application, CheckPointStatusEnum.FAILED)); break; case RESTART: try { @@ -160,8 +162,8 @@ private Long getLatestCheckpointedId(Long appId, String cacheId) { } private boolean shouldProcessFailedTrigger( - CheckPoints.CheckPoint checkPoint, boolean cpFailedTrigger, CheckPointStatus 
status) { - return CheckPointStatus.FAILED == status && !checkPoint.getIsSavepoint() && cpFailedTrigger; + CheckPoints.CheckPoint checkPoint, boolean cpFailedTrigger, CheckPointStatusEnum status) { + return CheckPointStatusEnum.FAILED == status && !checkPoint.getIsSavepoint() && cpFailedTrigger; } private void saveSavepoint(CheckPoints.CheckPoint checkPoint, Long appId) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java index f94df947b1..0d697319ee 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkClusterWatcher.java @@ -19,8 +19,8 @@ import org.apache.streampark.common.conf.CommonConfig; import org.apache.streampark.common.conf.InternalConfigHolder; -import org.apache.streampark.common.enums.ClusterState; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ClusterStateEnum; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.HadoopUtils; import org.apache.streampark.common.util.HttpClientUtils; import org.apache.streampark.common.util.ThreadUtils; @@ -77,7 +77,7 @@ public class FlinkClusterWatcher { /** Watcher cluster lists */ private static final Map WATCHER_CLUSTERS = new ConcurrentHashMap<>(8); - private static final Cache FAILED_STATES = + private static final Cache FAILED_STATES = Caffeine.newBuilder().expireAfterWrite(WATCHER_INTERVAL).build(); private boolean immediateWatch = false; @@ -99,9 +99,9 @@ private void init() { List flinkClusters = flinkClusterService.list( new LambdaQueryWrapper() - .eq(FlinkCluster::getClusterState, ClusterState.RUNNING.getState()) + .eq(FlinkCluster::getClusterState, ClusterStateEnum.RUNNING.getState()) // excluding flink clusters on kubernetes - .notIn(FlinkCluster::getExecutionMode, ExecutionMode.getKubernetesMode())); + .notIn(FlinkCluster::getExecutionMode, ExecutionModeEnum.getKubernetesMode())); flinkClusters.forEach(cluster -> WATCHER_CLUSTERS.put(cluster.getId(), cluster)); } @@ -115,7 +115,7 @@ private void start() { (aLong, flinkCluster) -> EXECUTOR.execute( () -> { - ClusterState state = getClusterState(flinkCluster); + ClusterStateEnum state = getClusterState(flinkCluster); switch (state) { case FAILED: case LOST: @@ -132,7 +132,7 @@ private void start() { } } - private void alert(FlinkCluster cluster, ClusterState state) { + private void alert(FlinkCluster cluster, ClusterStateEnum state) { if (cluster.getAlertId() != null) { cluster.setAllJobs(applicationInfoService.countByClusterId(cluster.getId())); cluster.setAffectedJobs( @@ -150,13 +150,13 @@ private void alert(FlinkCluster cluster, ClusterState state) { * @param flinkCluster The FlinkCluster object representing the cluster. * @return The ClusterState object representing the state of the cluster. 
*/ - public ClusterState getClusterState(FlinkCluster flinkCluster) { - ClusterState state = FAILED_STATES.getIfPresent(flinkCluster.getId()); + public ClusterStateEnum getClusterState(FlinkCluster flinkCluster) { + ClusterStateEnum state = FAILED_STATES.getIfPresent(flinkCluster.getId()); if (state != null) { return state; } state = httpClusterState(flinkCluster); - if (ClusterState.isRunning(state)) { + if (ClusterStateEnum.isRunning(state)) { FAILED_STATES.invalidate(flinkCluster.getId()); } else { immediateWatch = true; @@ -171,7 +171,7 @@ public ClusterState getClusterState(FlinkCluster flinkCluster) { * @param flinkCluster The FlinkCluster object representing the cluster. * @return The ClusterState object representing the state of the cluster. */ - private ClusterState httpRemoteClusterState(FlinkCluster flinkCluster) { + private ClusterStateEnum httpRemoteClusterState(FlinkCluster flinkCluster) { return getStateFromFlinkRestApi(flinkCluster); } @@ -181,9 +181,9 @@ private ClusterState httpRemoteClusterState(FlinkCluster flinkCluster) { * @param flinkCluster * @return */ - private ClusterState httpYarnSessionClusterState(FlinkCluster flinkCluster) { - ClusterState state = getStateFromFlinkRestApi(flinkCluster); - if (ClusterState.LOST == state) { + private ClusterStateEnum httpYarnSessionClusterState(FlinkCluster flinkCluster) { + ClusterStateEnum state = getStateFromFlinkRestApi(flinkCluster); + if (ClusterStateEnum.LOST == state) { return getStateFromYarnRestApi(flinkCluster); } return state; @@ -195,14 +195,14 @@ private ClusterState httpYarnSessionClusterState(FlinkCluster flinkCluster) { * @param flinkCluster * @return */ - private ClusterState httpClusterState(FlinkCluster flinkCluster) { + private ClusterStateEnum httpClusterState(FlinkCluster flinkCluster) { switch (flinkCluster.getExecutionModeEnum()) { case REMOTE: return httpRemoteClusterState(flinkCluster); case YARN_SESSION: return httpYarnSessionClusterState(flinkCluster); default: - return ClusterState.UNKNOWN; + return ClusterStateEnum.UNKNOWN; } } @@ -212,7 +212,7 @@ private ClusterState httpClusterState(FlinkCluster flinkCluster) { * @param flinkCluster * @return */ - private ClusterState getStateFromFlinkRestApi(FlinkCluster flinkCluster) { + private ClusterStateEnum getStateFromFlinkRestApi(FlinkCluster flinkCluster) { String address = flinkCluster.getAddress(); String jobManagerUrl = flinkCluster.getJobManagerUrl(); String flinkUrl = @@ -225,11 +225,11 @@ private ClusterState getStateFromFlinkRestApi(FlinkCluster flinkCluster) { flinkUrl, RequestConfig.custom().setConnectTimeout(5000, TimeUnit.MILLISECONDS).build()); JacksonUtils.read(res, Overview.class); - return ClusterState.RUNNING; + return ClusterStateEnum.RUNNING; } catch (Exception ignored) { log.error("cluster id:{} get state from flink api failed", flinkCluster.getId()); } - return ClusterState.LOST; + return ClusterStateEnum.LOST; } /** @@ -238,12 +238,12 @@ private ClusterState getStateFromFlinkRestApi(FlinkCluster flinkCluster) { * @param flinkCluster * @return */ - private ClusterState getStateFromYarnRestApi(FlinkCluster flinkCluster) { + private ClusterStateEnum getStateFromYarnRestApi(FlinkCluster flinkCluster) { String yarnUrl = "ws/v1/cluster/apps/".concat(flinkCluster.getClusterId()); try { String result = YarnUtils.restRequest(yarnUrl); if (null == result) { - return ClusterState.UNKNOWN; + return ClusterStateEnum.UNKNOWN; } YarnAppInfo yarnAppInfo = JacksonUtils.read(result, YarnAppInfo.class); YarnApplicationState status = 
HadoopUtils.toYarnState(yarnAppInfo.getApp().getState()); @@ -251,11 +251,11 @@ private ClusterState getStateFromYarnRestApi(FlinkCluster flinkCluster) { log.error( "cluster id:{} final application status convert failed, invalid string ", flinkCluster.getId()); - return ClusterState.UNKNOWN; + return ClusterStateEnum.UNKNOWN; } return yarnStateConvertClusterState(status); } catch (Exception e) { - return ClusterState.LOST; + return ClusterStateEnum.LOST; } } @@ -265,7 +265,7 @@ private ClusterState getStateFromYarnRestApi(FlinkCluster flinkCluster) { * @param flinkCluster */ public static void addWatching(FlinkCluster flinkCluster) { - if (!ExecutionMode.isKubernetesMode(flinkCluster.getExecutionModeEnum()) + if (!ExecutionModeEnum.isKubernetesMode(flinkCluster.getExecutionModeEnum()) && !WATCHER_CLUSTERS.containsKey(flinkCluster.getId())) { log.info("add the cluster with id:{} to watcher cluster cache", flinkCluster.getId()); WATCHER_CLUSTERS.put(flinkCluster.getId(), flinkCluster); @@ -286,10 +286,10 @@ public static void unWatching(FlinkCluster flinkCluster) { * @param state * @return */ - private ClusterState yarnStateConvertClusterState(YarnApplicationState state) { + private ClusterStateEnum yarnStateConvertClusterState(YarnApplicationState state) { return state == YarnApplicationState.FINISHED - ? ClusterState.CANCELED - : ClusterState.of(state.toString()); + ? ClusterStateEnum.CANCELED + : ClusterStateEnum.of(state.toString()); } /** @@ -298,7 +298,7 @@ private ClusterState yarnStateConvertClusterState(YarnApplicationState state) { * @return false if the connection of the cluster is invalid, true else. */ public Boolean verifyClusterConnection(FlinkCluster flinkCluster) { - ClusterState clusterState = httpClusterState(flinkCluster); - return ClusterState.isRunning(clusterState); + ClusterStateEnum clusterStateEnum = httpClusterState(flinkCluster); + return ClusterStateEnum.isRunning(clusterStateEnum); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java index c097dfb5bb..c24bcd1d73 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sChangeEventListener.java @@ -17,18 +17,18 @@ package org.apache.streampark.console.core.task; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.common.util.ThreadUtils; import org.apache.streampark.console.core.bean.AlertTemplate; import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.enums.FlinkAppState; -import org.apache.streampark.console.core.enums.OptionState; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; +import org.apache.streampark.console.core.enums.OptionStateEnum; import org.apache.streampark.console.core.metrics.flink.CheckPoints; import org.apache.streampark.console.core.service.alert.AlertService; import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.core.service.application.ApplicationManageService; -import org.apache.streampark.flink.kubernetes.enums.FlinkJobState; 
-import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteMode; +import org.apache.streampark.flink.kubernetes.enums.FlinkJobStateEnum; +import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteModeEnum; import org.apache.streampark.flink.kubernetes.event.FlinkClusterMetricChangeEvent; import org.apache.streampark.flink.kubernetes.event.FlinkJobCheckpointChangeEvent; import org.apache.streampark.flink.kubernetes.event.FlinkJobStatusChangeEvent; @@ -52,8 +52,8 @@ import scala.Enumeration; -import static org.apache.streampark.console.core.enums.FlinkAppState.Bridge.fromK8sFlinkJobState; -import static org.apache.streampark.console.core.enums.FlinkAppState.Bridge.toK8sFlinkJobState; +import static org.apache.streampark.console.core.enums.FlinkAppStateEnum.Bridge.fromK8sFlinkJobState; +import static org.apache.streampark.console.core.enums.FlinkAppStateEnum.Bridge.toK8sFlinkJobState; /** * Event Listener for K8sFlinkTrackMonitor。 @@ -104,11 +104,11 @@ public void subscribeJobStatusChange(FlinkJobStatusChangeEvent event) { applicationInfoService.persistMetrics(app); // email alerts when necessary - FlinkAppState state = app.getStateEnum(); - if (FlinkAppState.FAILED == state - || FlinkAppState.LOST == state - || FlinkAppState.RESTARTING == state - || FlinkAppState.FINISHED == state) { + FlinkAppStateEnum state = app.getStateEnum(); + if (FlinkAppStateEnum.FAILED == state + || FlinkAppStateEnum.LOST == state + || FlinkAppStateEnum.RESTARTING == state + || FlinkAppStateEnum.FINISHED == state) { executor.execute( () -> { if (app.getProbing()) { @@ -129,9 +129,9 @@ public void subscribeJobStatusChange(FlinkJobStatusChangeEvent event) { @Subscribe public void subscribeMetricsChange(FlinkClusterMetricChangeEvent event) { TrackId trackId = event.trackId(); - ExecutionMode mode = FlinkK8sExecuteMode.toExecutionMode(trackId.executeMode()); + ExecutionModeEnum mode = FlinkK8sExecuteModeEnum.toExecutionMode(trackId.executeMode()); // discard session mode change - if (ExecutionMode.KUBERNETES_NATIVE_SESSION == mode) { + if (ExecutionModeEnum.KUBERNETES_NATIVE_SESSION == mode) { return; } @@ -184,7 +184,7 @@ private void setByJobStatusCV(Application app, JobStatusCV jobStatus) { long endTime = Math.max(jobStatus.jobEndTime(), preEndTime); long duration = jobStatus.duration(); - if (FlinkJobState.isEndState(state)) { + if (FlinkJobStateEnum.isEndState(state)) { if (endTime < startTime) { endTime = System.currentTimeMillis(); } @@ -202,6 +202,6 @@ private void setByJobStatusCV(Application app, JobStatusCV jobStatus) { app.setDuration(duration > 0 ? duration : 0); // when a flink job status change event can be received, it means // that the operation command sent by streampark has been completed. 
- app.setOptionState(OptionState.NONE.getValue()); + app.setOptionState(OptionStateEnum.NONE.getValue()); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java index 6c82fe142b..ca6dd12a80 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/FlinkK8sWatcherWrapper.java @@ -18,15 +18,15 @@ package org.apache.streampark.console.core.task; import org.apache.streampark.common.conf.K8sFlinkConfig; -import org.apache.streampark.common.enums.ExecutionMode; +import org.apache.streampark.common.enums.ExecutionModeEnum; import org.apache.streampark.console.core.entity.Application; -import org.apache.streampark.console.core.enums.FlinkAppState; +import org.apache.streampark.console.core.enums.FlinkAppStateEnum; import org.apache.streampark.console.core.service.application.ApplicationManageService; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcher; import org.apache.streampark.flink.kubernetes.FlinkK8sWatcherFactory; import org.apache.streampark.flink.kubernetes.FlinkTrackConfig; -import org.apache.streampark.flink.kubernetes.enums.FlinkJobState; -import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteMode; +import org.apache.streampark.flink.kubernetes.enums.FlinkJobStateEnum; +import org.apache.streampark.flink.kubernetes.enums.FlinkK8sExecuteModeEnum; import org.apache.streampark.flink.kubernetes.model.TrackId; import org.apache.commons.collections.CollectionUtils; @@ -45,7 +45,7 @@ import scala.Enumeration; -import static org.apache.streampark.console.core.enums.FlinkAppState.Bridge.toK8sFlinkJobState; +import static org.apache.streampark.console.core.enums.FlinkAppStateEnum.Bridge.toK8sFlinkJobState; /** * Flink K8s Tracking Monitor Wrapper. 
@@ -99,8 +99,8 @@ private List getK8sWatchingApps() { final LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper<>(); queryWrapper .eq(Application::getTracking, 1) - .ne(Application::getState, FlinkAppState.LOST.getValue()) - .in(Application::getExecutionMode, ExecutionMode.getKubernetesMode()); + .ne(Application::getState, FlinkAppStateEnum.LOST.getValue()) + .in(Application::getExecutionMode, ExecutionModeEnum.getKubernetesMode()); List k8sApplication = applicationManageService.list(queryWrapper); if (CollectionUtils.isEmpty(k8sApplication)) { @@ -116,7 +116,7 @@ private List getK8sWatchingApps() { } // filter out the application that should be tracking return k8sApplication.stream() - .filter(app -> !FlinkJobState.isEndState(toK8sFlinkJobState(app.getStateEnum()))) + .filter(app -> !FlinkJobStateEnum.isEndState(toK8sFlinkJobState(app.getStateEnum()))) .map(Bridge::toTrackId) .collect(Collectors.toList()); } @@ -126,15 +126,15 @@ public static class Bridge { // covert Application to TrackId public static TrackId toTrackId(@Nonnull Application app) { - Enumeration.Value mode = FlinkK8sExecuteMode.of(app.getExecutionModeEnum()); - if (FlinkK8sExecuteMode.APPLICATION() == mode) { + Enumeration.Value mode = FlinkK8sExecuteModeEnum.of(app.getExecutionModeEnum()); + if (FlinkK8sExecuteModeEnum.APPLICATION() == mode) { return TrackId.onApplication( app.getK8sNamespace(), app.getClusterId(), app.getId(), app.getJobId(), app.getTeamId().toString()); - } else if (FlinkK8sExecuteMode.SESSION() == mode) { + } else if (FlinkK8sExecuteModeEnum.SESSION() == mode) { return TrackId.onSession( app.getK8sNamespace(), app.getClusterId(), @@ -153,6 +153,6 @@ public static boolean isKubernetesApp(Application application) { if (application == null) { return false; } - return ExecutionMode.isKubernetesMode(application.getExecutionMode()); + return ExecutionModeEnum.isKubernetesMode(application.getExecutionMode()); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/ProjectBuildTask.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/ProjectBuildTask.java index b423b0527e..ecabb95ad7 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/ProjectBuildTask.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/core/task/ProjectBuildTask.java @@ -21,7 +21,7 @@ import org.apache.streampark.common.util.Utils; import org.apache.streampark.console.base.util.GitUtils; import org.apache.streampark.console.core.entity.Project; -import org.apache.streampark.console.core.enums.BuildState; +import org.apache.streampark.console.core.enums.BuildStateEnum; import ch.qos.logback.classic.Logger; import lombok.extern.slf4j.Slf4j; @@ -40,14 +40,14 @@ public class ProjectBuildTask extends AbstractLogFileTask { final Project project; - final Consumer stateUpdateConsumer; + final Consumer stateUpdateConsumer; final Consumer notifyReleaseConsumer; public ProjectBuildTask( String logPath, Project project, - Consumer stateUpdateConsumer, + Consumer stateUpdateConsumer, Consumer notifyReleaseConsumer) { super(logPath, true); this.project = project; @@ -62,23 +62,23 @@ protected void doRun() throws Throwable { boolean cloneSuccess = cloneSourceCode(project); if (!cloneSuccess) { fileLogger.error("[StreamPark] clone or pull error."); - stateUpdateConsumer.accept(BuildState.FAILED); + 
stateUpdateConsumer.accept(BuildStateEnum.FAILED); return; } boolean build = projectBuild(project); if (!build) { - stateUpdateConsumer.accept(BuildState.FAILED); + stateUpdateConsumer.accept(BuildStateEnum.FAILED); fileLogger.error("build error, project name: {} ", project.getName()); return; } - stateUpdateConsumer.accept(BuildState.SUCCESSFUL); + stateUpdateConsumer.accept(BuildStateEnum.SUCCESSFUL); this.deploy(project); notifyReleaseConsumer.accept(fileLogger); } @Override protected void processException(Throwable t) { - stateUpdateConsumer.accept(BuildState.FAILED); + stateUpdateConsumer.accept(BuildStateEnum.FAILED); fileLogger.error("Build error, project name: {}", project.getName(), t); } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/AccessTokenController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/AccessTokenController.java index 9a58bd8b67..7deea1c235 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/AccessTokenController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/AccessTokenController.java @@ -21,7 +21,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.domain.RestResponse; import org.apache.streampark.console.base.exception.InternalException; -import org.apache.streampark.console.core.enums.AccessTokenState; +import org.apache.streampark.console.core.enums.AccessTokenStateEnum; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.system.entity.AccessToken; import org.apache.streampark.console.system.service.AccessTokenService; @@ -92,14 +92,14 @@ public RestResponse verifyToken() { if (userId != null) { AccessToken accessToken = accessTokenService.getByUserId(userId); if (accessToken == null) { - restResponse.data(AccessTokenState.NULL.get()); + restResponse.data(AccessTokenStateEnum.NULL.get()); } else if (AccessToken.STATUS_DISABLE.equals(accessToken.getFinalStatus())) { - restResponse.data(AccessTokenState.INVALID.get()); + restResponse.data(AccessTokenStateEnum.INVALID.get()); } else { - restResponse.data(AccessTokenState.OK.get()); + restResponse.data(AccessTokenStateEnum.OK.get()); } } else { - restResponse.data(AccessTokenState.INVALID.get()); + restResponse.data(AccessTokenStateEnum.INVALID.get()); } return restResponse; } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/MemberController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/MemberController.java index bcbfd20467..809e1689d8 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/MemberController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/MemberController.java @@ -20,7 +20,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.domain.RestResponse; import org.apache.streampark.console.core.annotation.PermissionAction; -import org.apache.streampark.console.core.enums.PermissionType; +import org.apache.streampark.console.core.enums.PermissionTypeEnum; import 
org.apache.streampark.console.system.entity.Member; import org.apache.streampark.console.system.entity.Team; import org.apache.streampark.console.system.entity.User; @@ -83,7 +83,7 @@ public RestResponse check(@NotBlank(message = "{required}") Long teamId, String } @Operation(summary = "Create member") - @PermissionAction(id = "#member.teamId", type = PermissionType.TEAM) + @PermissionAction(id = "#member.teamId", type = PermissionTypeEnum.TEAM) @PostMapping("post") @RequiresPermissions("member:add") public RestResponse create(@Valid Member member) { @@ -92,7 +92,7 @@ public RestResponse create(@Valid Member member) { } @Operation(summary = "Delete member") - @PermissionAction(id = "#member.teamId", type = PermissionType.TEAM) + @PermissionAction(id = "#member.teamId", type = PermissionTypeEnum.TEAM) @DeleteMapping("delete") @RequiresPermissions("member:delete") public RestResponse delete(Member member) { @@ -101,7 +101,7 @@ public RestResponse delete(Member member) { } @Operation(summary = "Update member") - @PermissionAction(id = "#member.teamId", type = PermissionType.TEAM) + @PermissionAction(id = "#member.teamId", type = PermissionTypeEnum.TEAM) @PutMapping("update") @RequiresPermissions("member:update") public RestResponse update(Member member) { diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java index 06956d1076..b12791771c 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/SsoController.java @@ -19,7 +19,7 @@ import org.apache.streampark.console.base.domain.RestResponse; import org.apache.streampark.console.base.exception.ApiAlertException; -import org.apache.streampark.console.core.enums.LoginType; +import org.apache.streampark.console.core.enums.LoginTypeEnum; import org.apache.streampark.console.system.entity.User; import org.apache.streampark.console.system.security.Authenticator; import org.apache.streampark.console.system.service.UserService; @@ -83,7 +83,7 @@ public RestResponse token() throws Exception { throw new ApiAlertException("Please configure the correct Principal Name Attribute"); } - User user = authenticator.authenticate(principal.getName(), null, LoginType.SSO.toString()); + User user = authenticator.authenticate(principal.getName(), null, LoginTypeEnum.SSO.toString()); return userService.getLoginUserInfo(user); } } diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java index b0e3aa26b7..4f24d8fd82 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/controller/UserController.java @@ -22,8 +22,8 @@ import org.apache.streampark.console.base.domain.RestResponse; import org.apache.streampark.console.base.exception.ApiAlertException; import org.apache.streampark.console.core.annotation.PermissionAction; -import 
org.apache.streampark.console.core.enums.LoginType; -import org.apache.streampark.console.core.enums.PermissionType; +import org.apache.streampark.console.core.enums.LoginTypeEnum; +import org.apache.streampark.console.core.enums.PermissionTypeEnum; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.system.entity.Team; import org.apache.streampark.console.system.entity.User; @@ -77,7 +77,7 @@ public RestResponse userList(RestRequest restRequest, User user) { @PostMapping("post") @RequiresPermissions("user:add") public RestResponse addUser(@Valid User user) throws Exception { - user.setLoginType(LoginType.PASSWORD); + user.setLoginTypeEnum(LoginTypeEnum.PASSWORD); this.userService.createUser(user); return RestResponse.success(); } @@ -112,7 +112,7 @@ public RestResponse checkUserName(@NotBlank(message = "{required}") String usern } @Operation(summary = "Update password") - @PermissionAction(id = "#user.userId", type = PermissionType.USER) + @PermissionAction(id = "#user.userId", type = PermissionTypeEnum.USER) @PutMapping("password") public RestResponse updatePassword(User user) throws Exception { userService.updatePassword(user); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/entity/User.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/entity/User.java index 7a328a1f9d..9259fc98ff 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/entity/User.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/entity/User.java @@ -18,8 +18,8 @@ package org.apache.streampark.console.system.entity; import org.apache.streampark.common.conf.ConfigConst; -import org.apache.streampark.console.core.enums.LoginType; -import org.apache.streampark.console.core.enums.UserType; +import org.apache.streampark.console.core.enums.LoginTypeEnum; +import org.apache.streampark.console.core.enums.UserTypeEnum; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableId; @@ -63,9 +63,9 @@ public class User implements Serializable { @Email(message = "{email}") private String email; - private UserType userType; + private UserTypeEnum userTypeEnum; - private LoginType loginType; + private LoginTypeEnum loginTypeEnum; @NotBlank(message = "{required}") private String status; diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java index 2b81a5dd0d..9ff76aa8b0 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/security/impl/AuthenticatorImpl.java @@ -19,8 +19,8 @@ import org.apache.streampark.console.base.exception.ApiAlertException; import org.apache.streampark.console.base.util.ShaHashUtils; -import org.apache.streampark.console.core.enums.LoginType; -import org.apache.streampark.console.core.enums.UserType; +import org.apache.streampark.console.core.enums.LoginTypeEnum; +import org.apache.streampark.console.core.enums.UserTypeEnum; import org.apache.streampark.console.system.entity.User; 
import org.apache.streampark.console.system.security.Authenticator; import org.apache.streampark.console.system.service.UserService; @@ -39,7 +39,7 @@ public class AuthenticatorImpl implements Authenticator { @Override public User authenticate(String username, String password, String loginType) throws Exception { - LoginType loginTypeEnum = LoginType.of(loginType); + LoginTypeEnum loginTypeEnum = LoginTypeEnum.of(loginType); if (loginTypeEnum == null) { throw new ApiAlertException( String.format("the login type [%s] is not supported.", loginType)); @@ -62,7 +62,7 @@ private User passwordAuthenticate(String username, String password) { if (user == null) { throw new ApiAlertException(String.format("user [%s] does not exist", username)); } - if (user.getLoginType() != LoginType.PASSWORD) { + if (user.getLoginTypeEnum() != LoginTypeEnum.PASSWORD) { throw new ApiAlertException(String.format("user [%s] can not login with PASSWORD", username)); } String salt = user.getSalt(); @@ -82,35 +82,35 @@ private User ldapAuthenticate(String username, String password) throws Exception User user = usersService.findByName(username); if (user != null) { - if (user.getLoginType() != LoginType.LDAP) { + if (user.getLoginTypeEnum() != LoginTypeEnum.LDAP) { throw new ApiAlertException( - String.format("user [%s] can only sign in with %s", username, user.getLoginType())); + String.format("user [%s] can only sign in with %s", username, user.getLoginTypeEnum())); } return user; } - return this.newUserCreate(LoginType.LDAP, username); + return this.newUserCreate(LoginTypeEnum.LDAP, username); } private User ssoAuthenticate(String username) throws Exception { // check if user exist User user = usersService.findByName(username); if (user != null) { - if (user.getLoginType() != LoginType.SSO) { + if (user.getLoginTypeEnum() != LoginTypeEnum.SSO) { throw new ApiAlertException( - String.format("user [%s] can only sign in with %s", username, user.getLoginType())); + String.format("user [%s] can only sign in with %s", username, user.getLoginTypeEnum())); } return user; } - return this.newUserCreate(LoginType.SSO, username); + return this.newUserCreate(LoginTypeEnum.SSO, username); } - private User newUserCreate(LoginType loginType, String username) throws Exception { + private User newUserCreate(LoginTypeEnum loginTypeEnum, String username) throws Exception { User newUser = new User(); newUser.setCreateTime(new Date()); newUser.setUsername(username); newUser.setNickName(username); - newUser.setLoginType(loginType); - newUser.setUserType(UserType.USER); + newUser.setLoginTypeEnum(loginTypeEnum); + newUser.setUserTypeEnum(UserTypeEnum.USER); newUser.setStatus(User.STATUS_VALID); newUser.setSex(User.SEX_UNKNOWN); usersService.createUser(newUser); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MenuServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MenuServiceImpl.java index 4ce0aaca90..15a38a7456 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MenuServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/MenuServiceImpl.java @@ -21,7 +21,7 @@ import org.apache.streampark.console.base.domain.router.RouterTree; import org.apache.streampark.console.base.domain.router.VueRouter; import 
org.apache.streampark.console.base.util.VueRouterUtils; -import org.apache.streampark.console.core.enums.UserType; +import org.apache.streampark.console.core.enums.UserTypeEnum; import org.apache.streampark.console.system.entity.Menu; import org.apache.streampark.console.system.entity.User; import org.apache.streampark.console.system.mapper.MenuMapper; @@ -66,7 +66,7 @@ public List findUserPermissions(Long userId, Long teamId) { new IllegalArgumentException( String.format("The userId [%s] not found", userId))); // Admin has the permission for all menus. - if (UserType.ADMIN == user.getUserType()) { + if (UserTypeEnum.ADMIN == user.getUserTypeEnum()) { return this.list().stream().map(Menu::getPerms).collect(Collectors.toList()); } return this.baseMapper.findUserPermissions(userId, teamId); @@ -81,7 +81,7 @@ public List findUserMenus(Long userId, Long teamId) { new IllegalArgumentException( String.format("The userId:[%s] not found", userId))); // Admin has the permission for all menus. - if (UserType.ADMIN == user.getUserType()) { + if (UserTypeEnum.ADMIN == user.getUserTypeEnum()) { LambdaQueryWrapper queryWrapper = new LambdaQueryWrapper().eq(Menu::getType, "0").orderByAsc(Menu::getOrderNum); return this.list(queryWrapper); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java index f89c3b3988..676f1be80c 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/TeamServiceImpl.java @@ -19,7 +19,7 @@ import org.apache.streampark.console.base.domain.RestRequest; import org.apache.streampark.console.base.exception.ApiAlertException; -import org.apache.streampark.console.core.enums.UserType; +import org.apache.streampark.console.core.enums.UserTypeEnum; import org.apache.streampark.console.core.service.CommonService; import org.apache.streampark.console.core.service.ProjectService; import org.apache.streampark.console.core.service.VariableService; @@ -143,7 +143,7 @@ public List findUserTeams(Long userId) { .orElseThrow( () -> new ApiAlertException(String.format("The userId [%s] not found.", userId))); // Admin has the permission for all teams. 
- if (UserType.ADMIN == user.getUserType()) { + if (UserTypeEnum.ADMIN == user.getUserTypeEnum()) { return this.list(); } return baseMapper.findUserTeams(userId); diff --git a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java index a817f0c615..8c0a5a1749 100644 --- a/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java +++ b/streampark-console/streampark-console-service/src/main/java/org/apache/streampark/console/system/service/impl/UserServiceImpl.java @@ -26,7 +26,7 @@ import org.apache.streampark.console.base.properties.ShiroProperties; import org.apache.streampark.console.base.util.ShaHashUtils; import org.apache.streampark.console.base.util.WebUtils; -import org.apache.streampark.console.core.enums.LoginType; +import org.apache.streampark.console.core.enums.LoginTypeEnum; import org.apache.streampark.console.core.service.ResourceService; import org.apache.streampark.console.core.service.application.ApplicationInfoService; import org.apache.streampark.console.core.service.application.ApplicationManageService; @@ -129,7 +129,7 @@ public void createUser(User user) { @Transactional(rollbackFor = Exception.class) public RestResponse updateUser(User user) { User existsUser = getById(user.getUserId()); - user.setLoginType(null); + user.setLoginTypeEnum(null); user.setPassword(null); user.setModifyTime(new Date()); if (needTransferResource(existsUser, user)) { @@ -154,7 +154,7 @@ public void updatePassword(User userParam) { User user = getById(userParam.getUserId()); ApiAlertException.throwIfNull(user, "User is null. 
Update password failed."); ApiAlertException.throwIfFalse( - user.getLoginType() == LoginType.PASSWORD, + user.getLoginTypeEnum() == LoginTypeEnum.PASSWORD, "Can only update password for user who sign in with PASSWORD"); String saltPassword = ShaHashUtils.encrypt(user.getSalt(), userParam.getOldPassword()); diff --git a/streampark-console/streampark-console-service/src/main/resources/mapper/core/AlertConfigMapper.xml b/streampark-console/streampark-console-service/src/main/resources/mapper/core/AlertConfigMapper.xml index 935da7b960..eb962865ef 100644 --- a/streampark-console/streampark-console-service/src/main/resources/mapper/core/AlertConfigMapper.xml +++ b/streampark-console/streampark-console-service/src/main/resources/mapper/core/AlertConfigMapper.xml @@ -22,7 +22,7 @@ - + diff --git a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml index f57b8efe76..d0b1e9c471 100644 --- a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml +++ b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ApplicationMapper.xml @@ -37,7 +37,7 @@ - + @@ -56,14 +56,14 @@ - + - + @@ -146,8 +146,8 @@ #{item} - - and t.execution_mode = #{application.executionMode} + + and t.execution_mode = #{application.executionModeEnum} and t.job_name like '%${application.jobName}%' @@ -193,8 +193,8 @@ tracking=#{application.tracking}, - - option_state=#{application.optionState}, + + option_state=#{application.optionStateEnum}, start_time=#{application.startTime}, @@ -304,7 +304,7 @@ select cluster_id, max(create_time) as ct from t_flink_app where cluster_id is not null - and execution_mode = #{executionMode} + and execution_mode = #{executionModeEnum} group by cluster_id order by ct desc ) as ci diff --git a/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkClusterMapper.xml b/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkClusterMapper.xml index 769fe521c3..e6f3f1cf8b 100644 --- a/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkClusterMapper.xml +++ b/streampark-console/streampark-console-service/src/main/resources/mapper/core/FlinkClusterMapper.xml @@ -25,7 +25,7 @@ - + @@ -36,9 +36,9 @@ - + - + diff --git a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ProjectMapper.xml b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ProjectMapper.xml index 46f733e582..7737379277 100644 --- a/streampark-console/streampark-console-service/src/main/resources/mapper/core/ProjectMapper.xml +++ b/streampark-console/streampark-console-service/src/main/resources/mapper/core/ProjectMapper.xml @@ -22,7 +22,7 @@ - + @@ -34,7 +34,7 @@ - + @@ -85,8 +85,8 @@ and t.name like '%${project.name}%' - - and t.build_state = #{project.buildState} + + and t.build_state = #{project.buildStateEnum} diff --git a/streampark-console/streampark-console-service/src/main/resources/mapper/system/UserMapper.xml b/streampark-console/streampark-console-service/src/main/resources/mapper/system/UserMapper.xml index aeec2eb4f7..a196cb1ce0 100644 --- a/streampark-console/streampark-console-service/src/main/resources/mapper/system/UserMapper.xml +++ b/streampark-console/streampark-console-service/src/main/resources/mapper/system/UserMapper.xml @@ -31,7 +31,7 @@ - +