Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Improve] custom-code job support dependencies #3291

Merged
merged 7 commits into from
Oct 26, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ import java.util.regex.Pattern
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.{Map => MutableMap}
import scala.collection.mutable.{ArrayBuffer, Map => MutableMap}

object PropertiesUtils extends Logger {

Expand Down Expand Up @@ -305,6 +305,47 @@ object PropertiesUtils extends Logger {
}
}

/**
 * Splits a raw program-argument string into an ordered list of arguments.
 *
 * Tokens are separated by any run of whitespace (spaces, tabs, newlines).
 * A token that starts with a single or double quote is re-joined with the
 * following tokens until the matching closing quote appears, and the outer
 * quotes are stripped — so quoted values may contain whitespace.
 *
 * NOTE(review): because the input is tokenized with `split("\\s+")` first,
 * consecutive whitespace inside a quoted value collapses to a single space.
 * An unterminated quote consumes the rest of the input as one argument
 * (best effort).
 *
 * @param args raw argument string; null or blank yields an empty list
 * @return parsed arguments in their original order
 */
@Nonnull def extractArguments(args: String): List[String] = {
  val programArgs = new ArrayBuffer[String]()
  // isNotBlank + trim (not just isNotEmpty): leading whitespace would
  // otherwise make split("\\s+") emit an empty first token, which then
  // ends up in the result as a spurious empty argument.
  if (StringUtils.isNotBlank(args)) {
    val array = args.trim.split("\\s+")
    val iter = array.iterator

    // Accumulates tokens until one ends with the quote char `s`, then
    // appends the joined value with its surrounding quotes removed.
    // Also handles the already-closed case (v ends with `s` itself).
    def join(s: String, v: String): Unit = {
      if (v.startsWith(s)) {
        if (v.endsWith(s)) {
          programArgs += v.replaceAll(s"^$s|$s$$", "")
        } else {
          var value = v
          while (!value.endsWith(s) && iter.hasNext) {
            value += s" ${iter.next()}"
          }
          programArgs += value.replaceAll(s"^$s|$s$$", "")
        }
      }
    }

    while (iter.hasNext) {
      val v = iter.next()
      if (v.startsWith("'")) {
        // join covers both the closed ('x') and open ('x y z') cases.
        join("'", v)
      } else if (v.startsWith("\"")) {
        join("\"", v)
      } else {
        programArgs += v
      }
    }
  }
  programArgs.toList
}

/**
 * Java-friendly variant of `extractDynamicProperties`: copies the parsed
 * key/value pairs into a new `JavaMap` instance so Java callers receive a
 * standalone map rather than a Scala wrapper.
 * (`JavaMap` is a type alias declared elsewhere in this file — presumably
 * `java.util.HashMap`; verify against the alias definition.)
 */
@Nonnull def extractDynamicPropertiesAsJava(properties: String): JavaMap[String, String] =
new JavaMap[String, String](extractDynamicProperties(properties).asJava)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,75 +16,19 @@
*/
package org.apache.streampark.common.util

import org.apache.commons.lang3.StringUtils
import org.junit.jupiter.api.{Assertions, Test}

import scala.collection.mutable.ArrayBuffer
import scala.language.postfixOps

class PropertiesUtilsTestCase {

@Test def testExtractProgramArgs(): Unit = {
  // Multi-line, CDC-style argument string containing a single-quoted regex
  // value ('test|test.*') and an option with an empty value (password=).
  val args =
    "mysql-sync-database \n--database employees \n--mysql-conf hostname=127.0.0.1 \n--mysql-conf port=3306 \n--mysql-conf username=root \n--mysql-conf password=123456 \n--mysql-conf database-name=employees \n--including-tables 'test|test.*' \n--sink-conf fenodes=127.0.0.1:8030 \n--sink-conf username=root \n--sink-conf password= \n--sink-conf jdbc-url=jdbc:mysql://127.0.0.1:9030 \n--sink-conf sink.label-prefix=label\n--table-conf replication_num=1 "

  val programArgs = PropertiesUtils.extractArguments(args)

  // Any whitespace (spaces and newlines alike) separates tokens.
  Assertions.assertEquals("mysql-sync-database", programArgs.head)
  Assertions.assertEquals("employees", programArgs(programArgs.indexOf("--database") + 1))
  // Surrounding single quotes are stripped; the quoted value stays intact.
  Assertions.assertEquals("test|test.*", programArgs(programArgs.indexOf("--including-tables") + 1))
  // An option with an empty value is preserved verbatim.
  Assertions.assertTrue(programArgs.contains("password="))
  Assertions.assertEquals(27, programArgs.size)

  // A quoted value containing whitespace is re-joined into one argument.
  val quoted = PropertiesUtils.extractArguments("--sql 'select * from t'")
  Assertions.assertEquals(List("--sql", "select * from t"), quoted)
}

@Test def testDynamicProperties(): Unit = {
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

-- Switch to the StreamPark metadata database before altering it.
use streampark;

-- Ensure multi-byte identifiers/values round-trip correctly during the DDL.
set names utf8mb4;
-- Temporarily relax FK checks so the ALTER cannot be blocked by constraints.
set foreign_key_checks = 0;

-- Add the per-application dependency column (text payload; presumably the
-- serialized maven-dependency/jar list used by custom-code jobs — verify
-- against Application.dependency on the Java side).
-- NOTE(review): re-running this script fails because MySQL's ALTER TABLE
-- has no IF NOT EXISTS guard for ADD COLUMN.
alter table `t_flink_app`
add column `dependency` text collate utf8mb4_general_ci default null after `main_class`;

set foreign_key_checks = 1;
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

-- Add the per-application dependency column (mirrors the MySQL upgrade script).
-- NOTE(review): fails if the column already exists; PostgreSQL 9.6+ supports
-- ADD COLUMN IF NOT EXISTS if idempotent re-runs are desired.
alter table "public"."t_flink_app"
add column "dependency" text collate "pg_catalog"."default";
Original file line number Diff line number Diff line change
Expand Up @@ -160,6 +160,9 @@ public class Application implements Serializable {

private String mainClass;

@TableField(updateStrategy = FieldStrategy.IGNORED)
private String dependency;

private Date startTime;

@TableField(updateStrategy = FieldStrategy.IGNORED)
Expand Down Expand Up @@ -220,7 +223,6 @@ public class Application implements Serializable {
/** running job */
private transient JobsOverview.Task overview;

private transient String dependency;
private transient Long sqlId;
private transient String flinkSql;

Expand Down Expand Up @@ -390,15 +392,6 @@ public boolean cpFailedTrigger() {
&& this.cpFailureAction != null;
}

/**
 * Two applications are considered the same Flink job only when both are
 * Flink SQL jobs, their SQL text matches (ignoring surrounding whitespace),
 * and their dependency definitions are equivalent.
 */
public boolean eqFlinkJob(Application other) {
  // Guard: anything other than two Flink SQL jobs can never be equal.
  if (!this.isFlinkSqlJob() || !other.isFlinkSqlJob()) {
    return false;
  }
  boolean sameSql = this.getFlinkSql().trim().equals(other.getFlinkSql().trim());
  return sameSql && this.getDependencyObject().eq(other.getDependencyObject());
}

/** Local compilation and packaging working directory */
@JsonIgnore
public String getDistHome() {
Expand Down Expand Up @@ -752,11 +745,11 @@ public int hashCode() {

@Override
public String toString() {
return groupId + ":" + artifactId + ":" + version + getClassifier(":");
return groupId + ":" + artifactId + ":" + version + getClassifier();
}

private String getClassifier(String joiner) {
return StringUtils.isEmpty(classifier) ? "" : joiner + classifier;
private String getClassifier() {
return StringUtils.isEmpty(classifier) ? "" : ":" + classifier;
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -184,32 +184,24 @@ public void onStart(PipeSnapshot snapshot) {

if (app.isCustomCodeJob()) {
// customCode upload jar to appHome...
String appHome = app.getAppHome();
FsOperator fsOperator = app.getFsOperator();
fsOperator.delete(appHome);
if (app.isUploadJob()) {
if (app.isCICDJob()) {
String appHome = app.getAppHome();
fsOperator.mkCleanDirs(appHome);
fsOperator.upload(app.getDistHome(), appHome);
} else {
File localJar = new File(WebUtils.getAppTempDir(), app.getJar());
// upload jar copy to appHome
String uploadJar = appUploads.concat("/").concat(app.getJar());
checkOrElseUploadJar(app.getFsOperator(), localJar, uploadJar, appUploads);
switch (app.getApplicationType()) {
case STREAMPARK_FLINK:
fsOperator.mkdirs(app.getAppLib());
fsOperator.copy(uploadJar, app.getAppLib(), false, true);
break;
case APACHE_FLINK:
fsOperator.mkdirs(appHome);
fsOperator.copy(uploadJar, appHome, false, true);
break;
default:
throw new IllegalArgumentException(
"[StreamPark] unsupported ApplicationType of custom code: "
+ app.getApplicationType());
if (app.getApplicationType() == ApplicationType.STREAMPARK_FLINK) {
fsOperator.mkdirs(app.getAppLib());
fsOperator.copy(uploadJar, app.getAppLib(), false, true);
}
} else {
fsOperator.upload(app.getDistHome(), appHome);
}
} else {
}

if (app.isFlinkSqlJob() || app.isUploadJob()) {
if (!app.getDependencyObject().getJar().isEmpty()) {
String localUploads = Workspace.local().APP_UPLOADS();
// copy jar to local upload dir
Expand Down Expand Up @@ -335,7 +327,8 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) {
FlinkEnv flinkEnv = flinkEnvService.getByIdOrDefault(app.getVersionId());
String flinkUserJar = retrieveFlinkUserJar(flinkEnv, app);
ExecutionMode executionMode = app.getExecutionModeEnum();
String mainClass = ConfigConst.STREAMPARK_FLINKSQL_CLIENT_CLASS();
String mainClass =
app.isCustomCodeJob() ? app.getMainClass() : ConfigConst.STREAMPARK_FLINKSQL_CLIENT_CLASS();
switch (executionMode) {
case YARN_APPLICATION:
String yarnProvidedPath = app.getAppLib();
Expand Down Expand Up @@ -364,7 +357,6 @@ private BuildPipeline createPipelineInstance(@Nonnull Application app) {
app.getLocalAppHome(),
mainClass,
flinkUserJar,
app.isCustomCodeJob(),
app.getExecutionModeEnum(),
app.getDevelopmentMode(),
flinkEnv.getFlinkVersion(),
Expand Down Expand Up @@ -424,7 +416,7 @@ private String retrieveFlinkUserJar(FlinkEnv flinkEnv, Application app) {
case STREAMPARK_FLINK:
return String.format("%s/%s", app.getAppLib(), app.getModule().concat(".jar"));
case APACHE_FLINK:
return String.format("%s/%s", app.getAppHome(), app.getJar());
return String.format("%s/%s", WebUtils.getAppTempDir(), app.getJar());
default:
throw new IllegalArgumentException(
"[StreamPark] unsupported ApplicationType of custom code: "
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -703,6 +703,17 @@ else if (ExecutionMode.isKubernetesMode(appParam.getExecutionMode())
public boolean create(Application appParam) {
ApiAlertException.throwIfNull(
appParam.getTeamId(), "The teamId can't be null. Create application failed.");

if (appParam.isFlinkSqlJob()) {
appParam.setBuild(true);
} else {
if (appParam.isUploadJob()) {
appParam.setBuild(!appParam.getDependencyObject().isEmpty());
} else {
appParam.setBuild(false);
}
}

appParam.setUserId(commonService.getUserId());
appParam.setState(FlinkAppState.ADDED.getValue());
appParam.setRelease(ReleaseState.NEED_RELEASE.get());
Expand Down Expand Up @@ -736,6 +747,17 @@ public boolean create(Application appParam) {
}
}

/**
 * Persists the application, but never stores the {@code dependency} column
 * for Flink SQL jobs: the field is cleared before the insert and the
 * caller's in-memory value is restored afterwards, so the entity passed in
 * is observably unchanged by this method.
 */
@Override
public boolean save(Application entity) {
  String dependency = entity.getDependency();
  if (entity.isFlinkSqlJob()) {
    // Flink SQL jobs do not persist dependency on the application row
    // (presumably tracked with the SQL record — verify against schema usage).
    entity.setDependency(null);
  }
  try {
    return super.save(entity);
  } finally {
    // Restore even when super.save throws; the original code left the
    // entity mutated (dependency == null) on failure.
    entity.setDependency(dependency);
  }
}

private boolean existsByJobName(String jobName) {
return this.baseMapper.existsByJobName(jobName);
}
Expand Down Expand Up @@ -836,8 +858,17 @@ public boolean update(Application appParam) {
String.format(ERROR_APP_QUEUE_HINT, appParam.getYarnQueue(), appParam.getTeamId()));

application.setRelease(ReleaseState.NEED_RELEASE.get());

if (application.isUploadJob()) {
if (!ObjectUtils.safeEquals(application.getJar(), appParam.getJar())) {
Application.Dependency thisDependency =
Application.Dependency.toDependency(appParam.getDependency());
Application.Dependency targetDependency =
Application.Dependency.toDependency(application.getDependency());

if (!thisDependency.eq(targetDependency)) {
application.setDependency(appParam.getDependency());
application.setBuild(true);
} else if (!ObjectUtils.safeEquals(application.getJar(), appParam.getJar())) {
application.setBuild(true);
} else {
File jarFile = new File(WebUtils.getAppTempDir(), appParam.getJar());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ insert into `t_team` values (100001, 'test', 'The test team', now(), now());
-- ----------------------------
-- Records of t_flink_app
-- ----------------------------
insert into `t_flink_app` values (100000, 100000, 2, 4, null, null, 'Flink SQL Demo', null, null, null, null, null, null , null, 100000, null, 1, null, null, null, null, null, null, null, '0', 0, null, null, null, null, null, null, 'Flink SQL Demo', 0, null, null, null, null, null, null, null, 0, 0, now(), now(), null, 1, 1, null, null, null, null, null, null, 0, null, null, null, 'streampark,test');
insert into `t_flink_app` values (100000, 100000, 2, 4, null, null, 'Flink SQL Demo', null, null, null, null, null, null, null , null, 100000, null, 1, null, null, null, null, null, null, null, '0', 0, null, null, null, null, null, null, 'Flink SQL Demo', 0, null, null, null, null, null, null, null, 0, 0, now(), now(), null, 1, 1, null, null, null, null, null, null, 0, null, null, null, 'streampark,test');

-- ----------------------------
-- Records of t_flink_effective
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ create table if not exists `t_flink_app` (
`jar` varchar(255) default null,
`jar_check_sum` bigint default null,
`main_class` varchar(255) default null,
`dependency` text ,
`args` text,
`options` text,
`hot_params` text ,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
<result column="tracking" jdbcType="INTEGER" property="tracking"/>
<result column="jar" jdbcType="VARCHAR" property="jar"/>
<result column="jar_check_sum" jdbcType="VARCHAR" property="jarCheckSum"/>
<result column="dependency" jdbcType="LONGVARCHAR" property="dependency"/>
<result column="main_class" jdbcType="VARCHAR" property="mainClass"/>
<result column="job_id" jdbcType="VARCHAR" property="jobId"/>
<result column="job_manager_url" jdbcType="VARCHAR" property="jobManagerUrl"/>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -79,9 +79,7 @@
height: 18px;
background-color: #fff;
border-radius: 50%;
transition:
transform 0.5s,
background-color 0.5s;
transition: transform 0.5s, background-color 0.5s;
will-change: transform;
}

Expand Down
Loading
Loading