Skip to content

Commit

Permalink
[Improve] Sync code from 2.1.3 (#3608)
Browse files Browse the repository at this point in the history
* [Improve] 2.1.3 ddl

* [Improve] docker|email check improvements

* [Improve] new icons

* [Improve] new icons

* [Improve] alert-message minor improvement

* [Improve] scrollbar style improvements

* [Improve] method name improvements

* [Improve] job state style improvements

* [Improve] job state improvements

* [Improve] component import improvements

* [Improve] check-style issue improvements

* [Improve] delete SECURITY.md

---------

Co-authored-by: benjobs <[email protected]>
  • Loading branch information
wolfboys and benjobs authored Mar 14, 2024
1 parent 738501a commit d9827e1
Show file tree
Hide file tree
Showing 43 changed files with 892 additions and 160 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@ object InternalConfigHolder extends Logger {
/** configuration key options storage (key -> ConfigOption) */
private val confOptions = new ConcurrentHashMap[String, InternalOption](initialCapacity)

/** Initialize the ConfigHub. */
Seq(CommonConfig, K8sFlinkConfig)

/** Register the ConfigOption */
private[conf] def register(@Nonnull conf: InternalOption): Unit = {
confOptions.put(conf.key, conf)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -284,7 +284,8 @@ insert into `t_setting` values (6, 'alert.email.port', null, 'Alert Email Smtp P
insert into `t_setting` values (7, 'alert.email.from', null, 'Alert Sender Email', 'Email to send alerts', 1);
insert into `t_setting` values (8, 'alert.email.userName', null, 'Alert Email User', 'Authentication username used to send alert emails', 1);
insert into `t_setting` values (9, 'alert.email.password', null, 'Alert Email Password', 'Authentication password used to send alarm email', 1);
insert into `t_setting` values (10, 'alert.email.ssl', 'false', 'Alert Email SSL', 'Whether to enable SSL in the mailbox that sends the alert', 2);insert into `t_setting` values (11, 'docker.register.address', null, 'Docker Register Address', 'Docker container service address', 1);
insert into `t_setting` values (10, 'alert.email.ssl', 'false', 'Alert Email SSL', 'Whether to enable SSL in the mailbox that sends the alert', 2);
insert into `t_setting` values (11, 'docker.register.address', null, 'Docker Register Address', 'Docker container service address', 1);
insert into `t_setting` values (12, 'docker.register.user', null, 'Docker Register User', 'Docker container service authentication username', 1);
insert into `t_setting` values (13, 'docker.register.password', null, 'Docker Register Password', 'Docker container service authentication password', 1);
insert into `t_setting` values (14, 'docker.register.namespace', null, 'Docker namespace', 'Namespace for docker image used in docker building env and target image register', 1);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -255,6 +255,7 @@ insert into "public"."t_setting" values (12, 'docker.register.user', null, 'Dock
insert into "public"."t_setting" values (13, 'docker.register.password', null, 'Docker Register Password', 'Docker container service authentication password', 1);
insert into "public"."t_setting" values (14, 'docker.register.namespace', null, 'Docker namespace', 'Namespace for docker image used in docker building env and target image register', 1);
insert into "public"."t_setting" values (15, 'ingress.mode.default', null, 'Ingress domain address', 'Automatically generate an nginx-based ingress by passing in a domain name', 1);

-- ----------------------------
-- Records of t_user
-- ----------------------------
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

use streampark;

set names utf8mb4;
-- Disable FK checks while altering, re-enabled at the end of the script.
set foreign_key_checks = 0;

-- Re-add the `dependency` column (presumably the job's dependency definition,
-- e.g. maven/jar coordinates — confirm against application code) right after
-- `main_class`, and make `modify_time` mandatory (NOT NULL).
alter table `t_flink_app`
add column `dependency` text collate utf8mb4_general_ci default null after `main_class`,
modify column `modify_time` datetime not null comment 'modify time';

set foreign_key_checks = 1;
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

use streampark;

set names utf8mb4;
-- Disable FK checks while altering, re-enabled at the end of the script.
set foreign_key_checks = 0;

-- Rebrand the top-level Flink menu and reorder the main navigation entries.
update `t_menu` set menu_name='Apache Flink',order_num=1 where menu_id = 120000;
update `t_menu` set order_num=3 where menu_id = 110000;
update `t_menu` set order_num=2 where menu_id = 130000;
-- Drop menu entry 110300 (presumably retired in 2.1.3 — confirm against menu seed data).
delete from `t_menu` where menu_id=110300;

-- Widen free-form text columns to longtext so large values (args, pod
-- templates, option JSON) are not truncated; `modify_time` also becomes
-- nullable (default null) here.
alter table `t_flink_app`
modify column `args` longtext,
modify column `dynamic_properties` longtext,
modify column `k8s_pod_template` longtext,
modify column `k8s_jm_pod_template` longtext,
modify column `k8s_tm_pod_template` longtext,
modify column `options` longtext comment 'json form of parameter collection ',
modify column `modify_time` datetime default null comment 'modify time';

-- Same longtext widening for cluster-level configuration columns.
alter table `t_flink_cluster`
modify column `options` longtext comment 'json form of parameter collection ',
modify column `dynamic_properties` longtext comment 'allows specifying multiple generic configuration options',
modify column `exception` longtext comment 'exception information';

alter table `t_message` modify column `context` longtext;

-- Git credentials are no longer stored on the project row.
alter table `t_flink_project` drop column `git_credential`;

set foreign_key_checks = 1;
Original file line number Diff line number Diff line change
@@ -0,0 +1,19 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

-- PostgreSQL counterpart of the MySQL upgrade: re-add the `dependency`
-- column (TEXT, default collation) to t_flink_app.
alter table "public"."t_flink_app"
add column "dependency" text collate "pg_catalog"."default";
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

-- Git credentials are no longer stored on the project row.
alter table "public"."t_flink_project"
drop column "git_credential";

-- Rebrand the top-level Flink menu, reorder the main navigation entries,
-- and drop menu entry 110300 (presumably retired in 2.1.3 — confirm
-- against menu seed data).
update "public"."t_menu" set menu_name='Apache Flink',order_num=1 where menu_id = 120000;
update "public"."t_menu" set order_num=3 where menu_id = 110000;
update "public"."t_menu" set order_num=2 where menu_id = 130000;
delete from "public"."t_menu" where menu_id=110300;
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
@Slf4j
@Validated
@RestController
@RequestMapping("flink/setting")
@RequestMapping("setting")
public class SettingController {

@Autowired private SettingService settingService;
Expand Down Expand Up @@ -122,7 +122,7 @@ public RestResponse updateEmail(SenderEmail senderEmail) {
}

@Operation(summary = "Check hadoop status")
@PostMapping("checkHadoop")
@PostMapping("check/hadoop")
public RestResponse checkHadoop() {
try {
HadoopUtils.hdfs().getStatus();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,9 @@ public class EnvInitializer implements ApplicationRunner {
@SneakyThrows
@Override
public void run(ApplicationArguments args) throws Exception {

checkAppHome();

// init InternalConfig
initConfig();

Expand All @@ -101,6 +104,7 @@ private void initConfig() {
Utils.requireNotNull(config);
InternalConfigHolder.set(config, env.getProperty(key, config.classType()));
});

InternalConfigHolder.log();

settingService.getMavenConfig().updateConfig();
Expand All @@ -118,8 +122,6 @@ private void overrideSystemProp(String key, String defaultValue) {

public synchronized void storageInitialize(StorageType storageType) {

checkAppHome();

if (initialized.contains(storageType)) {
return;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -146,7 +146,7 @@ public String getSavePointPath(Application appParam) throws Exception {
// task, see if Application conf is configured when the task is defined, if checkpoints are
// configured
// and enabled, read `state.savepoints.dir`
savepointPath = getSavepointFromAppCfgIfStreamParkOrSQLJob(application);
savepointPath = getSavepointFromConfig(application);
if (StringUtils.isNotBlank(savepointPath)) {
return savepointPath;
}
Expand Down Expand Up @@ -340,7 +340,7 @@ public String getSavepointFromDynamicProps(String dynamicProps) {
*/
@VisibleForTesting
@Nullable
public String getSavepointFromAppCfgIfStreamParkOrSQLJob(Application application) {
public String getSavepointFromConfig(Application application) {
if (!application.isStreamParkJob() && !application.isFlinkSqlJob()) {
return null;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -95,17 +95,17 @@ void testGetSavepointFromAppCfgIfStreamParkOrSQLJob() {

// Test for non-(StreamPark job Or FlinkSQL job)
app.setAppType(ApplicationType.APACHE_FLINK.getType());
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();
app.setAppType(ApplicationType.STREAMPARK_FLINK.getType());
app.setJobType(FlinkDevelopmentMode.CUSTOM_CODE.getMode());
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();

// Test for (StreamPark job Or FlinkSQL job) without application config.
app.setAppType(ApplicationType.STREAMPARK_FLINK.getType());
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();
app.setAppType(ApplicationType.STREAMPARK_FLINK.getType());
app.setJobType(FlinkDevelopmentMode.CUSTOM_CODE.getMode());
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();

// Test for (StreamPark job Or FlinkSQL job) with application config just disabled checkpoint.
ApplicationConfig appCfg = new ApplicationConfig();
Expand All @@ -114,15 +114,15 @@ void testGetSavepointFromAppCfgIfStreamParkOrSQLJob() {
appCfg.setContent("state.savepoints.dir=hdfs:///test");
appCfg.setFormat(ConfigFileTypeEnum.PROPERTIES.getValue());
configService.save(appCfg);
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();

// Test for (StreamPark job or FlinkSQL job) with application config and enabled checkpoint and
// configured value.

// Test for non-value for CHECKPOINTING_INTERVAL
appCfg.setContent("");
configService.updateById(appCfg);
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app)).isNull();
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isNull();

// Test for configured CHECKPOINTING_INTERVAL
appCfg.setContent(
Expand All @@ -135,8 +135,7 @@ void testGetSavepointFromAppCfgIfStreamParkOrSQLJob() {
effective.setAppId(appId);
effective.setTargetType(EffectiveTypeEnum.CONFIG.getType());
effectiveService.save(effective);
assertThat(savePointServiceImpl.getSavepointFromAppCfgIfStreamParkOrSQLJob(app))
.isEqualTo("hdfs:///test");
assertThat(savePointServiceImpl.getSavepointFromConfig(app)).isEqualTo("hdfs:///test");
}

@Test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,16 @@ import { SystemSetting } from './types/setting.type';
import { defHttp } from '/@/utils/http/axios';

/**
 * REST endpoints of the system-setting module.
 * The server-side prefix is 'setting' (the former '/flink/setting' prefix
 * was dropped in this sync).
 *
 * NOTE(review): the identifier is misspelled (`SETTING_APi` vs `SETTING_API`);
 * renaming would touch every call site, so it is only flagged here.
 */
enum SETTING_APi {
  GET = '/setting/get',
  GET_DOCKER = '/setting/docker',
  GET_EMAIL = '/setting/email',
  ALL = '/setting/all',
  UPDATE = '/setting/update',
  CHECK_HADOOP = '/setting/check/hadoop',
  CHECK_DOCKER = '/setting/check/docker',
  UPDATE_DOCKER = '/setting/update/docker',
  CHECK_EMAIL = '/setting/check/email',
  // NOTE(review): key says ALERT but the path updates email settings;
  // an UPDATE_EMAIL alias would be clearer — only flagged to avoid
  // breaking call sites.
  UPDATE_ALERT = '/setting/update/email',
}
/**
* Get system settings
Expand Down Expand Up @@ -58,3 +63,59 @@ export function fetchCheckHadoop(): Promise<boolean> {
url: SETTING_APi.CHECK_HADOOP,
});
}

/** Fetch the stored docker registry settings from the server. */
export function fetchDockerConfig() {
  const url = SETTING_APi.GET_DOCKER;
  return defHttp.post({ url });
}

/**
 * Ask the server to validate the given docker registry settings.
 * Resolves to true when the connection check succeeds.
 */
export function fetchVerifyDocker(data: Recordable): Promise<boolean> {
  const request = { url: SETTING_APi.CHECK_DOCKER, data };
  return defHttp.post(request);
}

/**
 * verify email setting info — asks the server to validate the alert
 * email (SMTP) settings. (Original comment wrongly said "docker",
 * copied from the function above.)
 */
export function fetchVerifyEmail(data: Recordable): Promise<boolean> {
  return defHttp.post({
    url: SETTING_APi.CHECK_EMAIL,
    data,
  });
}

/** Fetch the stored alert email settings from the server. */
export function fetchEmailConfig() {
  const url = SETTING_APi.GET_EMAIL;
  return defHttp.post({ url });
}

/**
 * Persist updated docker registry settings.
 * Resolves to true when the server accepts the update.
 */
export function fetchDockerUpdate(data: Recordable): Promise<boolean> {
  const request = { url: SETTING_APi.UPDATE_DOCKER, data };
  return defHttp.post(request);
}

/**
 * Persist updated alert email settings.
 * Resolves to true when the server accepts the update.
 */
export function fetchEmailUpdate(data: Recordable): Promise<boolean> {
  const request = { url: SETTING_APi.UPDATE_ALERT, data };
  return defHttp.post(request);
}
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading

0 comments on commit d9827e1

Please sign in to comment.