Skip to content

Commit

Permalink
[opt](regression-test) move some cases to p2 to accelerate the testing speed of p0 (#37590) (#44389)
Browse files Browse the repository at this point in the history

pick #37590

move some cases to p2 to accelerate the testing speed of p0
  • Loading branch information
sollhui authored Nov 21, 2024
1 parent 88c258c commit 75f8323
Show file tree
Hide file tree
Showing 37 changed files with 5,225 additions and 1,972 deletions.
1,944 changes: 71 additions & 1,873 deletions regression-test/data/load_p0/routine_load/test_routine_load.out

Large diffs are not rendered by default.

2,125 changes: 2,125 additions & 0 deletions regression-test/data/load_p2/routine_load/test_routine_load.out

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -28,12 +28,6 @@ suite("test_routine_load","p0") {

// Target tables exercised by the routine-load cases: one per key model
// (duplicate / unique / merge-on-write / aggregate), covering both the
// basic scalar layout and the array-column layout.
def tables = [
"dup_tbl_basic",
"uniq_tbl_basic",
"mow_tbl_basic",
"agg_tbl_basic",
"dup_tbl_array",
"uniq_tbl_array",
"mow_tbl_array",
]

def multiTables = [
Expand Down Expand Up @@ -1319,99 +1313,6 @@ suite("test_routine_load","p0") {
}
}

// Re-run the JSON routine-load cases with the BE-side simdjson reader
// disabled, to verify the fallback JSON parse path loads the same data.
// Relies on suite-level state defined earlier in this file: `enabled`,
// `tables`, `jobs`, `columns`, `jsonpaths`, `jsonTopic`, `externalEnvIp`,
// `kafka_port`, and the framework helpers `sql`, `curl`,
// `getBackendIpHttpPort`, `assertTrue`, `assertEquals`, `log`.
i = 0
if (enabled != null && enabled.equalsIgnoreCase("true")) {
    def backendId_to_backendIP = [:]
    def backendId_to_backendHttpPort = [:]
    getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort);

    // For each BE node, set paramName=paramValue through the
    // /api/update_config HTTP endpoint and require an "OK" response.
    def set_be_param = { paramName, paramValue ->
        for (String id in backendId_to_backendIP.keySet()) {
            def beIp = backendId_to_backendIP.get(id)
            def bePort = backendId_to_backendHttpPort.get(id)
            def (code, out, err) = curl("POST", String.format("http://%s:%s/api/update_config?%s=%s", beIp, bePort, paramName, paramValue))
            assertTrue(out.contains("OK"))
        }
    }

    try {
        set_be_param.call("enable_simdjson_reader", "false")

        // Recreate each target table and start a routine-load job that
        // consumes JSON records from the table's Kafka topic.
        for (String tableName in tables) {
            sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
            sql new File("""${context.file.parent}/ddl/${tableName}_create.sql""").text

            def name = "routine_load_" + tableName
            sql """
                CREATE ROUTINE LOAD ${jobs[i]} ON ${name}
                COLUMNS(${columns[i]})
                PROPERTIES
                (
                    "format" = "json",
                    "jsonpaths" = '${jsonpaths[i]}',
                    "max_batch_interval" = "5",
                    "max_batch_rows" = "300000",
                    "max_batch_size" = "209715200"
                )
                FROM KAFKA
                (
                    "kafka_broker_list" = "${externalEnvIp}:${kafka_port}",
                    "kafka_topic" = "${jsonTopic[i]}",
                    "property.kafka_default_offsets" = "OFFSET_BEGINNING"
                );
            """
            sql "sync"
            i++
        }

        i = 0
        for (String tableName in tables) {
            // Wait until the job leaves NEED_SCHEDULE, then assert it is
            // RUNNING (column 8 of SHOW ROUTINE LOAD is the state).
            while (true) {
                sleep(1000)
                def res = sql "show routine load for ${jobs[i]}"
                def state = res[0][8].toString()
                if (state == "NEED_SCHEDULE") {
                    continue;
                }
                log.info("reason of state changed: ${res[0][17].toString()}".toString())
                // NOTE(review): expected value goes first in assertEquals so a
                // failure reports "expected RUNNING but was <state>".
                assertEquals("RUNNING", res[0][8].toString())
                break;
            }

            // Poll until loaded rows become visible; give up after
            // 120 * 5s = 10 minutes.
            def count = 0
            def tableName1 = "routine_load_" + tableName
            while (true) {
                def res = sql "select count(*) from ${tableName1}"
                def state = sql "show routine load for ${jobs[i]}"
                log.info("routine load state: ${state[0][8].toString()}".toString())
                log.info("routine load statistic: ${state[0][14].toString()}".toString())
                log.info("reason of state changed: ${state[0][17].toString()}".toString())
                if (res[0][0] > 0) {
                    break
                }
                if (count >= 120) {
                    log.error("routine load can not visible for long time")
                    // Deliberately failing assertion: the count is still 0
                    // here, so this surfaces the timeout as a test failure.
                    assertEquals(20, res[0][0])
                    break
                }
                sleep(5000)
                count++
            }

            sql "stop routine load for ${jobs[i]}"
            i++
        }
    } finally {
        // Always restore the BE config and drop the test tables, even when
        // an assertion above failed.
        set_be_param.call("enable_simdjson_reader", "true")
        for (String tableName in tables) {
            sql new File("""${context.file.parent}/ddl/${tableName}_drop.sql""").text
        }
    }
}


// TODO: need update kafka script
// i = 0
// if (enabled != null && enabled.equalsIgnoreCase("true")) {
Expand Down

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
[{"k00": "2", "k01": "[0, 0, 0, 0, 0, 0]", "k02": "[117, 117, 117, 117, 117, 117]", "k03": "[-4744, -4744, -4744, -4744, -4744, -4744]", "k04": "[-1593211961, -1593211961, -1593211961, -1593211961, -1593211961, -1593211961]", "k05": "[-3869640069299678780, -3869640069299678780, -3869640069299678780, -3869640069299678780, -3869640069299678780, -3869640069299678780]", "k06": "[8491817458398170567, 8491817458398170567, 8491817458398170567, 8491817458398170567, 8491817458398170567, 8491817458398170567]", "k07": "[-30948.857, -30948.857, -30948.857, -30948.857, -30948.857]", "k08": "[804341131.229905, 804341131.229905, 804341131.229905, 804341131.229905, 804341131.229905, 804341131.229905]", "k09": "[-74019648, -74019648, -74019648, -74019648, -74019648, -74019648]", "k10": "[13024168, 13024168, 13024168, 13024168, 13024168, 13024168]", "k11": "[2023-08-22, 2023-08-22, 2023-08-22, 2023-08-22, 2023-08-22, 2023-08-22]", "k12": "[2022-09-30 07:47:12, 2022-09-30 07:47:12, 2022-09-30 07:47:12, 2022-09-30 07:47:12, 2022-09-30 07:47:12, 2022-09-30 07:47:12]", "k13": "[2023-04-21, 2023-04-21, 2023-04-21, 2023-04-21, 2023-04-21, 2023-04-21]", "k14": "[2022-11-24 15:07:56, 2022-11-24 15:07:56, 2022-11-24 15:07:56, 2022-11-24 15:07:56, 2022-11-24 15:07:56, 2022-11-24 15:07:56]", "k15": "['g', 'g', 'g', 'g', 'g', 'g']", "k16": "['a', 'a', 'a', 'a', 'a', 'a']", "k17": "['S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl', 'S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl', 'S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl', 'S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl', 'S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl', 'S9JEYFrLN4zr1vX1yPUE6ovSX431nJdCuttpBUOVMrp844vBfHStO7laHNc5sI9MehAi8GbGDGV3t322DPMy7SBlquU5D7jsGISMNpX4IWbn3Yrsl']"}]
Loading

0 comments on commit 75f8323

Please sign in to comment.