[regression-test](fix) fix global var (#45917)
shuke987 authored Dec 25, 2024
1 parent 303f6bd commit 20c49c3
Showing 39 changed files with 51 additions and 51 deletions.
2 changes: 1 addition & 1 deletion regression-test/suites/account_p0/test_alter_user.groovy
@@ -170,7 +170,7 @@ suite("test_alter_user", "account,nonConcurrent") {
}
sleep(6000)
sql """set password for 'test_auth_user4' = password('123')"""
- result2 = connect('test_auth_user4', '123', context.config.jdbcUrl) {
+ def result2 = connect('test_auth_user4', '123', context.config.jdbcUrl) {
sql 'select 1'
}
sleep(6000)
@@ -56,13 +56,13 @@ suite('compaction_width_array_column', "p2") {
}

def table_load_task = { table_name ->
- uniqueID = Math.abs(UUID.randomUUID().hashCode()).toString()
- loadLabel = table_name + "_" + uniqueID
+ def uniqueID = Math.abs(UUID.randomUUID().hashCode()).toString()
+ def loadLabel = table_name + "_" + uniqueID
//loadLabel = table_name + '_load_5'
- loadSql = new File("""${context.file.parent}/ddl/${table_name}_load.sql""").text.replaceAll("\\\$\\{s3BucketName\\}", s3BucketName)
+ def loadSql = new File("""${context.file.parent}/ddl/${table_name}_load.sql""").text.replaceAll("\\\$\\{s3BucketName\\}", s3BucketName)
loadSql = loadSql.replaceAll("\\\$\\{loadLabel\\}", loadLabel)
loadSql = loadSql.replaceAll("\\\$\\{table\\_name\\}", table_name)
- nowloadSql = loadSql + s3WithProperties
+ def nowloadSql = loadSql + s3WithProperties
try_sql nowloadSql

while (true) {
@@ -96,10 +96,10 @@ suite("test_base_compaction_with_dup_key_max_file_size_limit", "p2") {

String command = sb.toString()
logger.info(command)
- process = command.execute()
- code = process.waitFor()
- err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
- out = process.getText()
+ def process = command.execute()
+ def code = process.waitFor()
+ def err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
+ def out = process.getText()
logger.info("Run compaction: code=" + code + ", out=" + out + ", disableAutoCompaction " + disableAutoCompaction + ", err=" + err)
if (!disableAutoCompaction) {
return "Success, " + out
@@ -86,9 +86,9 @@ suite("test_single_compaction_fault_injection", "p2, nonConcurrent") {

String command = sb.toString()
logger.info(command)
- process = command.execute()
- code = process.waitFor()
- out = process.getText()
+ def process = command.execute()
+ def code = process.waitFor()
+ def out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
@@ -93,9 +93,9 @@ suite("test_single_compaction_p2", "p2") {

String command = sb.toString()
logger.info(command)
- process = command.execute()
- code = process.waitFor()
- out = process.getText()
+ def process = command.execute()
+ def code = process.waitFor()
+ def out = process.getText()
logger.info("Get compaction status: code=" + code + ", out=" + out)
assertEquals(code, 0)
def compactionStatus = parseJson(out.trim())
@@ -79,9 +79,9 @@ suite("test_partial_update_publish_conflict_with_error", "nonConcurrent") {
log.info("http_stream execute 2pc: ${command}")

def process = command.execute()
- code = process.waitFor()
- out = process.text
- json2pc = parseJson(out)
+ def code = process.waitFor()
+ def out = process.text
+ def json2pc = parseJson(out)
log.info("http_stream 2pc result: ${out}".toString())
assertEquals(code, 0)
assertEquals("success", json2pc.status.toLowerCase())
@@ -60,7 +60,7 @@ suite("test_calc_crc", "nonConcurrent") {
sql """ INSERT INTO ${tableName} VALUES (3, "andy", 100); """
sql """ INSERT INTO ${tableName} VALUES (3, "bason", 99); """

- tablets = sql_return_maparray """ show tablets from ${tableName}; """
+ def tablets = sql_return_maparray """ show tablets from ${tableName}; """
String tablet_id = tablets[0].TabletId
String backend_id = tablets[0].BackendId
String ip = backendId_to_backendIP.get(backend_id)
@@ -48,7 +48,7 @@ suite("test_delete_from_timeout","nonConcurrent") {

GetDebugPoint().enableDebugPointForAllBEs("PushHandler::_do_streaming_ingestion.try_lock_fail")

- t1 = Thread.start {
+ def t1 = Thread.start {
sleep(15000)
GetDebugPoint().disableDebugPointForAllBEs("PushHandler::_do_streaming_ingestion.try_lock_fail")
}
@@ -64,7 +64,7 @@ suite("test_full_compaction_run_status","nonConcurrent") {
String tablet_id = tablet.TabletId
backend_id = tablet.BackendId

- times = 1
+ def times = 1
do{
(code, out, err) = be_run_full_compaction(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id), tablet_id)
logger.info("Run compaction: code=" + code + ", out=" + out + ", err=" + err)
@@ -116,7 +116,7 @@ suite("test_full_compaction_with_ordered_data","nonConcurrent") {
int rowsetCount = 0
for (def tablet in tablets) {
String tablet_id = tablet.TabletId
- (code, out, err) = curl("GET", tablet.CompactionStatus)
+ def (code, out, err) = curl("GET", tablet.CompactionStatus)
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
@@ -91,7 +91,7 @@ suite("test_segcompaction_correctness", "nonConcurrent,p2") {

result = sql """ show load where label="$uuid" order by createtime desc limit 1; """
qt_select_default """ SELECT * FROM ${tableName} WHERE col_0=47 order by col_1, col_2; """
- tablets = sql """ show tablets from ${tableName}; """
+ def tablets = sql """ show tablets from ${tableName}; """
} finally {
try_sql("DROP TABLE IF EXISTS ${tableName}")
}
@@ -96,7 +96,7 @@ suite("test_too_many_segments", "nonConcurrent,p2") { // the epic -238 case

result = sql """ show load where label="$uuid" order by createtime desc limit 1; """
qt_select_default """ SELECT * FROM ${tableName} WHERE col_0=47 order by col_1, col_2; """
- tablets = sql """ show tablets from ${tableName}; """
+ def tablets = sql """ show tablets from ${tableName}; """
} finally {
try_sql("DROP TABLE IF EXISTS ${tableName}")
GetDebugPoint().disableDebugPointForAllBEs("BetaRowsetWriter._check_segment_number_limit_too_many_segments")
@@ -76,7 +76,7 @@ suite("test_variant_bloom_filter", "nonConcurrent") {
for (def tablet in tablets) {
int beforeSegmentCount = 0
String tablet_id = tablet.TabletId
- (code, out, err) = curl("GET", tablet.CompactionStatus)
+ def (code, out, err) = curl("GET", tablet.CompactionStatus)
logger.info("Show tablets status: code=" + code + ", out=" + out + ", err=" + err)
assertEquals(code, 0)
def tabletJson = parseJson(out.trim())
@@ -42,7 +42,7 @@ suite("test_dup_table_inverted_index", "p1") {
def execute_sql = { key, value, sqlList ->
sql """ set ${key} = ${value} """
List<Object> resultList = new ArrayList<>()
- for (sqlStr in sqlList) {
+ for (def sqlStr in sqlList) {
def sqlResult = sql """ ${sqlStr} """
resultList.add(sqlResult)
}
@@ -549,7 +549,7 @@ suite("test_show_data_with_compaction", "p1") {
def backendId_to_backendHttpPort = [:]
getBackendIpHttpPort(backendId_to_backendIP, backendId_to_backendHttpPort);

- backend_id = backendId_to_backendIP.keySet()[0]
+ def backend_id = backendId_to_backendIP.keySet()[0]
def (code, out, err) = show_be_config(backendId_to_backendIP.get(backend_id), backendId_to_backendHttpPort.get(backend_id))

logger.info("Show config: code=" + code + ", out=" + out + ", err=" + err)
2 changes: 1 addition & 1 deletion regression-test/suites/opensky_p2/load.groovy
@@ -24,7 +24,7 @@ suite("load"){

sql """ DROP TABLE IF EXISTS $tableName """

- scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent
+ def scriptDir = new File(getClass().protectionDomain.codeSource.location.path).parent

sql new File("""${scriptDir}/ddl/${tableName}.sql""").text

@@ -83,7 +83,7 @@ suite("test_schema_change") {

waitBrokerLoadJob(loadLabel)
sql "sync"
- rowCount = sql "select count(*) from ${tableName}"
+ def rowCount = sql "select count(*) from ${tableName}"
logger.info("rowCount:{}", rowCount)
assertEquals(rowCount[0][0], 15000000)

2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf0.1_p1/load.groovy
@@ -89,7 +89,7 @@ suite("load") {
sql new File("""${context.file.parent}/ddl/${table}_delete.sql""").text
sql "set insert_timeout=3600"
def r = sql "select @@insert_timeout"
- year_cons = [
+ def year_cons = [
'lo_orderdate<19930101',
'lo_orderdate>=19930101 and lo_orderdate<19940101',
'lo_orderdate>=19940101 and lo_orderdate<19950101',
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf100_p2/load.groovy
@@ -81,7 +81,7 @@ suite('load') {
sql "sync"
def r = sql "select @@insert_timeout"
assertEquals(3600, r[0][0])
- year_cons = [
+ def year_cons = [
'lo_orderdate<19930101',
'lo_orderdate>=19930101 and lo_orderdate<19940101',
'lo_orderdate>=19940101 and lo_orderdate<19950101',
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/load.groovy
@@ -92,7 +92,7 @@ suite("load") {
sql "set insert_timeout=3600"
def r = sql "select @@insert_timeout"
assertEquals(3600, r[0][0])
- year_cons = [
+ def year_cons = [
'lo_orderdate<19930101',
'lo_orderdate>=19930101 and lo_orderdate<19940101',
'lo_orderdate>=19940101 and lo_orderdate<19950101',
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q1.1.groovy
@@ -38,6 +38,6 @@ suite("ssb_sf1_q1_1_nereids") {
AND lo_quantity < 25;
"""

- resultFile(file = "../sql/q1.1.out", tag = "q1.1")
+ resultFile("../sql/q1.1.out", "q1.1")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q1.2.groovy
@@ -39,6 +39,6 @@ suite("ssb_sf1_q1_2_nereids") {
AND lo_discount BETWEEN 4 AND 6
AND lo_quantity BETWEEN 26 AND 35;"""

- resultFile(file = "../sql/q1.2.out", tag = "q1.2")
+ resultFile("../sql/q1.2.out", "q1.2")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q1.3.groovy
@@ -40,6 +40,6 @@ suite("ssb_sf1_q1_3_nereids") {
AND lo_quantity BETWEEN 26 AND 35;
"""

- resultFile(file = "../sql/q1.3.out", tag = "q1.3")
+ resultFile("../sql/q1.3.out", "q1.3")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q2.1.groovy
@@ -41,6 +41,6 @@ suite("ssb_sf1_q2_1_nereids") {
ORDER BY d_year, p_brand;
"""

- resultFile(file = "../sql/q2.1.out", tag = "q2.1")
+ resultFile("../sql/q2.1.out", "q2.1")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q2.2.groovy
@@ -42,6 +42,6 @@ suite("ssb_sf1_q2_2_nereids") {
ORDER BY d_year, p_brand;
"""

- resultFile(file = "../sql/q2.2.out", tag = "q2.2")
+ resultFile("../sql/q2.2.out", "q2.2")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q2.3.groovy
@@ -40,6 +40,6 @@ suite("ssb_sf1_q2_3_nereids") {
ORDER BY d_year, p_brand;
"""

- resultFile(file = "../sql/q2.3.out", tag = "q2.3")
+ resultFile("../sql/q2.3.out", "q2.3")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q3.1.groovy
@@ -41,6 +41,6 @@ suite("ssb_sf1_q3_1_nereids") {
GROUP BY c_nation, s_nation, d_year
ORDER BY d_year ASC, REVENUE DESC;"""

- resultFile(file = "../sql/q3.1.out", tag = "q3.1")
+ resultFile("../sql/q3.1.out", "q3.1")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q3.2.groovy
@@ -42,6 +42,6 @@ suite("ssb_sf1_q3_2_nereids") {
ORDER BY d_year ASC, REVENUE DESC;
"""

- resultFile(file = "../sql/q3.2.out", tag = "q3.2")
+ resultFile("../sql/q3.2.out", "q3.2")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q3.3.groovy
@@ -43,6 +43,6 @@ suite("ssb_sf1_q3_3_nereids") {
GROUP BY c_city, s_city, d_year
ORDER BY d_year ASC, REVENUE DESC;"""

- resultFile(file = "../sql/q3.3.out", tag = "q3.3")
+ resultFile("../sql/q3.3.out", "q3.3")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q3.4.groovy
@@ -43,6 +43,6 @@ suite("ssb_sf1_q3_4_nereids") {
GROUP BY c_city, s_city, d_year
ORDER BY d_year ASC, REVENUE DESC;"""

- resultFile(file = "../sql/q3.4.out", tag = "q3.4")
+ resultFile("../sql/q3.4.out", "q3.4")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q4.1.groovy
@@ -45,6 +45,6 @@ suite("ssb_sf1_q4_1_nereids") {
ORDER BY d_year, c_nation;
"""

- resultFile(file = "../sql/q4.1.out", tag = "q4.1")
+ resultFile("../sql/q4.1.out", "q4.1")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q4.2.groovy
@@ -44,6 +44,6 @@ suite("ssb_sf1_q4_2_nereids") {
GROUP BY d_year, s_nation, p_category
ORDER BY d_year, s_nation, p_category;"""

- resultFile(file = "../sql/q4.2.out", tag = "q4.2")
+ resultFile("../sql/q4.2.out", "q4.2")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/ssb_sf1_p2/nereids/q4.3.groovy
@@ -42,6 +42,6 @@ suite("ssb_sf1_q4_3_nereids") {
GROUP BY d_year, s_city, p_brand
ORDER BY d_year, s_city, p_brand;"""

- resultFile(file = "../sql/q4.3.out", tag = "q4.3")
+ resultFile("../sql/q4.3.out", "q4.3")
}
}
2 changes: 1 addition & 1 deletion regression-test/suites/tpcds_sf1_unique_p1/query46.groovy
@@ -26,7 +26,7 @@ suite("tpcds_sf1_unique_p1_q46") {
logger.info("${stats}")
stats = sql "show column stats customer_address"
logger.info("${stats}")
- ds46 = """
+ def ds46 = """
SELECT
c_last_name
, c_first_name
@@ -69,7 +69,7 @@ suite("load_four_step") {
}
sleep(5000)
}
- rowCount = sql "select count(*) from ${tableName}"
+ def rowCount = sql "select count(*) from ${tableName}"
assertEquals(rows[1], rowCount[0][0])
}

@@ -63,7 +63,7 @@ suite("load_one_step") {
}
sleep(5000)
}
- rowCount = sql "select count(*) from ${tableName}"
+ def rowCount = sql "select count(*) from ${tableName}"
assertEquals(rows[1], rowCount[0][0])
}
}
@@ -66,7 +66,7 @@ suite("load_three_step") {
}
sleep(5000)
}
- rowCount = sql "select count(*) from ${tableName}"
+ def rowCount = sql "select count(*) from ${tableName}"
assertEquals(rows[1], rowCount[0][0])
}

@@ -63,7 +63,7 @@ suite("load_two_step") {
}
sleep(5000)
}
- rowCount = sql "select count(*) from ${tableName}"
+ def rowCount = sql "select count(*) from ${tableName}"
assertEquals(rows[1], rowCount[0][0])

// step 2: delete all data
2 changes: 1 addition & 1 deletion regression-test/suites/variant_log_data_p2/load.groovy
@@ -68,7 +68,7 @@ suite("regression_test_variant_logdata", "nonConcurrent,p2"){
"""
}
// 12. streamload remote file
- table_name = "logdata"
+ def table_name = "logdata"
create_table.call(table_name, "DUPLICATE", "4")
// sql "set enable_two_phase_read_opt = false;"
// no sparse columns
