Skip to content

Commit

Permalink
fix(interactive): add cis for Insight with GOpt (#4200)
Browse files Browse the repository at this point in the history
<!--
Thanks for your contribution! please review
https://github.com/alibaba/GraphScope/blob/main/CONTRIBUTING.md before
opening an issue.
-->

## What do these changes do?

<!-- Please give a short brief about these changes. -->

As titled.
This PR adds test suites based on the new GOpt-based compilation to the
Insight+Groot CI, including:
* Gremlin Tests on ModernGraph
* Cypher Tests on MovieGraph
* Pattern Tests, IC Tests, SimpleTest on LdbcGraph

## Related issue number

<!-- Are there any issues opened that will be resolved by merging this
change? -->

#4163

---------

Co-authored-by: shirly121 <[email protected]>
  • Loading branch information
BingqingLyu and shirly121 authored Sep 24, 2024
1 parent abca708 commit f9325f8
Show file tree
Hide file tree
Showing 11 changed files with 287 additions and 47 deletions.
15 changes: 12 additions & 3 deletions .github/workflows/gss.yml
Original file line number Diff line number Diff line change
Expand Up @@ -84,11 +84,20 @@ jobs:
mvn clean install -Pgroot-data-load --quiet
sccache --show-stats
- name: Gremlin Test
# - name: Gremlin Test
# run: |
# . ${HOME}/.graphscope_env
# cd interactive_engine/groot-server
# # the ir-core based test
# mvn test -P gremlin-test

- name: Groot with GOpt Integration Test
run: |
. ${HOME}/.graphscope_env
cd interactive_engine/groot-server
mvn test -P gremlin-test
git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git /tmp/gstest
mv /tmp/gstest/flex/ldbc-sf01-long-date /tmp/gstest/ldbc
cp -r flex/interactive/examples/movies /tmp/gstest/
cd interactive_engine/groot-client && ./gopt_groot_test.sh
- name: Upload tools for helm test to Artifact
uses: actions/upload-artifact@v4
Expand Down
7 changes: 4 additions & 3 deletions interactive_engine/compiler/ir_experimental_advanced_ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,8 @@ sleep 10
# start compiler service
cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 &
sleep 5
# run pattern tests and ldbc tests
cd ${base_dir} && make pattern_test && make ldbc_test
# run pattern tests
cd ${base_dir} && make pattern_test
exit_code=$?
# clean compiler service
ps -ef | grep "com.alibaba.graphscope.GraphServer" | awk '{print $2}' | xargs kill -9 || true
Expand All @@ -29,12 +29,13 @@ fi

# Test2: run advanced tests (pattern & ldbc & simple match) on experimental store via calcite-based ir
# start service
export ENGINE_TYPE=pegasus
cd ${base_dir}/../executor/ir/target/release &&
RUST_LOG=info DATA_PATH=/tmp/gstest/ldbc_graph_exp_bin PARTITION_ID=0 ./start_rpc_server --config ${base_dir}/../executor/ir/integrated/config/distributed/server_0 &
cd ${base_dir}/../executor/ir/target/release &&
RUST_LOG=info DATA_PATH=/tmp/gstest/ldbc_graph_exp_bin PARTITION_ID=1 ./start_rpc_server --config ${base_dir}/../executor/ir/integrated/config/distributed/server_1 &
sleep 10
cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json gremlin.script.language.name=antlr_gremlin_calcite graph.physical.opt=proto graph.planner.opt=CBO graph.statistics=src/test/resources/statistics/ldbc1_statistics.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 graph.planner.rules=NotMatchToAntiJoinRule,FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json gremlin.script.language.name=antlr_gremlin_calcite graph.physical.opt=proto graph.planner.opt=CBO graph.statistics=src/test/resources/statistics/ldbc1_statistics.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 graph.planner.rules=FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
sleep 5s
cd ${base_dir} && make pattern_test && make ldbc_test && make simple_test
exit_code=$?
Expand Down
2 changes: 1 addition & 1 deletion interactive_engine/compiler/ir_experimental_ci.sh
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,7 @@ fi

# Test5: run cypher movie tests on experimental store via calcite-based ir
# restart compiler service
cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/movie_schema.json graph.planner.opt=CBO graph.statistics:=./src/test/resources/statistics/movie_statistics.json graph.physical.opt=proto graph.planner.rules=NotMatchToAntiJoinRule,FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/movie_schema.json graph.planner.opt=CBO graph.statistics:=./src/test/resources/statistics/movie_statistics.json graph.physical.opt=proto graph.planner.rules=FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
sleep 10s
export ENGINE_TYPE=pegasus
cd ${base_dir} && make cypher_test
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -233,4 +233,18 @@ public Double getEdgeTypeCardinality(EdgeTypeId edgeType) {
return cardinality;
}
}

@Override
public String toString() {
    // Human-readable dump of the schema graph for logging/debugging:
    // every vertex type and edge type followed by its estimated cardinality.
    // Uses StringBuilder instead of repeated String concatenation, which
    // would allocate a new String on every loop iteration (O(n^2)).
    StringBuilder sb = new StringBuilder("GlogueSchema:\n");
    sb.append("VertexTypes:\n");
    for (Integer v : this.schemaGraph.vertexSet()) {
        sb.append(v).append(" ").append(this.vertexTypeCardinality.get(v)).append("\n");
    }
    sb.append("\nEdgeTypes:\n");
    for (EdgeTypeId e : this.schemaGraph.edgeSet()) {
        sb.append(e.toString()).append(" ").append(this.edgeTypeCardinality.get(e)).append("\n");
    }
    return sb.toString();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -339,6 +339,93 @@ public static QueryContext get_ldbc_7_test() {
}

/**
 * Builds the LDBC IC-8 style test case: a Cypher query that finds the most
 * recent 20 comments replying to any message created by person 2199023256816,
 * returning the reply authors and comment details ordered by creation date
 * (desc) and comment id (asc), paired with the 20 expected result records.
 *
 * <p>NOTE(review): the expected {@code commentDate} values are epoch-millis
 * timestamps; per the sibling {@code get_ldbc_8_test_exp} variant, the
 * experimental store renders dates in a different format, so this expected
 * set only applies to stores using the millisecond encoding.
 *
 * @return a {@link QueryContext} holding the query string and the expected
 *     result rows in their required order
 */
public static QueryContext get_ldbc_8_test() {
    String query =
            "MATCH (person:PERSON {id:"
                    + " 2199023256816})<-[:HASCREATOR]-(message)<-[:REPLYOF]-(comment:COMMENT)-[:HASCREATOR]->(author:PERSON)\n"
                    + "RETURN \n"
                    + "\tauthor.id,\n"
                    + "\tauthor.firstName,\n"
                    + "\tauthor.lastName,\n"
                    + "\tcomment.creationDate as commentDate,\n"
                    + "\tcomment.id as commentId,\n"
                    + "\tcomment.content\n"
                    + "ORDER BY\n"
                    + "\tcommentDate desc,\n"
                    + "\tcommentId asc\n"
                    + "LIMIT 20";
    // Exactly 20 records (LIMIT 20), pre-sorted by commentDate desc then
    // commentId asc, matching the query's ORDER BY clause.
    List<String> expected =
            Arrays.asList(
                    "Record<{id: 13194139533482, firstName: \"Ana Paula\", lastName: \"Silva\","
                            + " commentDate: 1347504375078, commentId: 1099511667820, content:"
                            + " \"About Heinz Guderian, aised and organized under his direction"
                            + " About Malacca Sul\"}>",
                    "Record<{id: 8796093022928, firstName: \"Hao\", lastName: \"Zhu\","
                            + " commentDate: 1347198063021, commentId: 1099511964827, content:"
                            + " \"About Nothing but the Beat, icki Minaj, Usher, Jennifer Hudson,"
                            + " Jessie J and Sia Furler\"}>",
                    "Record<{id: 10995116278796, firstName: \"Kenji\", lastName: \"Sakai\","
                            + " commentDate: 1347191906789, commentId: 1099511964825, content:"
                            + " \"About Humayun, to expand the Empire further, leaving a suAbout"
                            + " Philip K. Dick, r o\"}>",
                    "Record<{id: 30786325577752, firstName: \"Jie\", lastName: \"Yang\","
                            + " commentDate: 1347173707083, commentId: 1099511964826, content:"
                            + " \"no\"}>",
                    "Record<{id: 24189255812755, firstName: \"Paulo\", lastName: \"Santos\","
                            + " commentDate: 1347167706094, commentId: 1099511964828, content:"
                            + " \"good\"}>",
                    "Record<{id: 687, firstName: \"Deepak\", lastName: \"Singh\", commentDate:"
                            + " 1347101958087, commentId: 1030792351589, content: \"no"
                            + " way!\"}>",
                    "Record<{id: 2199023256586, firstName: \"Alfonso\", lastName: \"Elizalde\","
                            + " commentDate: 1347029913508, commentId: 1030792488768, content:"
                            + " \"About Humayun, ial legacy for his son, Akbar. His peaceful About"
                            + " Busta Rhymes, sta Rhy\"}>",
                    "Record<{id: 30786325578896, firstName: \"Yang\", lastName: \"Li\","
                            + " commentDate: 1347027425148, commentId: 1030792488774, content:"
                            + " \"roflol\"}>",
                    "Record<{id: 21990232555834, firstName: \"John\", lastName: \"Garcia\","
                            + " commentDate: 1347025241067, commentId: 1030792488763, content:"
                            + " \"no way!\"}>",
                    "Record<{id: 13194139534578, firstName: \"Kunal\", lastName: \"Sharma\","
                            + " commentDate: 1347020657245, commentId: 1030792488765, content:"
                            + " \"maybe\"}>",
                    "Record<{id: 15393162789932, firstName: \"Fali Sam\", lastName: \"Price\","
                            + " commentDate: 1347013079051, commentId: 1030792488767, content:"
                            + " \"roflol\"}>",
                    "Record<{id: 30786325579189, firstName: \"Cheh\", lastName: \"Yang\","
                            + " commentDate: 1346995568122, commentId: 1030792488759, content:"
                            + " \"yes\"}>",
                    "Record<{id: 555, firstName: \"Chen\", lastName: \"Yang\", commentDate:"
                            + " 1346986024535, commentId: 1030792488769, content: \"About Skin and"
                            + " Bones, Another Round, reprising the contribution he made to the"
                            + " original a\"}>",
                    "Record<{id: 13194139534382, firstName: \"A.\", lastName: \"Budjana\","
                            + " commentDate: 1346985914312, commentId: 1030792488758, content:"
                            + " \"duh\"}>",
                    "Record<{id: 8796093022290, firstName: \"Alexei\", lastName: \"Codreanu\","
                            + " commentDate: 1346966601712, commentId: 1030792488760, content:"
                            + " \"ok\"}>",
                    "Record<{id: 21990232555958, firstName: \"Ernest B\", lastName:"
                            + " \"Law-Yone\", commentDate: 1346962688132, commentId:"
                            + " 1030792488766, content: \"great\"}>",
                    "Record<{id: 26388279067760, firstName: \"Max\", lastName: \"Bauer\","
                            + " commentDate: 1346954071955, commentId: 1030792488761, content:"
                            + " \"thx\"}>",
                    "Record<{id: 10995116278300, firstName: \"Jie\", lastName: \"Li\","
                            + " commentDate: 1346953221751, commentId: 1030792488762, content:"
                            + " \"maybe\"}>",
                    "Record<{id: 10995116279093, firstName: \"Diem\", lastName: \"Nguyen\","
                            + " commentDate: 1346953186333, commentId: 1030792488764, content:"
                            + " \"thanks\"}>",
                    "Record<{id: 26388279066662, firstName: \"Alfonso\", lastName:"
                            + " \"Rodriguez\", commentDate: 1346935258972, commentId:"
                            + " 1030792487632, content: \"good\"}>");
    return new QueryContext(query, expected);
}

// minor diff with get_ldbc_8_test since in experiment store the date is in a different format
// (e.g., 20120629020000000)
public static QueryContext get_ldbc_8_test_exp() {
String query =
"MATCH (person:PERSON {id:"
+ " 2199023256816})<-[:HASCREATOR]-(message)<-[:REPLYOF]-(comment:COMMENT)-[:HASCREATOR]->(author:PERSON)\n"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -133,22 +133,22 @@ public static QueryContext get_simple_match_query_9_test() {

public static QueryContext get_simple_match_query_10_test() {
String query =
"MATCH( a {id:933})-[b]-(c {id: 2199023256077}) return labels(a) AS"
"MATCH( a {id:933l})-[b]-(c {id: 2199023256077l}) return labels(a) AS"
+ " vertexLabelName, type(b) AS edgeLabelName;";
List<String> expected =
Arrays.asList("Record<{vertexLabelName: \"PERSON\", edgeLabelName: \"KNOWS\"}>");
return new QueryContext(query, expected);
}

public static QueryContext get_simple_match_query_11_test() {
String query = "Match( p: PLACE) return p ORDER BY p.id LIMIT 5;";
String query = "Match( p: PLACE) return p.id as pid ORDER BY pid LIMIT 5;";
List<String> expected =
Arrays.asList(
"Record<{p: node<0>}>",
"Record<{p: node<1>}>",
"Record<{p: node<2>}>",
"Record<{p: node<3>}>",
"Record<{p: node<4>}>");
"Record<{pid: 0}>",
"Record<{pid: 1}>",
"Record<{pid: 2}>",
"Record<{pid: 3}>",
"Record<{pid: 4}>");
return new QueryContext(query, expected);
}

Expand Down Expand Up @@ -187,7 +187,7 @@ public static QueryContext get_simple_match_query_15_test() {
List<String> expected =
Arrays.asList(
"Record<{aId: 94, c:"
+ " path[[(72057594037928030)-[771484:KNOWS]->(72057594037928923)],"
+ " path[(72057594037928030)-[771484:KNOWS]->(72057594037928923)],"
+ " bId: 987}>",
"Record<{aId: 94, c:"
+ " path[(72057594037928030)-[771485:KNOWS]->(72059793061184090)], bId:"
Expand All @@ -199,7 +199,7 @@ public static QueryContext get_simple_match_query_15_test() {
+ " path[(72057594037928030)-[771487:KNOWS]->(72064191107695368)], bId:"
+ " 6597069767432}>",
"Record<{aId: 94, c:"
+ " path[(72057594037928030)-[771488:KNOWS]->(72066390130950305)]],"
+ " path[(72057594037928030)-[771488:KNOWS]->(72066390130950305)],"
+ " bId: 8796093022369}>");
return new QueryContext(query, expected);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -140,27 +140,27 @@ public Map<String, Object> getProperties(Configs configs) {
String json =
"{\n"
+ " \"vertex_properties\": {\n"
+ " \"-7732428334775821489\": {\n"
+ " \"7279245059266044353\": {\n"
+ " \"name\": \"marko\",\n"
+ " \"age\": 29\n"
+ " },\n"
+ " \"6308168136910223060\": {\n"
+ " \"-4593408084868477921\": {\n"
+ " \"name\": \"vadas\",\n"
+ " \"age\": 27\n"
+ " },\n"
+ " \"-7991964441648465618\": {\n"
+ " \"7263461402022796979\": {\n"
+ " \"name\": \"lop\",\n"
+ " \"lang\": \"java\"\n"
+ " },\n"
+ " \"-6112228345218519679\": {\n"
+ " \"-8728313207994723275\": {\n"
+ " \"name\": \"josh\",\n"
+ " \"age\": 32\n"
+ " },\n"
+ " \"2233628339503041259\": {\n"
+ " \"6632543798356094189\": {\n"
+ " \"name\": \"ripple\",\n"
+ " \"lang\": \"java\"\n"
+ " },\n"
+ " \"-2045066182110421307\": {\n"
+ " \"-5566731246168985051\": {\n"
+ " \"name\": \"peter\",\n"
+ " \"age\": 35\n"
+ " }\n"
Expand All @@ -170,16 +170,16 @@ public Map<String, Object> getProperties(Configs configs) {
+ " \"weight\": 0.5\n"
+ " },\n"
+ " \"1000001\": {\n"
+ " \"weight\": 0.4\n"
+ " \"weight\": 1.0\n"
+ " },\n"
+ " \"1000004\": {\n"
+ " \"weight\": 1.0\n"
+ " \"weight\": 0.4\n"
+ " },\n"
+ " \"1000003\": {\n"
+ " \"weight\": 0.4\n"
+ " \"weight\": 1.0\n"
+ " },\n"
+ " \"1000002\": {\n"
+ " \"weight\": 1.0\n"
+ " \"weight\": 0.4\n"
+ " },\n"
+ " \"1000005\": {\n"
+ " \"weight\": 0.2\n"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,8 @@ public void run_ldbc_3_test() {

@Test
public void run_ldbc_4_test() {
assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE")));
// skip this test in pegasus (actually exp-store) since the date format is different.
assumeFalse("pegasus".equals(System.getenv("ENGINE_TYPE")));
QueryContext testQuery = LdbcQueries.get_ldbc_4_test();
Result result = session.run(testQuery.getQuery());
Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString());
Expand All @@ -79,21 +80,34 @@ public void run_ldbc_6_test() {

@Test
public void run_ldbc_7_test() {
    // Only the hiactor engine supports this query: optional match (via
    // optional edge_expand) is not supported yet on pegasus.
    assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE")));
    QueryContext ctx = LdbcQueries.get_ldbc_7_test();
    Result actual = session.run(ctx.getQuery());
    Assert.assertEquals(ctx.getExpectedResult().toString(), actual.list().toString());
}

// @Test
// public void run_ldbc_8_test() {
// QueryContext testQuery = LdbcQueries.get_ldbc_8_test();
// Result result = session.run(testQuery.getQuery());
// Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString());
// }
@Test
public void run_ldbc_8_test() {
    // Skipped on the pegasus engine, where the expected date format differs
    // (see the _exp variant of this test for that engine).
    assumeFalse("pegasus".equals(System.getenv("ENGINE_TYPE")));
    QueryContext ctx = LdbcQueries.get_ldbc_8_test();
    Result actual = session.run(ctx.getQuery());
    Assert.assertEquals(ctx.getExpectedResult().toString(), actual.list().toString());
}

@Test
public void run_ldbc_8_test_exp() {
    // Experimental-store variant of run_ldbc_8_test: only meaningful on the
    // pegasus engine, whose date format differs from the other stores.
    assumeTrue("pegasus".equals(System.getenv("ENGINE_TYPE")));
    QueryContext ctx = LdbcQueries.get_ldbc_8_test_exp();
    Result actual = session.run(ctx.getQuery());
    Assert.assertEquals(ctx.getExpectedResult().toString(), actual.list().toString());
}

@Test
public void run_ldbc_10_test() {
// skip this test in pegasus (actually exp-store and groot-store) since the date format is
// different
assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE")));
QueryContext testQuery = LdbcQueries.get_ldbc_10_test();
Result result = session.run(testQuery.getQuery());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

package com.alibaba.graphscope.cypher.integration.movie;

import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;

import com.alibaba.graphscope.cypher.integration.suite.QueryContext;
Expand Down Expand Up @@ -154,7 +155,7 @@ public void run_movie_query19_test() {

@Test
public void run_movie_query20_test() {
assumeTrue("pegasus".equals(System.getenv("ENGINE_TYPE")));
assumeFalse("hiactor".equals(System.getenv("ENGINE_TYPE")));
QueryContext testQuery = MovieQueries.get_movie_query20_test();
Result result = session.run(testQuery.getQuery());
Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString());
Expand Down
Loading

0 comments on commit f9325f8

Please sign in to comment.