diff --git a/.github/workflows/gss.yml b/.github/workflows/gss.yml
index 9444238ba176..da6524b7d353 100644
--- a/.github/workflows/gss.yml
+++ b/.github/workflows/gss.yml
@@ -84,11 +84,20 @@ jobs:
           mvn clean install -Pgroot-data-load --quiet
           sccache --show-stats
 
-      - name: Gremlin Test
+      # - name: Gremlin Test
+      #   run: |
+      #     . ${HOME}/.graphscope_env
+      #     cd interactive_engine/groot-server
+      #     # the ir-core based test
+      #     mvn test -P gremlin-test
+
+      - name: Groot with GOpt Integration Test
         run: |
           . ${HOME}/.graphscope_env
-          cd interactive_engine/groot-server
-          mvn test -P gremlin-test
+          git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git /tmp/gstest
+          mv /tmp/gstest/flex/ldbc-sf01-long-date /tmp/gstest/ldbc
+          cp -r flex/interactive/examples/movies /tmp/gstest/
+          cd interactive_engine/groot-client && ./gopt_groot_test.sh
 
       - name: Upload tools for helm test to Artifact
         uses: actions/upload-artifact@v4
diff --git a/interactive_engine/compiler/ir_experimental_advanced_ci.sh b/interactive_engine/compiler/ir_experimental_advanced_ci.sh
index a546c770d553..6d4825d2dbb6 100755
--- a/interactive_engine/compiler/ir_experimental_advanced_ci.sh
+++ b/interactive_engine/compiler/ir_experimental_advanced_ci.sh
@@ -14,8 +14,8 @@ sleep 10
 # start compiler service
 cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 &
 sleep 5
-# run pattern tests and ldbc tests
-cd ${base_dir} && make pattern_test && make ldbc_test
+# run pattern tests
+cd ${base_dir} && make pattern_test
 exit_code=$?
 # clean compiler service
 ps -ef | grep "com.alibaba.graphscope.GraphServer" | awk '{print $2}' | xargs kill -9 || true
@@ -29,12 +29,13 @@ fi
 
 # Test2: run advanced tests (pattern & ldbc & simple match) on experimental store via calcite-based ir
 # start service
+export ENGINE_TYPE=pegasus
 cd ${base_dir}/../executor/ir/target/release && RUST_LOG=info DATA_PATH=/tmp/gstest/ldbc_graph_exp_bin PARTITION_ID=0 ./start_rpc_server --config ${base_dir}/../executor/ir/integrated/config/distributed/server_0 &
 cd ${base_dir}/../executor/ir/target/release && RUST_LOG=info DATA_PATH=/tmp/gstest/ldbc_graph_exp_bin PARTITION_ID=1 ./start_rpc_server --config ${base_dir}/../executor/ir/integrated/config/distributed/server_1 &
 sleep 10
-cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json gremlin.script.language.name=antlr_gremlin_calcite graph.physical.opt=proto graph.planner.opt=CBO graph.statistics=src/test/resources/statistics/ldbc1_statistics.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 graph.planner.rules=NotMatchToAntiJoinRule,FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
+cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/ldbc_schema.json gremlin.script.language.name=antlr_gremlin_calcite graph.physical.opt=proto graph.planner.opt=CBO graph.statistics=src/test/resources/statistics/ldbc1_statistics.json pegasus.hosts:=127.0.0.1:1234,127.0.0.1:1235 graph.planner.rules=FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
 sleep 5s
 cd ${base_dir} && make pattern_test && make ldbc_test && make simple_test
 exit_code=$?
diff --git a/interactive_engine/compiler/ir_experimental_ci.sh b/interactive_engine/compiler/ir_experimental_ci.sh
index 71d668c35d94..66e470afa0b6 100755
--- a/interactive_engine/compiler/ir_experimental_ci.sh
+++ b/interactive_engine/compiler/ir_experimental_ci.sh
@@ -78,7 +78,7 @@ fi
 
 # Test5: run cypher movie tests on experimental store via calcite-based ir
 # restart compiler service
-cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/movie_schema.json graph.planner.opt=CBO graph.statistics:=./src/test/resources/statistics/movie_statistics.json graph.physical.opt=proto graph.planner.rules=NotMatchToAntiJoinRule,FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
+cd ${base_dir} && make run graph.schema:=../executor/ir/core/resource/movie_schema.json graph.planner.opt=CBO graph.statistics:=./src/test/resources/statistics/movie_statistics.json graph.physical.opt=proto graph.planner.rules=FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,ExpandGetVFusionRule &
 sleep 10s
 export ENGINE_TYPE=pegasus
 cd ${base_dir} && make cypher_test
diff --git a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/common/ir/rel/metadata/schema/GlogueSchema.java b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/common/ir/rel/metadata/schema/GlogueSchema.java
index 398773e075b8..1d7e09c495f0 100644
--- a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/common/ir/rel/metadata/schema/GlogueSchema.java
+++ b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/common/ir/rel/metadata/schema/GlogueSchema.java
@@ -233,4 +233,18 @@ public Double getEdgeTypeCardinality(EdgeTypeId edgeType) {
             return cardinality;
         }
     }
+
+    @Override
+    public String toString() {
+        String s = "GlogueSchema:\n";
+        s += "VertexTypes:\n";
+        for (Integer v : this.schemaGraph.vertexSet()) {
+            s += v + " " + this.vertexTypeCardinality.get(v) + "\n";
+        }
+        s += "\nEdgeTypes:\n";
+        for (EdgeTypeId e : this.schemaGraph.edgeSet()) {
+            s += e.toString() + " " + this.edgeTypeCardinality.get(e) + "\n";
+        }
+        return s;
+    }
 }
diff --git a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/ldbc/LdbcQueries.java b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/ldbc/LdbcQueries.java
index 528ec6f9cb53..4f838df78353 100644
--- a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/ldbc/LdbcQueries.java
+++ b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/ldbc/LdbcQueries.java
@@ -339,6 +339,93 @@ public static QueryContext get_ldbc_7_test() {
     }
 
     public static QueryContext get_ldbc_8_test() {
+        String query =
+                "MATCH (person:PERSON {id:"
+                        + " 2199023256816})<-[:HASCREATOR]-(message)<-[:REPLYOF]-(comment:COMMENT)-[:HASCREATOR]->(author:PERSON)\n"
+                        + "RETURN \n"
+                        + "\tauthor.id,\n"
+                        + "\tauthor.firstName,\n"
+                        + "\tauthor.lastName,\n"
+                        + "\tcomment.creationDate as commentDate,\n"
+                        + "\tcomment.id as commentId,\n"
+                        + "\tcomment.content\n"
+                        + "ORDER BY\n"
+                        + "\tcommentDate desc,\n"
+                        + "\tcommentId asc\n"
+                        + "LIMIT 20";
+        List expected =
+                Arrays.asList(
+                        "Record<{id: 13194139533482, firstName: \"Ana Paula\", lastName: \"Silva\","
+                                + " commentDate: 1347504375078, commentId: 1099511667820, content:"
+                                + " \"About Heinz Guderian, aised and organized under his direction"
+                                + " About Malacca Sul\"}>",
+                        "Record<{id: 8796093022928, firstName: \"Hao\", lastName: \"Zhu\","
+                                + " commentDate: 1347198063021, commentId: 1099511964827, content:"
+                                + " \"About Nothing but the Beat, icki Minaj, Usher, Jennifer Hudson,"
+                                + " Jessie J and Sia Furler\"}>",
+                        "Record<{id: 10995116278796, firstName: \"Kenji\", lastName: \"Sakai\","
+                                + " commentDate: 1347191906789, commentId: 1099511964825, content:"
+                                + " \"About Humayun, to expand the Empire further, leaving a suAbout"
+                                + " Philip K. Dick, r o\"}>",
+                        "Record<{id: 30786325577752, firstName: \"Jie\", lastName: \"Yang\","
+                                + " commentDate: 1347173707083, commentId: 1099511964826, content:"
+                                + " \"no\"}>",
+                        "Record<{id: 24189255812755, firstName: \"Paulo\", lastName: \"Santos\","
+                                + " commentDate: 1347167706094, commentId: 1099511964828, content:"
+                                + " \"good\"}>",
+                        "Record<{id: 687, firstName: \"Deepak\", lastName: \"Singh\", commentDate:"
+                                + " 1347101958087, commentId: 1030792351589, content: \"no"
+                                + " way!\"}>",
+                        "Record<{id: 2199023256586, firstName: \"Alfonso\", lastName: \"Elizalde\","
+                                + " commentDate: 1347029913508, commentId: 1030792488768, content:"
+                                + " \"About Humayun, ial legacy for his son, Akbar. His peaceful About"
+                                + " Busta Rhymes, sta Rhy\"}>",
+                        "Record<{id: 30786325578896, firstName: \"Yang\", lastName: \"Li\","
+                                + " commentDate: 1347027425148, commentId: 1030792488774, content:"
+                                + " \"roflol\"}>",
+                        "Record<{id: 21990232555834, firstName: \"John\", lastName: \"Garcia\","
+                                + " commentDate: 1347025241067, commentId: 1030792488763, content:"
+                                + " \"no way!\"}>",
+                        "Record<{id: 13194139534578, firstName: \"Kunal\", lastName: \"Sharma\","
+                                + " commentDate: 1347020657245, commentId: 1030792488765, content:"
+                                + " \"maybe\"}>",
+                        "Record<{id: 15393162789932, firstName: \"Fali Sam\", lastName: \"Price\","
+                                + " commentDate: 1347013079051, commentId: 1030792488767, content:"
+                                + " \"roflol\"}>",
+                        "Record<{id: 30786325579189, firstName: \"Cheh\", lastName: \"Yang\","
+                                + " commentDate: 1346995568122, commentId: 1030792488759, content:"
+                                + " \"yes\"}>",
+                        "Record<{id: 555, firstName: \"Chen\", lastName: \"Yang\", commentDate:"
+                                + " 1346986024535, commentId: 1030792488769, content: \"About Skin and"
+                                + " Bones, Another Round, reprising the contribution he made to the"
+                                + " original a\"}>",
+                        "Record<{id: 13194139534382, firstName: \"A.\", lastName: \"Budjana\","
+                                + " commentDate: 1346985914312, commentId: 1030792488758, content:"
+                                + " \"duh\"}>",
+                        "Record<{id: 8796093022290, firstName: \"Alexei\", lastName: \"Codreanu\","
+                                + " commentDate: 1346966601712, commentId: 1030792488760, content:"
+                                + " \"ok\"}>",
+                        "Record<{id: 21990232555958, firstName: \"Ernest B\", lastName:"
+                                + " \"Law-Yone\", commentDate: 1346962688132, commentId:"
+                                + " 1030792488766, content: \"great\"}>",
+                        "Record<{id: 26388279067760, firstName: \"Max\", lastName: \"Bauer\","
+                                + " commentDate: 1346954071955, commentId: 1030792488761, content:"
+                                + " \"thx\"}>",
+                        "Record<{id: 10995116278300, firstName: \"Jie\", lastName: \"Li\","
+                                + " commentDate: 1346953221751, commentId: 1030792488762, content:"
+                                + " \"maybe\"}>",
+                        "Record<{id: 10995116279093, firstName: \"Diem\", lastName: \"Nguyen\","
+                                + " commentDate: 1346953186333, commentId: 1030792488764, content:"
+                                + " \"thanks\"}>",
+                        "Record<{id: 26388279066662, firstName: \"Alfonso\", lastName:"
+                                + " \"Rodriguez\", commentDate: 1346935258972, commentId:"
+                                + " 1030792487632, content: \"good\"}>");
+        return new QueryContext(query, expected);
+    }
+
+    // minor diff with get_ldbc_8_test since in experiment store the date is in a different format
+    // (e.g., 20120629020000000)
+    public static QueryContext get_ldbc_8_test_exp() {
         String query =
                 "MATCH (person:PERSON {id:"
                         + " 2199023256816})<-[:HASCREATOR]-(message)<-[:REPLYOF]-(comment:COMMENT)-[:HASCREATOR]->(author:PERSON)\n"
diff --git a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/simple/SimpleMatchQueries.java b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/simple/SimpleMatchQueries.java
index cb00ef88a020..a6cff80461b6 100644
--- a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/simple/SimpleMatchQueries.java
+++ b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/cypher/integration/suite/simple/SimpleMatchQueries.java
@@ -133,7 +133,7 @@ public static QueryContext get_simple_match_query_9_test() {
 
     public static QueryContext get_simple_match_query_10_test() {
         String query =
-                "MATCH( a {id:933})-[b]-(c {id: 2199023256077}) return labels(a) AS"
+                "MATCH( a {id:933l})-[b]-(c {id: 2199023256077l}) return labels(a) AS"
                         + " vertexLabelName, type(b) AS edgeLabelName;";
         List expected =
                 Arrays.asList("Record<{vertexLabelName: \"PERSON\", edgeLabelName: \"KNOWS\"}>");
@@ -141,14 +141,14 @@ public static QueryContext get_simple_match_query_10_test() {
     }
 
     public static QueryContext get_simple_match_query_11_test() {
-        String query = "Match( p: PLACE) return p ORDER BY p.id LIMIT 5;";
+        String query = "Match( p: PLACE) return p.id as pid ORDER BY pid LIMIT 5;";
         List expected =
                 Arrays.asList(
-                        "Record<{p: node<0>}>",
-                        "Record<{p: node<1>}>",
-                        "Record<{p: node<2>}>",
-                        "Record<{p: node<3>}>",
-                        "Record<{p: node<4>}>");
+                        "Record<{pid: 0}>",
+                        "Record<{pid: 1}>",
+                        "Record<{pid: 2}>",
+                        "Record<{pid: 3}>",
+                        "Record<{pid: 4}>");
         return new QueryContext(query, expected);
     }
 
@@ -187,7 +187,7 @@ public static QueryContext get_simple_match_query_15_test() {
         List expected =
                 Arrays.asList(
                         "Record<{aId: 94, c:"
-                                + " path[[(72057594037928030)-[771484:KNOWS]->(72057594037928923)],"
+                                + " path[(72057594037928030)-[771484:KNOWS]->(72057594037928923)],"
                                 + " bId: 987}>",
                         "Record<{aId: 94, c:"
                                 + " path[(72057594037928030)-[771485:KNOWS]->(72059793061184090)], bId:"
@@ -199,7 +199,7 @@
                                 + " path[(72057594037928030)-[771487:KNOWS]->(72064191107695368)], bId:"
                                 + " 6597069767432}>",
                         "Record<{aId: 94, c:"
-                                + " path[(72057594037928030)-[771488:KNOWS]->(72066390130950305)]],"
+                                + " path[(72057594037928030)-[771488:KNOWS]->(72066390130950305)],"
                                 + " bId: 8796093022369}>");
         return new QueryContext(query, expected);
     }
diff --git a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/gremlin/integration/result/TestGraphFactory.java b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/gremlin/integration/result/TestGraphFactory.java
index 377dda31d7d7..3a476382c456 100644
--- a/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/gremlin/integration/result/TestGraphFactory.java
+++ b/interactive_engine/compiler/src/main/java/com/alibaba/graphscope/gremlin/integration/result/TestGraphFactory.java
@@ -140,27 +140,27 @@ public Map getProperties(Configs configs) {
         String json =
                 "{\n"
                         + "    \"vertex_properties\": {\n"
-                        + "        \"-7732428334775821489\": {\n"
+                        + "        \"7279245059266044353\": {\n"
                         + "            \"name\": \"marko\",\n"
                         + "            \"age\": 29\n"
                        + "        },\n"
-                        + "        \"6308168136910223060\": {\n"
+                        + "        \"-4593408084868477921\": {\n"
                         + "            \"name\": \"vadas\",\n"
                         + "            \"age\": 27\n"
                         + "        },\n"
-                        + "        \"-7991964441648465618\": {\n"
+                        + "        \"7263461402022796979\": {\n"
                         + "            \"name\": \"lop\",\n"
                         + "            \"lang\": \"java\"\n"
                         + "        },\n"
-                        + "        \"-6112228345218519679\": {\n"
+                        + "        \"-8728313207994723275\": {\n"
                         + "            \"name\": \"josh\",\n"
                         + "            \"age\": 32\n"
                         + "        },\n"
-                        + "        \"2233628339503041259\": {\n"
+                        + "        \"6632543798356094189\": {\n"
                         + "            \"name\": \"ripple\",\n"
                         + "            \"lang\": \"java\"\n"
                         + "        },\n"
-                        + "        \"-2045066182110421307\": {\n"
+                        + "        \"-5566731246168985051\": {\n"
                         + "            \"name\": \"peter\",\n"
                         + "            \"age\": 35\n"
                         + "        }\n"
@@ -170,16 +170,16 @@ public Map getProperties(Configs configs) {
                         + "            \"weight\": 0.5\n"
                         + "        },\n"
                         + "        \"1000001\": {\n"
-                        + "            \"weight\": 0.4\n"
+                        + "            \"weight\": 1.0\n"
                         + "        },\n"
                         + "        \"1000004\": {\n"
-                        + "            \"weight\": 1.0\n"
+                        + "            \"weight\": 0.4\n"
                         + "        },\n"
                         + "        \"1000003\": {\n"
-                        + "            \"weight\": 0.4\n"
+                        + "            \"weight\": 1.0\n"
                         + "        },\n"
                         + "        \"1000002\": {\n"
-                        + "            \"weight\": 1.0\n"
+                        + "            \"weight\": 0.4\n"
                         + "        },\n"
                         + "        \"1000005\": {\n"
                         + "            \"weight\": 0.2\n"
diff --git a/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/ldbc/IrLdbcTest.java b/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/ldbc/IrLdbcTest.java
index e4a553376ba2..0bb3ccb5b737 100644
--- a/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/ldbc/IrLdbcTest.java
+++ b/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/ldbc/IrLdbcTest.java
@@ -56,7 +56,8 @@ public void run_ldbc_3_test() {
 
     @Test
     public void run_ldbc_4_test() {
-        assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE")));
+        // skip this test in pegasus (actually exp-store) since the date format is different.
+        assumeFalse("pegasus".equals(System.getenv("ENGINE_TYPE")));
         QueryContext testQuery = LdbcQueries.get_ldbc_4_test();
         Result result = session.run(testQuery.getQuery());
         Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString());
@@ -79,21 +80,34 @@ public void run_ldbc_6_test() {
 
     @Test
     public void run_ldbc_7_test() {
+        // skip this test in pegasus as optional match (via optional edge_expand) is not supported
+        // yet.
assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE"))); QueryContext testQuery = LdbcQueries.get_ldbc_7_test(); Result result = session.run(testQuery.getQuery()); Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString()); } - // @Test - // public void run_ldbc_8_test() { - // QueryContext testQuery = LdbcQueries.get_ldbc_8_test(); - // Result result = session.run(testQuery.getQuery()); - // Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString()); - // } + @Test + public void run_ldbc_8_test() { + assumeFalse("pegasus".equals(System.getenv("ENGINE_TYPE"))); + QueryContext testQuery = LdbcQueries.get_ldbc_8_test(); + Result result = session.run(testQuery.getQuery()); + Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString()); + } + + @Test + public void run_ldbc_8_test_exp() { + assumeTrue("pegasus".equals(System.getenv("ENGINE_TYPE"))); + QueryContext testQuery = LdbcQueries.get_ldbc_8_test_exp(); + Result result = session.run(testQuery.getQuery()); + Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString()); + } @Test public void run_ldbc_10_test() { + // skip this test in pegasus (actually exp-store and groot-store) since the date format is + // different assumeTrue("hiactor".equals(System.getenv("ENGINE_TYPE"))); QueryContext testQuery = LdbcQueries.get_ldbc_10_test(); Result result = session.run(testQuery.getQuery()); diff --git a/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/movie/MovieTest.java b/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/movie/MovieTest.java index d909d47d9886..7f5ea090886d 100644 --- a/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/movie/MovieTest.java +++ b/interactive_engine/compiler/src/test/java/com/alibaba/graphscope/cypher/integration/movie/MovieTest.java @@ -16,6 +16,7 @@ package com.alibaba.graphscope.cypher.integration.movie; +import static org.junit.Assume.assumeFalse; import static org.junit.Assume.assumeTrue; import com.alibaba.graphscope.cypher.integration.suite.QueryContext; @@ -154,7 +155,7 @@ public void run_movie_query19_test() { @Test public void run_movie_query20_test() { - assumeTrue("pegasus".equals(System.getenv("ENGINE_TYPE"))); + assumeFalse("hiactor".equals(System.getenv("ENGINE_TYPE"))); QueryContext testQuery = MovieQueries.get_movie_query20_test(); Result result = session.run(testQuery.getQuery()); Assert.assertEquals(testQuery.getExpectedResult().toString(), result.list().toString()); diff --git a/interactive_engine/groot-client/gopt_groot_test.sh b/interactive_engine/groot-client/gopt_groot_test.sh new file mode 100755 index 000000000000..c8534c74da65 --- /dev/null +++ b/interactive_engine/groot-client/gopt_groot_test.sh @@ -0,0 +1,108 @@ +#!/bin/bash +set -x +BASE_DIR=$(cd "$(dirname "$0")"; pwd) +declare -r COMPILER_DIR=${BASE_DIR}/../compiler +declare -r DATA_IMPORT_SCRIPT_DIR=${BASE_DIR}/../groot-server/src/main/resources +declare -r CONFIG_FILE="/tmp/groot.config" +declare -r METADATA_DIR="/tmp/groot/meta" +declare -r DATA_DIR="/tmp/groot/data" +declare -r CSV_DATA_DIR="/tmp/gstest" +export LOG_DIR="/tmp/log/graphscope" + +# necessary python packages for data import, including pandas, graphscope and gremlin_python +if ! python3 -c "import pandas" &> /dev/null; then + echo "Installing pandas..." + python3 -m pip install pandas +else + echo "pandas is already installed." +fi + +if ! 
python3 -c "import graphscope" &> /dev/null; then + echo "Installing graphscope..." + python3 -m pip install graphscope +else + echo "graphscope is already installed." +fi + +if ! python3 -c "import gremlin_python" &> /dev/null; then + echo "Installing gremlin_python..." + python3 -m pip install gremlinpython +else + echo "gremlin_python is already installed." +fi + +# start server +ps -ef | grep "com.alibaba.graphscope.groot.servers.GrootGraph" | grep -v grep | awk '{print $2}' | xargs kill -9 +cd ${BASE_DIR}/../assembly/target && tar xvzf groot.tar.gz && cd groot +GROOT_DIR=$(pwd) +sed -e "s@LOG4RS_CONFIG@${GROOT_DIR}/conf/log4rs.yml@g" \ + -e "s@collect.statistics=false@collect.statistics=true@g" \ + -e "s@neo4j.bolt.server.disabled=true@neo4j.bolt.server.disabled=false@g" \ + -e "s@gremlin.server.port=12312@gremlin.server.port=8182@g" \ + -e "s@file.meta.store.path=./meta@file.meta.store.path=${METADATA_DIR}@g" \ + -e "s@store.data.path=./data@store.data.path=${DATA_DIR}@g" \ + -e "\$a\ + graph.planner.is.on=true" \ + -e "\$a\ + graph.physical.opt=proto" \ + -e "\$a\ + graph.planner.opt=CBO" \ + -e "\$a\ + graph.planner.rules=FilterIntoJoinRule,FilterMatchRule,ExtendIntersectRule,JoinDecompositionRule,ExpandGetVFusionRule" \ + -e "\$a\ + gremlin.script.language.name=antlr_gremlin_calcite" \ + ${GROOT_DIR}/conf/config.template > ${CONFIG_FILE} + +GROOT_CONF_FILE=${CONFIG_FILE} ${GROOT_DIR}/bin/store_ctl.sh start & + +sleep 30 +# load data +cd ${DATA_IMPORT_SCRIPT_DIR} && python3 import_data.py --graph modern --data_path ${CSV_DATA_DIR}/modern_graph +sleep 60 +# run modern graph test +cd ${COMPILER_DIR} && make gremlin_calcite_test +exit_code=$? +ps -ef | grep "com.alibaba.graphscope.groot.servers.GrootGraph" | grep -v grep | awk '{print $2}' | xargs kill -9 +# clean data +rm -r ${METADATA_DIR} +rm -r ${DATA_DIR} +if [ $exit_code -ne 0 ]; then + echo "gopt_on_groot gremlin test fail" + exit 1 +fi + +# start server +GROOT_CONF_FILE=${CONFIG_FILE} ${GROOT_DIR}/bin/store_ctl.sh start & +sleep 30 +# load data +cd ${DATA_IMPORT_SCRIPT_DIR} && python3 import_data.py --graph movie --data_path ${CSV_DATA_DIR}/movies +sleep 60 +# run movie graph test +cd ${COMPILER_DIR} && make cypher_test +exit_code=$? +ps -ef | grep "com.alibaba.graphscope.groot.servers.GrootGraph" | grep -v grep | awk '{print $2}' | xargs kill -9 +# clean data +rm -r ${METADATA_DIR} +rm -r ${DATA_DIR} +if [ $exit_code -ne 0 ]; then + echo "gopt_on_groot cypher test fail" + exit 1 +fi + +# start server +GROOT_CONF_FILE=${CONFIG_FILE} ${GROOT_DIR}/bin/store_ctl.sh start & +sleep 30 +# load data +cd ${DATA_IMPORT_SCRIPT_DIR} && python3 import_data.py --graph ldbc --data_path ${CSV_DATA_DIR}/ldbc +sleep 360 +# run ldbc graph test +cd ${COMPILER_DIR} && make ldbc_test && make simple_test && make pattern_test +exit_code=$? 
+ps -ef | grep "com.alibaba.graphscope.groot.servers.GrootGraph" | grep -v grep | awk '{print $2}' | xargs kill -9
+# clean data
+rm -r ${METADATA_DIR}
+rm -r ${DATA_DIR}
+if [ $exit_code -ne 0 ]; then
+    echo "gopt_on_groot ldbc test fail"
+    exit 1
+fi
diff --git a/interactive_engine/groot-server/src/main/resources/import_data.py b/interactive_engine/groot-server/src/main/resources/import_data.py
index fe87be4ca3b3..7a96dfc13a07 100755
--- a/interactive_engine/groot-server/src/main/resources/import_data.py
+++ b/interactive_engine/groot-server/src/main/resources/import_data.py
@@ -375,25 +375,26 @@ def load_data_of_movie_graph(conn, graph, prefix):
     print("load movie graph done")
 
 
-def create_modern_graph(conn, graph, client):
+def create_modern_graph(conn, graph, client, data_path):
+    print("create modern graph", data_path)
     create_modern_graph_schema(graph)
-    load_data_of_modern_graph(conn, graph, "/home/graphscope/modern_graph")
+    load_data_of_modern_graph(conn, graph, data_path)
     statistics(client)
 
-def create_crew_graph(conn, graph, client):
+def create_crew_graph(conn, graph, client, data_path):
     create_crew_graph_schema(graph)
-    load_data_of_crew_graph(conn, graph, "/home/graphscope/crew")
+    load_data_of_crew_graph(conn, graph, data_path)
     statistics(client)
 
-def create_ldbc_graph(conn, graph, client):
+def create_ldbc_graph(conn, graph, client, data_path):
     create_ldbc_graph_schema(graph)
-    load_data_of_ldbc_graph(conn, graph, "/home/graphscope/ldbc")
+    load_data_of_ldbc_graph(conn, graph, data_path)
     statistics(client)
 
-def create_movie_graph(conn, graph, client):
+def create_movie_graph(conn, graph, client, data_path):
     create_movie_graph_schema(graph)
-    load_data_of_movie_graph(conn, graph, "/home/graphscope/movies")
+    load_data_of_movie_graph(conn, graph, data_path)
     statistics(client)
 
 
 def main():
@@ -408,17 +409,22 @@ def main():
         required=True,
         help="The graph to import: 'modern', 'crew', 'ldbc', or 'movie'."
     )
-
+    parser.add_argument(
+        '--data_path',
+        required=True,
+        help="The path to the input data file."
+    )
+
     args = parser.parse_args()
 
     if args.graph == 'modern':
-        create_modern_graph(conn, graph, client)
+        create_modern_graph(conn, graph, client, args.data_path)
     elif args.graph == 'crew':
-        create_crew_graph(conn, graph, client)
+        create_crew_graph(conn, graph, client, args.data_path)
     elif args.graph == 'ldbc':
-        create_ldbc_graph(conn, graph, client)
+        create_ldbc_graph(conn, graph, client, args.data_path)
     elif args.graph == 'movie':
-        create_movie_graph(conn, graph, client)
+        create_movie_graph(conn, graph, client, args.data_path)
 
 if __name__ == "__main__":
     main()
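
For local verification, the sketch below simply mirrors what the updated gss.yml step and the new gopt_groot_test.sh already do; it is not part of the patch. The gstest fork URL and the /tmp paths are the values hard-coded above (assumptions about the CI environment), and the commands are meant to be run from the repository root.

    # prepare the test data the same way the CI step does
    git clone -b master --single-branch --depth=1 https://github.com/7br/gstest.git /tmp/gstest
    mv /tmp/gstest/flex/ldbc-sf01-long-date /tmp/gstest/ldbc
    cp -r flex/interactive/examples/movies /tmp/gstest/
    # run the GOpt-on-Groot integration suite (modern graph via Gremlin, movie graph via Cypher, then LDBC)
    cd interactive_engine/groot-client && ./gopt_groot_test.sh
    # the data-import helper can also be driven on its own against a running groot server, e.g.
    # python3 import_data.py --graph modern --data_path /tmp/gstest/modern_graph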