From 9d9388edebc1fae2b8f72a415d9140c35d86d931 Mon Sep 17 00:00:00 2001 From: hgklohr Date: Thu, 16 May 2024 19:36:10 +0000 Subject: [PATCH 01/20] Update pom's for 6.14.0-SNAPSHOT --- common-test/pom.xml | 2 +- contrib/datawave-quickstart/docker/pom.xml | 2 +- core/pom.xml | 2 +- core/utils/pom.xml | 2 +- docs/pom.xml | 2 +- microservices/pom.xml | 2 +- microservices/services/pom.xml | 2 +- microservices/starters/pom.xml | 2 +- pom.xml | 2 +- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- warehouse/assemble/datawave/pom.xml | 2 +- warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- warehouse/data-dictionary-core/pom.xml | 2 +- warehouse/edge-dictionary-core/pom.xml | 2 +- warehouse/edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- warehouse/ingest-core/pom.xml | 2 +- warehouse/ingest-csv/pom.xml | 2 +- warehouse/ingest-json/pom.xml | 2 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 2 +- warehouse/query-core/pom.xml | 2 +- warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 2 +- web-services/atom/pom.xml | 2 +- web-services/cached-results/pom.xml | 2 +- web-services/client/pom.xml | 2 +- web-services/common-util/pom.xml | 2 +- web-services/common/pom.xml | 2 +- web-services/deploy/application/pom.xml | 2 +- web-services/deploy/configuration/pom.xml | 2 +- web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- 
web-services/deploy/spring-framework-integration/pom.xml | 2 +- web-services/dictionary/pom.xml | 2 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 2 +- web-services/model/pom.xml | 2 +- web-services/modification/pom.xml | 2 +- web-services/pom.xml | 2 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 2 +- web-services/rest-api/pom.xml | 2 +- web-services/security/pom.xml | 2 +- web-services/web-root/pom.xml | 2 +- 65 files changed, 65 insertions(+), 65 deletions(-) diff --git a/common-test/pom.xml b/common-test/pom.xml index 095aa0ecc5b..0745d338df6 100644 --- a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index b8804896a74..597c9f962f0 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT ../../../pom.xml quickstart diff --git a/core/pom.xml b/core/pom.xml index 0146e2e9d0a..7fef6cb47d5 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT gov.nsa.datawave.core datawave-core-parent diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 4e7296cea78..133ae6d9fb1 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT gov.nsa.datawave.core datawave-utils-parent diff --git a/docs/pom.xml b/docs/pom.xml 
index 608afc5c7cb..a575f602f98 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-docs diff --git a/microservices/pom.xml b/microservices/pom.xml index 06b6028a095..707f2632b42 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index 63f6ef6e662..f9d7948ace3 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-microservice-service-build-parent pom diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index 8dc0fa5412d..93964b61729 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-microservice-starter-build-parent pom diff --git a/pom.xml b/pom.xml index 79ffbbe988a..adf28228acb 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. 
diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index 4175dc56651..388bf37a937 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index a70aeac3ee1..75bb4eeffea 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index f2326d84a4f..93e1447ae10 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-age-off ${project.artifactId} diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index d78e3a0b851..f1edb9bfd9e 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT assemble-datawave jar diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index f8e1adc4833..a131fd5cfa4 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index 078fbf7bc25..a3d04e52e95 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml 
b/warehouse/common/pom.xml index b9c30375288..15b2e1197b2 100644 --- a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index 7cf54345e9e..db4633f8518 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-core jar diff --git a/warehouse/data-dictionary-core/pom.xml b/warehouse/data-dictionary-core/pom.xml index c694d510adb..8fee6cc799c 100644 --- a/warehouse/data-dictionary-core/pom.xml +++ b/warehouse/data-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-data-dictionary-core jar diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index e36c7b91d64..8426ef0f6b6 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-edge-dictionary-core jar diff --git a/warehouse/edge-model-configuration-core/pom.xml b/warehouse/edge-model-configuration-core/pom.xml index 302d0e2abf2..71c6025f73d 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-edge-model-configuration-core jar diff --git a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index a83a4a374d8..f13553a106b 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml 
b/warehouse/ingest-configuration/pom.xml index a2441d14050..a3f27bef329 100644 --- a/warehouse/ingest-configuration/pom.xml +++ b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-configuration diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index c27aedac82e..66583c54c92 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-core jar diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index b67f1e197ba..8af44f0d50d 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-csv jar diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index 2015886e5e9..23fa4da05fb 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-json jar diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index 3d5bb5db43e..0d8f29f7d60 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index d69731e9af7..319560ee7f7 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index edb6019635f..726e17492b0 100644 --- 
a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-ssdeep diff --git a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index d89cdb7b55c..8fb4fdbbe98 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 8a6f8bffd69..0dc17601060 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-metrics-core jar diff --git a/warehouse/ops-tools/config-compare/pom.xml b/warehouse/ops-tools/config-compare/pom.xml index 910cc19fbfe..53c4ec08c9c 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index 107098b7b92..e2e1d83fbc7 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index 06476691d57..658728d7d0d 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index a14d7314bff..1d56085525f 100644 --- a/warehouse/pom.xml +++ 
b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-warehouse-parent pom diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index 55c86901e00..f5066975bad 100644 --- a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-query-core jar diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index 30bfc575dcd..37295d04b9c 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index 40cc72cc333..a22534ffe28 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index a1ad7866750..0c0d1f629eb 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-accumulo ejb diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index e333627d3d6..b1e744a06ac 100644 --- a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-atom ejb diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index 4a02d3ceb42..e090a9f5a53 100644 --- a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 
6.14.0-SNAPSHOT datawave-ws-cached-results ejb diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index 7693ddb4b9d..d4443660355 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-client jar diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index 6b5b34b6cad..9448c18a0e9 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-common-util jar diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index 6fd40531834..f35b0cf06b1 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-common ejb diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index 881ab0b81b7..be044f4a418 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-deploy-application ear diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index 4d8ed716f40..cda81737a3f 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-deploy-configuration jar diff --git a/web-services/deploy/docs/pom.xml b/web-services/deploy/docs/pom.xml index 367727bea69..44b7b79e662 100644 --- a/web-services/deploy/docs/pom.xml +++ b/web-services/deploy/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 
6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-deploy-docs war diff --git a/web-services/deploy/pom.xml b/web-services/deploy/pom.xml index e2b37e4596b..d3c3716f4fc 100644 --- a/web-services/deploy/pom.xml +++ b/web-services/deploy/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-deploy-parent diff --git a/web-services/deploy/spring-framework-integration/pom.xml b/web-services/deploy/spring-framework-integration/pom.xml index fbcd9619ccc..ee858755ed7 100644 --- a/web-services/deploy/spring-framework-integration/pom.xml +++ b/web-services/deploy/spring-framework-integration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT spring-framework-integration ${project.artifactId} diff --git a/web-services/dictionary/pom.xml b/web-services/dictionary/pom.xml index 12b2ada6f0e..4d2f083316d 100644 --- a/web-services/dictionary/pom.xml +++ b/web-services/dictionary/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-dictionary ejb diff --git a/web-services/examples/client-login/pom.xml b/web-services/examples/client-login/pom.xml index e12cdfb9cc6..3edce3f218e 100644 --- a/web-services/examples/client-login/pom.xml +++ b/web-services/examples/client-login/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-examples-client-login ejb diff --git a/web-services/examples/http-client/pom.xml b/web-services/examples/http-client/pom.xml index 3e80ce0a94d..6c0b1b0c1b6 100644 --- a/web-services/examples/http-client/pom.xml +++ b/web-services/examples/http-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-examples-http-client jar diff --git a/web-services/examples/jms-client/pom.xml 
b/web-services/examples/jms-client/pom.xml index fedf06816aa..3ae1187fdd3 100644 --- a/web-services/examples/jms-client/pom.xml +++ b/web-services/examples/jms-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-examples-jms-client jar diff --git a/web-services/examples/pom.xml b/web-services/examples/pom.xml index 8dd74e0777c..238fa7195c8 100644 --- a/web-services/examples/pom.xml +++ b/web-services/examples/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-examples-parent pom diff --git a/web-services/examples/query-war/pom.xml b/web-services/examples/query-war/pom.xml index 1a51bb36216..5c77aea2210 100644 --- a/web-services/examples/query-war/pom.xml +++ b/web-services/examples/query-war/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-examples-query-war war diff --git a/web-services/map-reduce-embedded/pom.xml b/web-services/map-reduce-embedded/pom.xml index 0ed353f9a29..d3cfb72877d 100644 --- a/web-services/map-reduce-embedded/pom.xml +++ b/web-services/map-reduce-embedded/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-map-reduce-embedded jar diff --git a/web-services/map-reduce-status/pom.xml b/web-services/map-reduce-status/pom.xml index c6570ebff65..84e11e60af8 100644 --- a/web-services/map-reduce-status/pom.xml +++ b/web-services/map-reduce-status/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-map-reduce-status ejb diff --git a/web-services/map-reduce/pom.xml b/web-services/map-reduce/pom.xml index 8a81f8a4acb..e87f181c095 100644 --- a/web-services/map-reduce/pom.xml +++ b/web-services/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 
6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-map-reduce ejb diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index 61bca12da60..2778c520262 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-model ejb diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index 598e20493ec..9c67b78f277 100644 --- a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-modification ejb diff --git a/web-services/pom.xml b/web-services/pom.xml index d5ff23d8d2d..809842f7252 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-parent diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index cfa00cc1023..4a11f2ee103 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index 02daf86cb94..36cd3f1b5db 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-query ejb diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index 59d672022f5..e759765db33 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-rest-api war diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index 
a560795d8c9..6743dc68b15 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-security ejb diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index d2fab275bfd..cba4bae2e02 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.14.0-SNAPSHOT datawave-ws-web-root war From 2a289e6b064942988cac3a344a784983a5927398 Mon Sep 17 00:00:00 2001 From: hgklohr Date: Thu, 16 May 2024 19:37:19 +0000 Subject: [PATCH 02/20] 6.13.0 --- common-test/pom.xml | 2 +- contrib/datawave-quickstart/docker/pom.xml | 2 +- core/pom.xml | 2 +- core/utils/pom.xml | 2 +- docs/pom.xml | 2 +- microservices/pom.xml | 2 +- microservices/services/pom.xml | 2 +- microservices/starters/pom.xml | 2 +- pom.xml | 2 +- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- warehouse/assemble/datawave/pom.xml | 2 +- warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- warehouse/data-dictionary-core/pom.xml | 2 +- warehouse/edge-dictionary-core/pom.xml | 2 +- warehouse/edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- warehouse/ingest-core/pom.xml | 2 +- warehouse/ingest-csv/pom.xml | 2 +- warehouse/ingest-json/pom.xml | 2 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 2 +- warehouse/query-core/pom.xml | 2 +- 
warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 2 +- web-services/atom/pom.xml | 2 +- web-services/cached-results/pom.xml | 2 +- web-services/client/pom.xml | 2 +- web-services/common-util/pom.xml | 2 +- web-services/common/pom.xml | 2 +- web-services/deploy/application/pom.xml | 2 +- web-services/deploy/configuration/pom.xml | 2 +- web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- web-services/deploy/spring-framework-integration/pom.xml | 2 +- web-services/dictionary/pom.xml | 2 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 2 +- web-services/model/pom.xml | 2 +- web-services/modification/pom.xml | 2 +- web-services/pom.xml | 2 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 2 +- web-services/rest-api/pom.xml | 2 +- web-services/security/pom.xml | 2 +- web-services/web-root/pom.xml | 2 +- 65 files changed, 65 insertions(+), 65 deletions(-) diff --git a/common-test/pom.xml b/common-test/pom.xml index 095aa0ecc5b..e3d6770b04a 100644 --- a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index b8804896a74..379da980298 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 ../../../pom.xml quickstart diff --git a/core/pom.xml b/core/pom.xml index 0146e2e9d0a..cea876fb33e 100644 --- a/core/pom.xml +++ 
b/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 gov.nsa.datawave.core datawave-core-parent diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 4e7296cea78..02b915076ba 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 6.13.0-SNAPSHOT + 6.13.0 gov.nsa.datawave.core datawave-utils-parent diff --git a/docs/pom.xml b/docs/pom.xml index 608afc5c7cb..3b7169be2ee 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-docs diff --git a/microservices/pom.xml b/microservices/pom.xml index 06b6028a095..58009e5bb5f 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index 63f6ef6e662..4713f77832a 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-microservice-service-build-parent pom diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index 8dc0fa5412d..d001a3e6e49 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-microservice-starter-build-parent pom diff --git a/pom.xml b/pom.xml index 79ffbbe988a..e666e97a6e7 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. 
diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index 4175dc56651..bf3dae59da0 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index a70aeac3ee1..f8b369af690 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index f2326d84a4f..eef04f222af 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-age-off ${project.artifactId} diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index d78e3a0b851..335ce3d174b 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0-SNAPSHOT + 6.13.0 assemble-datawave jar diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index f8e1adc4833..caa05b82ac7 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index 078fbf7bc25..8c731017d43 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0-SNAPSHOT + 6.13.0 assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml b/warehouse/common/pom.xml index b9c30375288..a4066228d4e 100644 --- 
a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index 7cf54345e9e..48221c0da2a 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-core jar diff --git a/warehouse/data-dictionary-core/pom.xml b/warehouse/data-dictionary-core/pom.xml index c694d510adb..109fb5ff5b6 100644 --- a/warehouse/data-dictionary-core/pom.xml +++ b/warehouse/data-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-data-dictionary-core jar diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index e36c7b91d64..63cda765c64 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-edge-dictionary-core jar diff --git a/warehouse/edge-model-configuration-core/pom.xml b/warehouse/edge-model-configuration-core/pom.xml index 302d0e2abf2..6132355701d 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-edge-model-configuration-core jar diff --git a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index a83a4a374d8..8588efa1765 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml b/warehouse/ingest-configuration/pom.xml index a2441d14050..da95bd0a0c3 100644 --- a/warehouse/ingest-configuration/pom.xml +++ 
b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-configuration diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index c27aedac82e..c199249f43f 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-core jar diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index b67f1e197ba..7e0033962c1 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-csv jar diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index 2015886e5e9..f2b33d4f5b2 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-json jar diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index 3d5bb5db43e..fed2638cd9f 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index d69731e9af7..7d0e973b7a0 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index edb6019635f..c07b01b678e 100644 --- a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-ssdeep diff --git 
a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index d89cdb7b55c..4f72901c9b2 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 8a6f8bffd69..84ec3e2739b 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-metrics-core jar diff --git a/warehouse/ops-tools/config-compare/pom.xml b/warehouse/ops-tools/config-compare/pom.xml index 910cc19fbfe..c59f7f0f662 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index 107098b7b92..93cac72d2f7 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index 06476691d57..c0c9452fc7f 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index a14d7314bff..f54350ee43f 100644 --- a/warehouse/pom.xml +++ b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-warehouse-parent pom diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index 55c86901e00..bf8024d22ba 100644 --- 
a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-query-core jar diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index 30bfc575dcd..5f18493306e 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index 40cc72cc333..408167ce9b1 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index a1ad7866750..95e514da311 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-accumulo ejb diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index e333627d3d6..790ff6b0ffe 100644 --- a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-atom ejb diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index 4a02d3ceb42..8ddc1221170 100644 --- a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-cached-results ejb diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index 7693ddb4b9d..3fa03ca4d66 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT 
+ 6.13.0 datawave-ws-client jar diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index 6b5b34b6cad..e54e4d45776 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-common-util jar diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index 6fd40531834..de7c25e5651 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-common ejb diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index 881ab0b81b7..2e59c152f95 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-deploy-application ear diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index 4d8ed716f40..5961e01622a 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-deploy-configuration jar diff --git a/web-services/deploy/docs/pom.xml b/web-services/deploy/docs/pom.xml index 367727bea69..f82bd8ec37e 100644 --- a/web-services/deploy/docs/pom.xml +++ b/web-services/deploy/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-deploy-docs war diff --git a/web-services/deploy/pom.xml b/web-services/deploy/pom.xml index e2b37e4596b..42516047d5b 100644 --- a/web-services/deploy/pom.xml +++ b/web-services/deploy/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 gov.nsa.datawave.webservices 
datawave-ws-deploy-parent diff --git a/web-services/deploy/spring-framework-integration/pom.xml b/web-services/deploy/spring-framework-integration/pom.xml index fbcd9619ccc..09bfc1d79cd 100644 --- a/web-services/deploy/spring-framework-integration/pom.xml +++ b/web-services/deploy/spring-framework-integration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0-SNAPSHOT + 6.13.0 spring-framework-integration ${project.artifactId} diff --git a/web-services/dictionary/pom.xml b/web-services/dictionary/pom.xml index 12b2ada6f0e..e9aa6bed724 100644 --- a/web-services/dictionary/pom.xml +++ b/web-services/dictionary/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-dictionary ejb diff --git a/web-services/examples/client-login/pom.xml b/web-services/examples/client-login/pom.xml index e12cdfb9cc6..682714b1ed1 100644 --- a/web-services/examples/client-login/pom.xml +++ b/web-services/examples/client-login/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-examples-client-login ejb diff --git a/web-services/examples/http-client/pom.xml b/web-services/examples/http-client/pom.xml index 3e80ce0a94d..d486cd2b32f 100644 --- a/web-services/examples/http-client/pom.xml +++ b/web-services/examples/http-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-examples-http-client jar diff --git a/web-services/examples/jms-client/pom.xml b/web-services/examples/jms-client/pom.xml index fedf06816aa..6f19360c285 100644 --- a/web-services/examples/jms-client/pom.xml +++ b/web-services/examples/jms-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-examples-jms-client jar diff --git a/web-services/examples/pom.xml b/web-services/examples/pom.xml index 8dd74e0777c..1f01ea0c14a 
100644 --- a/web-services/examples/pom.xml +++ b/web-services/examples/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-examples-parent pom diff --git a/web-services/examples/query-war/pom.xml b/web-services/examples/query-war/pom.xml index 1a51bb36216..f2c05a27e72 100644 --- a/web-services/examples/query-war/pom.xml +++ b/web-services/examples/query-war/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-examples-query-war war diff --git a/web-services/map-reduce-embedded/pom.xml b/web-services/map-reduce-embedded/pom.xml index 0ed353f9a29..71b9f76f705 100644 --- a/web-services/map-reduce-embedded/pom.xml +++ b/web-services/map-reduce-embedded/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-map-reduce-embedded jar diff --git a/web-services/map-reduce-status/pom.xml b/web-services/map-reduce-status/pom.xml index c6570ebff65..56e6283458d 100644 --- a/web-services/map-reduce-status/pom.xml +++ b/web-services/map-reduce-status/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-map-reduce-status ejb diff --git a/web-services/map-reduce/pom.xml b/web-services/map-reduce/pom.xml index 8a81f8a4acb..3dc52bfd12b 100644 --- a/web-services/map-reduce/pom.xml +++ b/web-services/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-map-reduce ejb diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index 61bca12da60..c3d73f041c6 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-model ejb diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index 598e20493ec..345d89f6945 100644 --- 
a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-modification ejb diff --git a/web-services/pom.xml b/web-services/pom.xml index d5ff23d8d2d..6b918bf2aa0 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0-SNAPSHOT + 6.13.0 gov.nsa.datawave.webservices datawave-ws-parent diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index cfa00cc1023..59c9b5016d5 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index 02daf86cb94..87183c5e749 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-query ejb diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index 59d672022f5..660f813cc26 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-rest-api war diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index a560795d8c9..307ce1994e1 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-security ejb diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index d2fab275bfd..b1d81f4251e 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0-SNAPSHOT + 6.13.0 datawave-ws-web-root war From 
2e5f15fbe9bb7dbf8031e755279f330e895a5e9b Mon Sep 17 00:00:00 2001 From: hgklohr Date: Thu, 16 May 2024 19:38:47 +0000 Subject: [PATCH 03/20] 6.13.1-SNAPSHOT --- common-test/pom.xml | 2 +- contrib/datawave-quickstart/docker/pom.xml | 2 +- core/pom.xml | 2 +- core/utils/pom.xml | 2 +- docs/pom.xml | 2 +- microservices/pom.xml | 2 +- microservices/services/pom.xml | 2 +- microservices/starters/pom.xml | 2 +- pom.xml | 2 +- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- warehouse/assemble/datawave/pom.xml | 2 +- warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- warehouse/data-dictionary-core/pom.xml | 2 +- warehouse/edge-dictionary-core/pom.xml | 2 +- warehouse/edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- warehouse/ingest-core/pom.xml | 2 +- warehouse/ingest-csv/pom.xml | 2 +- warehouse/ingest-json/pom.xml | 2 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 2 +- warehouse/query-core/pom.xml | 2 +- warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 2 +- web-services/atom/pom.xml | 2 +- web-services/cached-results/pom.xml | 2 +- web-services/client/pom.xml | 2 +- web-services/common-util/pom.xml | 2 +- web-services/common/pom.xml | 2 +- web-services/deploy/application/pom.xml | 2 +- web-services/deploy/configuration/pom.xml | 2 +- web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- web-services/deploy/spring-framework-integration/pom.xml | 2 
+- web-services/dictionary/pom.xml | 2 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 2 +- web-services/model/pom.xml | 2 +- web-services/modification/pom.xml | 2 +- web-services/pom.xml | 2 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 2 +- web-services/rest-api/pom.xml | 2 +- web-services/security/pom.xml | 2 +- web-services/web-root/pom.xml | 2 +- 65 files changed, 65 insertions(+), 65 deletions(-) diff --git a/common-test/pom.xml b/common-test/pom.xml index e3d6770b04a..08f2e8644a7 100644 --- a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index 379da980298..2420d9de524 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT ../../../pom.xml quickstart diff --git a/core/pom.xml b/core/pom.xml index cea876fb33e..05ac101dc7f 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT gov.nsa.datawave.core datawave-core-parent diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 02b915076ba..8dc017496d4 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 6.13.0 + 6.13.1-SNAPSHOT gov.nsa.datawave.core datawave-utils-parent diff --git a/docs/pom.xml b/docs/pom.xml index 3b7169be2ee..ced2845a7fc 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ 
gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-docs diff --git a/microservices/pom.xml b/microservices/pom.xml index 58009e5bb5f..a950d377069 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index 4713f77832a..16f2bd0d578 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-microservice-service-build-parent pom diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index d001a3e6e49..17f7f205ac3 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-microservice-starter-build-parent pom diff --git a/pom.xml b/pom.xml index e666e97a6e7..754c0923ff6 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. 
diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index bf3dae59da0..1ad3f4669af 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index f8b369af690..fe9b407d310 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index eef04f222af..1f2fd5f1b44 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-age-off ${project.artifactId} diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index 335ce3d174b..755d4944adb 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0 + 6.13.1-SNAPSHOT assemble-datawave jar diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index caa05b82ac7..f55e73b5fff 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index 8c731017d43..a694b09bc9e 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.13.0 + 6.13.1-SNAPSHOT assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml b/warehouse/common/pom.xml index a4066228d4e..12919be74e4 100644 --- 
a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index 48221c0da2a..a60495c77ef 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-core jar diff --git a/warehouse/data-dictionary-core/pom.xml b/warehouse/data-dictionary-core/pom.xml index 109fb5ff5b6..cc195b9e551 100644 --- a/warehouse/data-dictionary-core/pom.xml +++ b/warehouse/data-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-data-dictionary-core jar diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index 63cda765c64..953bd183722 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-edge-dictionary-core jar diff --git a/warehouse/edge-model-configuration-core/pom.xml b/warehouse/edge-model-configuration-core/pom.xml index 6132355701d..e8b89ce6dc8 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-edge-model-configuration-core jar diff --git a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index 8588efa1765..91667de705b 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml b/warehouse/ingest-configuration/pom.xml index da95bd0a0c3..23b19792ca1 100644 --- a/warehouse/ingest-configuration/pom.xml +++ 
b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-configuration diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index c199249f43f..d6dfda83e83 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-core jar diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index 7e0033962c1..a7b4a132f33 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-csv jar diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index f2b33d4f5b2..4e93797fe45 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-json jar diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index fed2638cd9f..4aeeb82c39d 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index 7d0e973b7a0..d61810fc3ac 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index c07b01b678e..532dad307fb 100644 --- a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-ssdeep diff --git 
a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index 4f72901c9b2..b629cc215a9 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 84ec3e2739b..0f4a7feecfc 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-metrics-core jar diff --git a/warehouse/ops-tools/config-compare/pom.xml b/warehouse/ops-tools/config-compare/pom.xml index c59f7f0f662..d039763e725 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index 93cac72d2f7..4777a181b74 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index c0c9452fc7f..5b58e4011d5 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index f54350ee43f..0624deb6b85 100644 --- a/warehouse/pom.xml +++ b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-warehouse-parent pom diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index bf8024d22ba..5a6fa9f20a7 100644 --- 
a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-query-core jar diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index 5f18493306e..6fd4fe43fca 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index 408167ce9b1..95eaa2fdfa3 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index 95e514da311..40b9bd2c2b8 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-accumulo ejb diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index 790ff6b0ffe..b5933097b74 100644 --- a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-atom ejb diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index 8ddc1221170..2873e644120 100644 --- a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-cached-results ejb diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index 3fa03ca4d66..44496bee9b3 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 
6.13.1-SNAPSHOT datawave-ws-client jar diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index e54e4d45776..c070061d870 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-common-util jar diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index de7c25e5651..6d1b33e5bb1 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-common ejb diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index 2e59c152f95..ff2bafd60de 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-deploy-application ear diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index 5961e01622a..00476104008 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-deploy-configuration jar diff --git a/web-services/deploy/docs/pom.xml b/web-services/deploy/docs/pom.xml index f82bd8ec37e..17f7587281e 100644 --- a/web-services/deploy/docs/pom.xml +++ b/web-services/deploy/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-deploy-docs war diff --git a/web-services/deploy/pom.xml b/web-services/deploy/pom.xml index 42516047d5b..dacb653170d 100644 --- a/web-services/deploy/pom.xml +++ b/web-services/deploy/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT gov.nsa.datawave.webservices 
datawave-ws-deploy-parent diff --git a/web-services/deploy/spring-framework-integration/pom.xml b/web-services/deploy/spring-framework-integration/pom.xml index 09bfc1d79cd..e52d16758bf 100644 --- a/web-services/deploy/spring-framework-integration/pom.xml +++ b/web-services/deploy/spring-framework-integration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.13.0 + 6.13.1-SNAPSHOT spring-framework-integration ${project.artifactId} diff --git a/web-services/dictionary/pom.xml b/web-services/dictionary/pom.xml index e9aa6bed724..9ca0a1c4f0d 100644 --- a/web-services/dictionary/pom.xml +++ b/web-services/dictionary/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-dictionary ejb diff --git a/web-services/examples/client-login/pom.xml b/web-services/examples/client-login/pom.xml index 682714b1ed1..7ddfcfa756e 100644 --- a/web-services/examples/client-login/pom.xml +++ b/web-services/examples/client-login/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-examples-client-login ejb diff --git a/web-services/examples/http-client/pom.xml b/web-services/examples/http-client/pom.xml index d486cd2b32f..00190e5d321 100644 --- a/web-services/examples/http-client/pom.xml +++ b/web-services/examples/http-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-examples-http-client jar diff --git a/web-services/examples/jms-client/pom.xml b/web-services/examples/jms-client/pom.xml index 6f19360c285..fe899182272 100644 --- a/web-services/examples/jms-client/pom.xml +++ b/web-services/examples/jms-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-examples-jms-client jar diff --git a/web-services/examples/pom.xml b/web-services/examples/pom.xml index 1f01ea0c14a..5c627064db0 
100644 --- a/web-services/examples/pom.xml +++ b/web-services/examples/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-examples-parent pom diff --git a/web-services/examples/query-war/pom.xml b/web-services/examples/query-war/pom.xml index f2c05a27e72..9ae28740617 100644 --- a/web-services/examples/query-war/pom.xml +++ b/web-services/examples/query-war/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-examples-query-war war diff --git a/web-services/map-reduce-embedded/pom.xml b/web-services/map-reduce-embedded/pom.xml index 71b9f76f705..1efacb31460 100644 --- a/web-services/map-reduce-embedded/pom.xml +++ b/web-services/map-reduce-embedded/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-map-reduce-embedded jar diff --git a/web-services/map-reduce-status/pom.xml b/web-services/map-reduce-status/pom.xml index 56e6283458d..bc1cbaa030d 100644 --- a/web-services/map-reduce-status/pom.xml +++ b/web-services/map-reduce-status/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-map-reduce-status ejb diff --git a/web-services/map-reduce/pom.xml b/web-services/map-reduce/pom.xml index 3dc52bfd12b..227b5f3b6ca 100644 --- a/web-services/map-reduce/pom.xml +++ b/web-services/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-map-reduce ejb diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index c3d73f041c6..6148b56e217 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-model ejb diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index 345d89f6945..2bb899c8af1 100644 --- 
a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-modification ejb diff --git a/web-services/pom.xml b/web-services/pom.xml index 6b918bf2aa0..0447f00cbb4 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.13.0 + 6.13.1-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-parent diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index 59c9b5016d5..f42674808b5 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index 87183c5e749..595617a7088 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-query ejb diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index 660f813cc26..55809dc760b 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-rest-api war diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index 307ce1994e1..01e7c96f62f 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-security ejb diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index b1d81f4251e..4dbf2f441fb 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.13.0 + 6.13.1-SNAPSHOT datawave-ws-web-root war From 
9dadf7b2c65ab207429f2ad1eec9f88d71132621 Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Mon, 20 May 2024 12:33:34 -0400 Subject: [PATCH 04/20] Feature/query microservices (#2192) * Modified a couple of things and fixed a few tests with the integration merge. * pulling in latest audit changes * pulled in latest audit updates * Fixed metrics version in docker compose * Updated accumulo utils and datawave starter versions. * Updated the audit api and audit starter versions * Updated authorization api version * Updated accumulo api version. * Updated config service with latest changes * updated dictionary and accumulo utils version * Updated hazelcast version * bumped query metric api version * Updated pki for query microservices docker deployment. * Fixing the docker compose bootstrap to avoid duplicate extra hosts issue. * adding latest datawave changes * updated metadata utils * updated docker compose to use the latest version of each service. * Added a cached results spring boot starter and updated associated classes accordingly. * fixing spotbugs error * fixed install-ingest.sh for datawave. * Updates to get bulk results map reduce query working again. * formatting * Updates per a race condition discovered in the kafka variant of the query executor tests. * Update README.md * accumulo service merge from main * fixed a test that broke after merging integration * Added logic from Datawave Principal Login Module to the datawave spring boot starter. * Updated datawave to 5.4.0-SNAPSHOT * Updated ProxiedUserDetails to use dynamic type for newInstance method. * updated audit service to stop intermittent test failures. * Updated the query results publisher and setup the kafka manager to create topics manually. * Added github actions for some submodules * Updated github actions to avoid running out of disk space. * Updated import sorting for microservices using impsort plugin. 
* fixing merge conflicts * updated metadata utils * Moved Query, QueryImpl to query-api * Removed usage of zookeeper stringutils * added package-info.java for Query, QueryImpl * updated modification service logic to use ProxiedUserDetails. * Removed unnecessary jackson annotations to address enunciate issues * Updated profile ids for submodules * changes to get the github actions working again * attempting to fix github actions * typo * udpated kafka config * removed banned dependency * bumped release version * formatting * fixing docker compose kafka setup * updated deploy poms to work with the latest version of maven * bumped version for some modules * bumped microservice service parent version * Updated with latest changes from main/integration * fixed the composite query logic for the query microservices * updated submodules * Added v1 to the model path * For this branch we need to checkout all submodules when building tests. * Added alerman's run cache settings for docker * Updated github actions to build query microservices correctly * fixed build cache config typo * Updated query service to use pool header, and allow for admin override via parameter * Updated github ci to make sure it runs against the queryMicroservices branch * Updated lookup uuid to accept multiple terms per field * Updated confirm ack configuration for query metric service, and added configuration to make rest metrics updates possible * Added support for long running queries * Added configcheck command line utility * updated next call logic for long running queries * updated query metric service and starter submodules * Added some plugins to the config check service * Updated configcheck tests to trim whitespace * fixing unit tests for config check * Changed from max long running query timeouts to long running query timeout * Updated the query starter submodule * updated submodules * fixed composite query logic test * Refactored query scripts to use common code * Updated quickstart 
docker-maven-plugin to not cleanup old tagged releases * updated common-utils submodule * Updated LookupService to behave the same as the webservice * Fixed TypedValue (de)serialization, and added a result post processor to handle things like counts * reset docker-compose quickstart to just be accumulo, not web * Changed how the result postprocessor runs to support result aggregation for long running queries * Renamed aggregateResults to reduceResults * Added junit-jupiter engine to datawave core pom * Updated query logic factory to add logic map * Updated EdgeQueryLogicFactory configuration * Removed unused RemoteQueryLogic and cleaned up RemoteEventQueryLogic class definition: * Implemented authorization and query federation for the query microservices * Fixed copy constructor parameter assignment in QueryData * Formatting job fix * Fixing build/formatter issues for latest changes pulled in from integration * Fixed SSDeepIngestQueryTest * Updated usage of remote user operations for query microservices * Fixed QueryPlan constructor * Moved filtering and delegating query logic functionality into datawave-core-query so it can be used by the microservices * formatting * Updated submodules * Updated package names for commons.lang3 classes due to type-utils fix * Moved the AuthorizationsPredicate class to authorization-api * Removed hadoop dependencies from pom * Updated edge configuration to reset param variables before loading new values * Excluded log4j from our maven plugins * Updated in-memory-accumulo javadoc plugin to exclude log4j * Updated microservice-parent maven-dependency-plugin to exclude log4j * Removed infinispan from query starter * Remove validation call from LoadModel * fixed formatting * Updated datawave ingest jar libs * QueryMetricQueryLogic must copy runAsMetricsAdministrator setting in copy constructor * Revert query metric query logic back to it's original state * pr feedback * PR feedback * PR Feedback * PR feedback * PR Feedback * PR feedback 
* PR feedback * PR Feedback * PR Feedback * PR feedback * PR feedback * PR feedback * pr feedback * PR feedback * pr feedback * pr feedback * pr feedback * pr feedback * pr feedback * pr feedback * PR feedback * PR feedback * pr feedback * pr feedback * Updated in-memory-accumulo commit * in-memory-accumulo * formatting * query starter * pr feedback * pr feedback * pr feedback * pr feedback * pr feedback * pr feedback * pr feedback * Remoted CachedQueryStatus mechanism * pr feedback * pr feedback * pr feedback * pr feedback * pr updates * pr feedback * pr review: found duplicate properties between configurations and the super GenericQueryConfiguration class. Removed redundant copies in ShardQueryConfiguration checkpoint copy constructor * review comments * review comments * Reverted path changes in dictionary service * pr review updates * updated executor to use QueryStatusUpdateUtil * updated query microservice submodule * make github action tests match integration --------- Co-authored-by: Ivan Bella Co-authored-by: GitHub Actions Co-authored-by: Bill Oley --- .github/workflows/tests.yml | 1 - .gitmodules | 21 + .mvn/maven-build-cache-config.xml | 16 + README.md | 4 + common-test/pom.xml | 2 +- contrib/datawave-quickstart/.gitignore | 2 +- contrib/datawave-quickstart/bin/common.sh | 1 + .../datawave-quickstart/bin/modification.sh | 295 ++++++ contrib/datawave-quickstart/bin/query.sh | 2 +- .../bin/services/accumulo/bootstrap.sh | 1 + .../bin/services/accumulo/install.sh | 4 +- .../bin/services/hadoop/bootstrap.sh | 4 +- .../docker/datawave-bootstrap.sh | 14 +- contrib/datawave-quickstart/docker/pom.xml | 3 +- contrib/datawave-utils | 1 + core/base-rest-responses | 2 +- core/cached-results/pom.xml | 48 + .../cachedresults/CacheableQueryRowImpl.java | 5 +- .../CacheableQueryRowReader.java | 10 +- .../CachedResultsQueryParameters.java | 7 +- core/common-util/pom.xml | 121 +++ .../logging/ThreadConfigurableLogger.java | 2 +- .../core/common}/util/EnvProvider.java | 4 
+- .../core/query/cache/ResultsPage.java | 45 + .../authorization/DatawavePrincipal.java | 10 +- .../src/main/resources/META-INF/beans.xml | 9 + .../main/resources/META-INF/jboss-ejb3.xml | 16 + .../security/authorization/package-info.java | 1 - .../core/common}/util/EnvProviderTest.java | 2 +- core/common/pom.xml | 79 ++ .../common/audit/PrivateAuditConstants.java | 7 +- .../EdgeDictionaryProvider.java | 9 + .../core}/common/extjs/ExtJsResponse.java | 2 +- core/connection-pool/pom.xml | 161 ++++ .../core/common/cache/AccumuloTableCache.java | 28 + .../common/cache/AccumuloTableCacheImpl.java | 204 ++-- .../cache/AccumuloTableCacheProperties.java | 129 +++ .../core}/common/cache/BaseTableCache.java | 7 +- .../core}/common/cache/SharedBoolean.java | 2 +- .../common/cache/SharedBooleanListener.java | 2 +- .../common/cache/SharedBooleanReader.java | 2 +- .../common/cache/SharedCacheCoordinator.java | 36 +- .../core}/common/cache/SharedTriState.java | 2 +- .../common/cache/SharedTriStateListener.java | 2 +- .../common/cache/SharedTriStateReader.java | 2 +- .../core}/common/cache/TableCache.java | 4 +- .../common/connection/AccumuloClientPool.java | 12 +- .../connection/AccumuloClientPoolFactory.java | 2 +- .../connection/AccumuloConnectionFactory.java | 67 +- .../AccumuloConnectionFactoryImpl.java | 381 ++++++++ .../result/AccumuloTableCacheStatus.java | 8 +- .../core}/common/result/Connection.java | 2 +- .../result/ConnectionFactoryResponse.java | 2 +- .../core}/common/result/ConnectionPool.java | 2 +- .../ConnectionPoolClientProperties.java | 25 + .../result/ConnectionPoolProperties.java | 77 ++ .../result/ConnectionPoolsProperties.java | 49 + .../common/result/ConnectionProperty.java | 2 +- .../common/result/TableCacheDescription.java | 5 +- .../runner/AccumuloConnectionRequestMap.java | 105 ++ .../src/main/resources/META-INF/beans.xml | 9 + .../main/resources/META-INF/jboss-ejb3.xml | 16 + .../core/common/result}/package-info.java | 5 +- 
.../cache/SharedCacheCoordinatorTest.java | 9 +- .../AccumuloConnectionFactoryTest.java | 72 +- .../curator/TestSharedCacheCoordinator.java | 10 +- .../common/result/ConnectionPoolTest.java | 5 +- .../src/test/resources/log4j.properties | 6 + core/in-memory-accumulo | 2 +- core/map-reduce/pom.xml | 37 + .../map/ApplicationContextAwareMapper.java | 64 ++ .../map/BulkResultsFileOutputMapper.java | 31 +- .../map/BulkResultsTableOutputMapper.java | 28 +- core/metrics-reporter | 2 +- core/modification/pom.xml | 53 + .../DatawaveModificationException.java | 30 + .../modification/ModificationService.java | 142 +++ .../modification/MutableMetadataHandler.java | 84 +- .../MutableMetadataUUIDHandler.java | 44 +- .../modification/cache/ModificationCache.java | 120 +++ .../ModificationConfiguration.java | 2 +- .../ModificationServiceConfiguration.java | 29 +- .../query/ModificationQueryService.java | 21 + core/pom.xml | 14 +- core/query/pom.xml | 102 ++ .../query/cachedresults/CacheableLogic.java | 10 +- .../CheckpointableQueryConfiguration.java | 12 + .../GenericQueryConfiguration.java | 138 ++- .../core/query/configuration/QueryData.java | 585 ++++++++++++ .../core/query/configuration/Result.java | 109 +++ .../query/configuration/ResultContext.java | 15 + .../query/dashboard/DashboardFields.java | 2 +- .../query/dashboard/DashboardSummary.java | 2 +- .../query/exception/EmptyObjectException.java | 2 +- .../iterator/DatawaveTransformIterator.java | 6 +- .../logic/AbstractQueryLogicTransformer.java | 4 +- .../core}/query/logic/BaseQueryLogic.java | 78 +- .../logic/BaseQueryLogicTransformer.java | 2 +- .../query/logic/CheckpointableQueryLogic.java | 60 ++ .../query/logic/DelegatingQueryLogic.java | 85 +- .../datawave/core}/query/logic/Flushable.java | 4 +- .../core/query/logic/QueryCheckpoint.java | 80 ++ .../datawave/core/query/logic/QueryKey.java | 104 ++ .../core}/query/logic/QueryLogic.java | 66 +- .../core/query/logic/QueryLogicFactory.java | 29 + 
.../query/logic/QueryLogicTransformer.java | 6 +- .../core}/query/logic/ResponseEnricher.java | 2 +- .../query/logic/ResponseEnricherBuilder.java | 11 +- .../core/query/logic/ResultPostprocessor.java | 24 + .../core/query/logic/WritesQueryMetrics.java | 24 + .../logic/WritesResultCardinalities.java | 2 +- .../composite/CompositeLogicException.java | 6 +- .../composite/CompositeQueryCheckpoint.java | 22 + .../CompositeQueryConfiguration.java | 26 +- .../logic/composite/CompositeQueryLogic.java | 195 ++-- .../composite/CompositeQueryLogicResults.java | 4 +- .../CompositeQueryLogicResultsIterator.java | 6 +- .../CompositeQueryLogicTransformer.java | 19 +- .../composite/CompositeUserOperations.java | 19 +- .../logic/filtered/FilteredQueryLogic.java | 13 +- .../filtered/QueryLogicFilterByAuth.java | 6 +- .../filtered/QueryLogicFilterByParameter.java | 7 +- .../query/logic/lookup/LookupQueryLogic.java | 394 ++++++++ .../logic/lookup/uid/LookupUIDQueryLogic.java | 38 + .../lookup/uuid/LookupUUIDQueryLogic.java | 38 + .../core}/query/map/QueryGeometryHandler.java | 3 +- .../query/metric/QueryMetricHandler.java | 2 +- .../ProxiedAuthorizationsPredicate.java | 4 +- .../predicate/QueryParameterPredicate.java | 6 +- .../query/predict/NoOpQueryPredictor.java | 13 + .../core/query/predict}/QueryPredictor.java | 2 +- .../query}/remote/RemoteQueryService.java | 16 +- .../event/DefaultResponseObjectFactory.java | 17 +- .../datawave/core}/query/util/QueryUtil.java | 6 +- .../src/main/resources/META-INF/beans.xml | 9 + .../main/resources/META-INF/jboss-ejb3.xml | 16 + core/utils/accumulo-utils | 2 +- core/utils/common-utils | 2 +- core/utils/metadata-utils | 2 +- core/utils/pom.xml | 2 +- core/utils/type-utils | 2 +- docker/.gitignore | 15 + docker/README.md | 80 +- docker/cleanup.sh | 9 + docker/config/application-cachedresults.yml | 79 ++ docker/config/application-compose.yml | 6 + docker/config/application-federation.yml | 16 + docker/config/application-metricssource.yml | 23 + 
docker/config/application-mrquery.yml | 54 ++ docker/config/application-query.yml | 568 +++++++++++ docker/config/application-querymessaging.yml | 11 + docker/config/executor-pool1.yml | 20 + docker/config/executor-pool2.yml | 20 + docker/config/executor.yml | 67 ++ docker/config/modification.yml | 70 ++ docker/config/mrquery.yml | 31 + docker/config/query.yml | 74 ++ docker/debug.yml.example | 5 + docker/docker-compose.yml | 315 +++++- docker/restart.sh | 7 + docker/scripts/cachedResultsQuery.sh | 18 + docker/scripts/cancel.sh | 10 + docker/scripts/cleanup.sh | 12 + docker/scripts/close.sh | 10 + docker/scripts/common/batchLookup.sh | 2 + docker/scripts/common/batchLookupContent.sh | 2 + docker/scripts/common/cachedResultsQuery.sh | 148 +++ docker/scripts/common/common.sh | 13 +- docker/scripts/common/edge.sh | 107 +++ docker/scripts/common/lookup.sh | 2 + docker/scripts/common/lookupContent.sh | 2 + docker/scripts/common/mapReduceQuery.sh | 184 ++++ docker/scripts/common/oozieQuery.sh | 152 +++ docker/scripts/common/plan.sh | 62 ++ docker/scripts/common/predict.sh | 64 ++ docker/scripts/common/query.sh | 4 + docker/scripts/common/streamingQuery.sh | 77 ++ docker/scripts/connectionFactory.sh | 28 + docker/scripts/edge.sh | 17 + docker/scripts/edgeEvent.sh | 19 + docker/scripts/executorHealth.sh | 23 + docker/scripts/executorShutdown.sh | 23 + docker/scripts/mapReduceCancel.sh | 10 + docker/scripts/mapReduceQuery.sh | 21 + docker/scripts/mapReduceRemove.sh | 10 + docker/scripts/metrics.sh | 19 + docker/scripts/modification.sh | 31 + docker/scripts/oozieQuery.sh | 11 + docker/scripts/plan.sh | 18 + docker/scripts/poundit.sh | 19 + docker/scripts/predict.sh | 18 + docker/scripts/queryHealth.sh | 23 + docker/scripts/queryShutdown.sh | 23 + docker/scripts/streamingQuery.sh | 18 + docker/scripts/termFrequency.sh | 19 + docker/scripts/testAll.sh | 39 +- docker/scripts/webQuery.sh | 25 + docs/enunciate.xml | 4 +- docs/pom.xml | 15 +- microservices/configcheck/pom.xml | 2 
+- microservices/microservice-parent | 2 +- microservices/microservice-service-parent | 2 +- microservices/pom.xml | 2 +- microservices/services/accumulo | 2 +- microservices/services/audit | 2 +- microservices/services/authorization | 2 +- microservices/services/config | 2 +- microservices/services/dictionary | 2 +- microservices/services/hazelcast | 2 +- microservices/services/mapreduce-query | 1 + microservices/services/modification | 1 + microservices/services/pom.xml | 46 +- microservices/services/query | 1 + microservices/services/query-executor | 1 + microservices/services/query-metric | 2 +- microservices/starters/audit | 2 +- microservices/starters/cache | 2 +- microservices/starters/cached-results | 1 + microservices/starters/datawave | 2 +- microservices/starters/metadata | 2 +- microservices/starters/pom.xml | 42 +- microservices/starters/query | 1 + microservices/starters/query-metric | 2 +- pom.xml | 119 ++- properties/compose.properties | 16 +- properties/default.properties | 8 - properties/dev.properties | 16 +- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- .../filter/ConfigurableAgeOffFilter.java | 2 - warehouse/assemble/datawave/pom.xml | 10 +- .../datawave/src/main/assembly/dist.xml | 8 + warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- .../model/DefaultEdgeModelFieldsFactory.java | 62 ++ .../datawave/edge/model/EdgeModelAware.java | 375 -------- .../datawave/edge/model/EdgeModelFields.java | 324 +++++++ .../edge/model/EdgeModelFieldsFactory.java | 5 + .../main/java/datawave/edge/util/EdgeKey.java | 1 - .../java/datawave/edge/util/EdgeKeyUtil.java | 2 +- .../datawave/ingest/util/cache/Loader.java | 1 + .../datawave/mr/bulk/BulkInputFormat.java | 1 - .../java/datawave/query/data/UUIDType.java | 34 +- .../datawave/util/OperationEvaluator.java | 4 +- warehouse/data-dictionary-core/pom.xml 
| 14 +- .../datadictionary/DataDictionaryType.java | 0 .../datadictionary/RemoteDataDictionary.java | 0 warehouse/edge-dictionary-core/pom.xml | 14 +- .../EdgeDictionaryProviderImpl.java | 18 + .../edgedictionary/EdgeDictionaryType.java | 0 .../edgedictionary/RemoteEdgeDictionary.java | 7 +- .../edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- .../resources/config/myjson-ingest-config.xml | 10 +- warehouse/ingest-core/pom.xml | 16 +- .../datawave/ingest/data/TypeRegistry.java | 2 +- .../data/config/ingest/BaseIngestHelper.java | 2 +- .../ingest/ContentBaseIngestHelper.java | 1 - .../dateindex/DateIndexDataTypeHandler.java | 2 +- .../edge/ProtobufEdgeDataTypeHandler.java | 2 - .../error/ErrorShardedDataTypeHandler.java | 1 - .../shard/AbstractColumnBasedHandler.java | 2 +- .../handler/shard/ShardedDataTypeHandler.java | 2 +- .../summary/CoreSummaryDataTypeHandler.java | 2 +- .../MetricsSummaryDataTypeHandler.java | 2 +- .../summary/MetricsSummaryFormatter.java | 3 +- .../summary/SummaryDataTypeHandler.java | 2 +- .../ContentIndexingColumnBasedHandler.java | 1 - .../mapreduce/job/TableConfigurationUtil.java | 1 - .../partition/MultiTableRangePartitioner.java | 1 - .../config/ErrorShardTableConfigHelper.java | 1 - .../table/config/ShardTableConfigHelper.java | 2 - .../policy/ExampleIngestPolicyEnforcer.java | 2 +- .../job/BulkIngestMapFileLoaderTest.java | 2 - .../util/NGramTokenizationStrategyTest.java | 1 - warehouse/ingest-csv/pom.xml | 17 +- warehouse/ingest-json/pom.xml | 2 +- .../config/ingest/tvmaze-ingest-config.xml | 10 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- .../src/main/resources/bin/ingest/findJars.sh | 9 +- .../main/resources/bin/ingest/ingest-libs.sh | 9 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- .../metrics/mapreduce/MetricsIngester.java | 1 - 
warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 14 +- warehouse/query-core/pom.xml | 74 +- .../audit/DatawaveSelectorExtractor.java | 3 +- .../audit/SplitSelectorExtractor.java | 4 +- .../KeyAggregatingTransformIterator.java | 2 +- .../core}/query/util/QueryParserUtil.java | 2 +- .../java/datawave/mr/bulk/RfileScanner.java | 4 +- .../datawave/query/DocumentSerialization.java | 4 +- .../cardinality/CardinalityConfiguration.java | 10 +- .../config/ContentQueryConfiguration.java | 95 +- .../EdgeExtendedSummaryConfiguration.java | 163 +++- .../query/config/EdgeQueryConfiguration.java | 251 ++++- .../datawave/query/config/LookupUUIDTune.java | 4 +- .../java/datawave/query/config/Profile.java | 4 +- .../config/RemoteQueryConfiguration.java | 43 +- .../SSDeepSimilarityQueryConfiguration.java | 20 +- .../config/ShardIndexQueryConfiguration.java | 119 ++- .../query/config/ShardQueryConfiguration.java | 594 ++++++++++-- .../TermFrequencyQueryConfiguration.java | 49 +- .../query/dashboard/DashboardQueryLogic.java | 15 +- .../query/discovery/DiscoveredThing.java | 3 + .../query/discovery/DiscoveryLogic.java | 216 +++-- .../DiscoveryQueryConfiguration.java | 106 +- .../query/discovery/DiscoveryTransformer.java | 94 +- .../edge/DefaultExtendedEdgeQueryLogic.java | 121 +-- .../query/index/lookup/EntryParser.java | 8 +- .../query/index/lookup/RangeStream.java | 5 +- .../query/index/lookup/ShardRangeStream.java | 1 + .../query/index/lookup/TupleToRange.java | 19 +- .../query/iterator/QueryIterator.java | 3 +- .../iterator/filter/EdgeFilterIterator.java | 22 +- .../ivarator/IvaratorCacheDirConfig.java | 29 +- .../EvaluationTrackingNestedIterator.java | 3 + .../query/jexl/lookups/AsyncIndexLookup.java | 2 +- .../jexl/lookups/BoundedRangeIndexLookup.java | 2 +- .../jexl/lookups/FieldNameIndexLookup.java | 3 +- .../query/jexl/lookups/RegexIndexLookup.java | 7 +- 
.../ShardIndexQueryTableStaticMethods.java | 17 +- .../datawave/query/jexl/nodes/ExceededOr.java | 2 +- .../BoundedRangeIndexExpansionVisitor.java | 2 +- .../EdgeTableRangeBuildingVisitor.java | 47 +- .../jexl/visitors/ExpandCompositeTerms.java | 2 +- .../visitors/ExpandMultiNormalizedTerms.java | 2 +- .../jexl/visitors/GeoFeatureVisitor.java | 2 +- .../jexl/visitors/GeoWavePruningVisitor.java | 2 +- .../visitors/IsNotNullPruningVisitor.java | 2 +- .../JexlFormattedStringBuildingVisitor.java | 2 +- .../PushFunctionsIntoExceededValueRanges.java | 2 +- .../PushdownLargeFieldedListsVisitor.java | 2 +- .../jexl/visitors/QueryModelVisitor.java | 2 +- .../jexl/visitors/RegexFunctionVisitor.java | 2 +- .../visitors/RegexIndexExpansionVisitor.java | 4 +- .../jexl/visitors/TermCountingVisitor.java | 2 +- .../UnfieldedIndexExpansionVisitor.java | 2 +- .../ValidateFilterFunctionVisitor.java | 4 +- .../jexl/visitors/whindex/WhindexVisitor.java | 2 +- .../query/metrics/AccumuloRecordWriter.java | 9 +- .../query/metrics/BaseQueryMetricHandler.java | 2 +- .../ContentQueryMetricsIngestHelper.java | 10 +- .../query/metrics/QueryMetricQueryLogic.java | 32 +- .../query/model/edge/EdgeQueryModel.java | 44 +- .../{LoadModelFromXml.java => LoadModel.java} | 21 +- .../planner/BooleanChunkingQueryPlanner.java | 4 +- .../query/planner/DefaultQueryPlanner.java | 11 +- .../query/planner/FacetedQueryPlanner.java | 4 +- .../query/planner/IndexQueryPlanner.java | 4 +- .../MetadataHelperQueryModelProvider.java | 2 +- .../query/planner/QueryOptionsSwitch.java | 2 +- .../datawave/query/planner/QueryPlan.java | 90 +- .../datawave/query/planner/QueryPlanner.java | 6 +- .../query/planner/ThreadedRangeBundler.java | 4 +- .../planner/ThreadedRangeBundlerIterator.java | 13 +- .../query/scheduler/PushdownFunction.java | 9 +- .../query/scheduler/PushdownScheduler.java | 52 +- .../datawave/query/scheduler/Scheduler.java | 13 +- .../query/scheduler/SequentialScheduler.java | 129 ++- 
.../SingleRangeQueryDataIterator.java | 59 ++ .../table/parser/ContentKeyValueFactory.java | 49 +- .../query/tables/AnyFieldScanner.java | 2 +- .../query/tables/BaseRemoteQueryLogic.java | 25 +- .../query/tables/BatchScannerSession.java | 94 +- .../query/tables/CountingShardQueryLogic.java | 25 +- .../query/tables/DedupingIterator.java | 66 +- .../query/tables/ParentQueryLogic.java | 4 +- .../query/tables/PartitionedQueryLogic.java | 8 +- .../query/tables/RangeStreamScanner.java | 49 +- .../query/tables/RemoteEdgeQueryLogic.java | 34 +- .../query/tables/RemoteEventQueryLogic.java | 17 +- .../datawave/query/tables/ScannerFactory.java | 4 +- .../datawave/query/tables/ScannerSession.java | 56 +- .../query/tables/ShardIndexQueryTable.java | 475 ++++----- .../query/tables/ShardQueryLogic.java | 204 ++-- .../datawave/query/tables/async/Scan.java | 15 +- .../query/tables/async/ScannerChunk.java | 22 +- .../query/tables/async/SpeculativeScan.java | 11 +- .../tables/async/event/VisitorFunction.java | 6 +- .../chained/ChainedQueryConfiguration.java | 11 +- .../tables/chained/ChainedQueryTable.java | 10 +- .../iterators/ChainedQueryIterator.java | 4 +- .../chained/strategy/ChainStrategy.java | 4 +- .../chained/strategy/FullChainStrategy.java | 10 +- .../datawave/query/tables/chunk/Chunker.java | 4 +- .../content/ContentQueryCheckpoint.java | 71 ++ ...QueryTable.java => ContentQueryLogic.java} | 97 +- .../edge/DefaultEdgeEventQueryLogic.java | 46 +- .../tables/edge/DefaultEventQueryBuilder.java | 22 +- .../query/tables/edge/EdgeQueryLogic.java | 468 +++++---- .../tables/edge/contexts/IdentityContext.java | 14 +- .../tables/edge/contexts/QueryContext.java | 127 +-- .../edge/contexts/VisitationContext.java | 32 +- .../tables/facets/FacetQueryPlanVisitor.java | 1 + .../tables/facets/FacetedQueryLogic.java | 6 +- .../query/tables/remote/RemoteQueryLogic.java | 4 +- .../shard/CountAggregatingIterator.java | 5 +- .../shard/CountResultPostprocessor.java | 100 ++ 
.../shard/FieldIndexCountQueryLogic.java | 6 +- .../tables/shard/IndexStatsQueryLogic.java | 12 +- .../FullSSDeepDiscoveryChainStrategy.java | 6 +- .../SSDeepChainedDiscoveryQueryLogic.java | 6 +- .../ssdeep/SSDeepDiscoveryQueryLogic.java | 52 +- .../tables/ssdeep/SSDeepScoringFunction.java | 4 +- .../ssdeep/SSDeepSimilarityQueryLogic.java | 10 +- .../SSDeepSimilarityQueryTransformer.java | 13 +- .../tables/term/TermFrequencyQueryTable.java | 18 +- .../transformer/ContentQueryTransformer.java | 4 +- .../query/transformer/DocumentTransform.java | 2 +- .../transformer/DocumentTransformer.java | 12 +- .../DocumentTransformerInterface.java | 4 +- .../DocumentTransformerSupport.java | 62 +- .../transformer/EdgeQueryTransformer.java | 12 +- .../EdgeQueryTransformerSupport.java | 155 ++- .../transformer/EventQueryDataDecorator.java | 2 +- .../transformer/EventQueryTransformer.java | 6 +- .../EventQueryTransformerInterface.java | 6 +- .../EventQueryTransformerSupport.java | 81 +- .../query/transformer/FacetedTransformer.java | 6 +- .../FieldIndexCountQueryTransformer.java | 84 +- .../GroupingDocumentTransformer.java | 4 +- .../ParentDocumentTransformer.java | 4 +- .../ShardIndexQueryTransformer.java | 92 +- .../ShardQueryCountTableTransformer.java | 90 +- .../TermFrequencyQueryTransformer.java | 6 +- .../java/datawave/query/util/GeoUtils.java | 3 +- ...MetadataHelperCacheManagementListener.java | 2 +- .../MetadataHelperUpdateHdfsListener.java | 13 +- .../datawave/query/util/QueryInformation.java | 2 +- .../query/util/QueryScannerHelper.java | 2 +- .../src/main/resources/DATAWAVE_EDGE.xml | 28 +- .../audit/DatawaveSelectorExtractorTest.java | 2 +- .../audit/SplitSelectorExtractorTest.java | 2 +- .../KeyAggregatingTransformIteratorTest.java | 2 +- .../query/CheckpointableQueryTest.java | 17 + .../query/CompositeFunctionsTest.java | 8 +- .../query/DelayedIndexOnlyQueryTest.java | 9 +- .../test/java/datawave/query/ExcerptTest.java | 6 +- 
.../datawave/query/FunctionalSetTest.java | 9 +- ...AreAlwaysIncludedCommonalityTokenTest.java | 8 +- .../IfThisTestFailsThenHitTermsAreBroken.java | 4 +- .../datawave/query/IvaratorInterruptTest.java | 8 +- .../datawave/query/LenientFieldsTest.java | 6 +- .../datawave/query/LongRunningQueryTest.java | 18 +- .../java/datawave/query/LuceneQueryTest.java | 10 +- .../query/MaxExpansionRegexQueryTest.java | 15 +- .../query/MixedGeoAndGeoWaveTest.java | 30 +- .../datawave/query/NumericListQueryTest.java | 7 +- .../test/java/datawave/query/ShapesTest.java | 5 +- ...estLimitReturnedGroupsToHitTermGroups.java | 9 +- .../datawave/query/TextFunctionQueryTest.java | 8 +- .../query/UnevaluatedFieldsQueryTest.java | 8 +- .../query/UnindexedNumericQueryTest.java | 6 +- .../test/java/datawave/query/UniqueTest.java | 10 +- ...UseOccurrenceToCountInJexlContextTest.java | 4 +- .../cardinality/TestCardinalityWithQuery.java | 8 +- .../config/ShardQueryConfigurationTest.java | 46 +- .../query/discovery/DiscoveryLogicTest.java | 7 +- .../edge/ExtendedEdgeQueryLogicTest.java | 33 +- .../function/HitsAreAlwaysIncludedTest.java | 8 +- .../query/index/lookup/EntryParserTest.java | 10 +- .../query/index/lookup/TupleToRangeTest.java | 17 +- .../functions/ContentFunctionQueryTest.java | 31 +- ...ExceededOrThresholdMarkerJexlNodeTest.java | 35 +- .../EdgeTableRangeBuildingVisitorTest.java | 4 +- .../ExecutableExpansionVisitorTest.java | 7 +- .../query/planner/CompositeIndexTest.java | 37 +- .../query/planner/GeoSortedQueryDataTest.java | 35 +- .../planner/MultiValueCompositeIndexTest.java | 37 +- .../planner/ThreadedRangeBundlerTest.java | 4 +- .../predicate/ValueToAttributesTest.java | 8 +- .../query/tables/DedupingIteratorTest.java | 34 +- .../query/tables/IndexQueryLogicTest.java | 2 +- .../query/tables/RangeStreamScannerTest.java | 32 +- .../tables/RemoteEdgeQueryLogicHttpTest.java | 10 +- .../tables/RemoteEdgeQueryLogicTest.java | 17 +- .../tables/RemoteEventQueryLogicHttpTest.java | 10 +- 
.../tables/RemoteEventQueryLogicTest.java | 17 +- .../query/tables/ShardQueryLogicTest.java | 8 +- .../async/event/VisitorFunctionTest.java | 59 +- ...leTest.java => ContentQueryLogicTest.java} | 44 +- .../query/tables/edge/BaseEdgeQueryTest.java | 66 +- .../edge/CheckpointableEdgeQueryTest.java | 17 + .../CheckpointableExtendedEdgeQueryTest.java | 19 + .../edge/DefaultEdgeEventQueryLogicTest.java | 8 +- .../tables/edge/EdgeQueryFunctionalTest.java | 147 ++- .../tables/facets/FacetedQueryLogicTest.java | 2 +- .../tables/ssdeep/SSDeepIndexQueryTest.java | 6 +- .../tables/ssdeep/SSDeepIngestQueryTest.java | 13 +- .../ssdeep/SSDeepSimilarityQueryTest.java | 6 +- .../SSDeepSimilarityQueryTransformerTest.java | 2 +- .../testframework/AbstractDataTypeConfig.java | 11 + .../AbstractFunctionalQuery.java | 121 ++- .../query/testframework/BaseRawData.java | 5 +- .../testframework/QueryLogicTestHarness.java | 164 +++- .../query/tld/TLDNormalFlattenQueryTest.java | 2 +- .../transformer/DocumentTransformerTest.java | 6 +- .../query/transformer/GroupingTest.java | 8 +- .../query/transformer/NoExpansionTests.java | 6 +- .../UniqueTransformMostRecentTest.java | 2 +- .../src/test/resources/DATAWAVE_EDGE.xml | 28 +- .../datawave/query/EventQueryLogicFactory.xml | 17 +- .../datawave/query/QueryLogicFactory.xml | 27 +- warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 15 +- web-services/atom/pom.xml | 7 +- .../webservice/atom/AtomServiceBean.java | 22 +- web-services/cached-results/pom.xml | 17 +- .../database/CachedResultsCleanupBean.java | 6 +- .../results/cached/CachedResultsBean.java | 190 ++-- .../results/cached/CachedRunningQuery.java | 63 +- .../cached/CachedRunningQueryTest.java | 2 +- web-services/client/pom.xml | 7 +- .../datawave/audit/SelectorExtractor.java | 2 +- .../java/datawave/webservice/query/Query.java | 118 --- .../datawave/webservice/query/QueryImpl.java | 904 ------------------ 
.../webservice/query/QueryParameters.java | 97 -- .../webservice/query/QueryParametersImpl.java | 552 ----------- .../webservice/query/QueryPersistence.java | 7 - .../cachedresults/CacheableQueryRow.java | 11 + .../query/result/event/DefaultEvent.java | 9 + .../query/result/event/DefaultField.java | 76 +- .../result/event/ResponseObjectFactory.java | 2 +- .../result/logic/QueryLogicDescription.java | 14 +- .../result/QueryImplListResponse.java | 3 +- .../results/mr/MapReduceJobDescription.java | 7 +- .../mr/MapReduceJobDescriptionList.java | 34 + .../webservice/query/map}/package-info.java | 5 +- .../query/result/metadata/package-info.java | 7 - .../datawave/user/UserAuthorizationsTest.java | 82 ++ .../webservice/query/QueryParametersTest.java | 16 +- .../webservice/query/TestQueryImpl.java | 3 +- .../webservice/query/TestQueryParameters.java | 33 +- web-services/common-util/pom.xml | 25 +- .../authorization/UserOperations.java | 60 -- .../java/datawave/security/util/DnUtils.java | 2 + .../webservice/query/cache/ResultsPage.java | 45 - web-services/common/pom.xml | 13 +- .../webservice/common/audit/AuditBean.java | 3 +- .../common/audit/AuditParameterBuilder.java | 4 +- .../audit/DefaultAuditParameterBuilder.java | 6 +- .../common/cache/AccumuloTableCacheBean.java | 213 +++++ .../AccumuloTableCacheConfiguration.java | 53 - .../AccumuloConnectionFactoryBean.java | 325 ++----- .../ConnectionPoolClientConfiguration.java | 7 +- .../config/ConnectionPoolConfiguration.java | 52 +- .../config/ConnectionPoolsConfiguration.java | 45 +- .../webservice/common/health/HealthBean.java | 4 +- web-services/deploy/application/pom.xml | 67 +- web-services/deploy/configuration/pom.xml | 5 +- .../datawave/mapreduce/MapReduceJobs.xml | 2 +- .../ExampleModificationServices.xml | 4 +- .../modification/ModificationServices.xml | 8 +- .../datawave/query/EdgeQueryLogicFactory.xml | 2 + .../datawave/query/QueryExpiration.xml | 10 +- .../datawave/query/QueryLogicFactory.xml | 33 +- 
web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- .../spring-framework-integration/pom.xml | 11 +- .../WiredQueryExecutorBeanTest.java | 39 +- web-services/dictionary/pom.xml | 6 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- .../EmbeddedAccumuloConnectionFactory.java | 38 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 31 +- .../datawave/webservice/mr/MapReduceBean.java | 9 +- .../map/ApplicationContextAwareMapper.java | 23 - .../bulkresults/map/SerializationFormat.java | 5 - .../map/WeldBulkResultsFileOutputMapper.java | 32 + .../BulkResultsJobConfiguration.java | 70 +- .../MapReduceJobConfiguration.java | 2 - .../NeedAccumuloConnectionFactory.java | 2 +- .../configuration/NeedQueryLogicFactory.java | 2 +- .../mr/state/MapReduceStatePersisterBean.java | 16 +- .../webservice/mr/MapReduceBeanTest.java | 4 +- .../mr/state/MapReduceStatePersisterTest.java | 32 +- web-services/metrics/pom.xml | 53 + .../query/map/SimpleQueryGeometryHandler.java | 6 +- .../metrics/ShardTableQueryMetricHandler.java | 29 +- .../src/main/resources/META-INF/beans.xml | 9 + .../map/SimpleQueryGeometryHandlerTest.java | 231 +++++ .../src/test/resources/log4j.properties | 31 + web-services/model/pom.xml | 7 +- .../webservice/query/model/ModelBean.java | 41 +- .../webservice/query/model/ModelBeanTest.java | 87 +- web-services/modification/pom.xml | 7 +- .../modification/ModificationBean.java | 130 +-- .../QueryExecutorBeanService.java | 43 + .../cache/ModificationCacheBean.java | 84 +- .../cache/ModificationCacheMessageBean.java | 6 +- web-services/pom.xml | 26 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 14 +- .../cache/CreatedQueryLogicCacheBean.java | 4 +- 
.../query/cache/QueryCacheBean.java | 2 +- .../query/cache/QueryExpirationBean.java | 15 +- .../cache/QueryExpirationConfiguration.java | 91 -- .../query/cache/RunningQueryTimingImpl.java | 6 +- .../IdTranslatorConfiguration.java | 4 +- .../LookupUUIDConfiguration.java | 2 +- .../query/configuration/QueryData.java | 206 ---- .../query/dashboard/DashboardBean.java | 15 +- .../query/dashboard/DashboardQuery.java | 20 +- .../webservice/query/factory/Persister.java | 31 +- .../webservice/query/hud/HudBean.java | 2 +- .../query/hud/HudQuerySummaryBuilder.java | 2 +- .../QueryMetricsEnrichmentInterceptor.java | 4 +- .../query/logic/DatawaveRoleManager.java | 43 - .../query/logic/EasyRoleManager.java | 23 - .../query/logic/QueryLogicFactory.java | 23 - .../logic/QueryLogicFactoryConfiguration.java | 2 + .../query/logic/QueryLogicFactoryImpl.java | 35 +- .../webservice/query/logic/RoleManager.java | 14 - .../query/logic/RoleManagerImpl.java | 5 - .../query/logic/WritesQueryMetrics.java | 9 - .../query/metric/QueryMetricsBean.java | 3 +- .../query/metric/QueryMetricsWriter.java | 1 + .../predicate/AuthorizationsPredicate.java | 53 - .../query/remote/RemoteQueryServiceImpl.java | 17 +- .../runner/AccumuloConnectionRequestBean.java | 57 +- .../query/runner/BasicQueryBean.java | 23 +- .../query/runner/IdTranslatorBean.java | 14 +- .../query/runner/NoOpQueryPredictor.java | 14 - .../query/runner/QueryExecutor.java | 2 +- .../query/runner/QueryExecutorBean.java | 209 ++-- .../webservice/query/runner/RunningQuery.java | 25 +- .../util/AbstractUUIDLookupCriteria.java | 14 +- .../query/util/GetUUIDCriteria.java | 4 +- .../webservice/query/util/LookupUUIDUtil.java | 65 +- .../webservice/query/util/MapUtils.java | 31 + .../query/util/NextContentCriteria.java | 6 +- .../query/util/PostUUIDCriteria.java | 4 +- .../query/util/UIDQueryCriteria.java | 4 +- .../cache/CreatedQueryLogicCacheBeanTest.java | 2 +- .../query/cache/QueryCacheBeanTest.java | 10 +- 
.../query/cache/QueryExpirationBeanTest.java | 13 +- .../cache/RunningQueryTimingImplTest.java | 20 +- .../query/cache/TestQueryLogic.java | 14 +- .../GenericQueryConfigurationMockTest.java | 10 +- .../GenericQueryConfigurationTest.java | 1 + .../query/configuration/QueryDataTest.java | 6 +- .../configuration/TestBaseQueryLogic.java | 29 +- ...QueryMetricsEnrichmentInterceptorTest.java | 2 +- .../query/logic/BaseQueryLogicTest.java | 12 +- .../ConfiguredQueryLogicFactoryBeanTest.java | 11 +- .../query/logic/DatawaveRoleManagerTest.java | 139 --- .../logic/QueryLogicFactoryBeanTest.java | 10 +- .../TestLegacyBaseQueryLogicTransformer.java | 3 +- .../query/logic/TestQueryLogic.java | 8 +- .../composite/CompositeQueryLogicTest.java | 111 +-- .../filtered/FilteredQueryLogicTest.java | 10 +- .../filtered/QueryLogicFilterByAuthTest.java | 2 + .../QueryLogicFilterByParameterTest.java | 3 +- .../runner/ExtendedQueryExecutorBeanTest.java | 719 +++++++------- .../runner/ExtendedRunningQueryTest.java | 24 +- .../query/runner/QueryExecutorBeanTest.java | 83 +- .../query/runner/RunningQueryTest.java | 31 +- .../query/util/LookupUUIDUtilTest.java | 6 +- .../webservice/query/util/QueryUtilTest.java | 7 +- .../TestConfiguredQueryLogicFactory.xml | 10 +- .../test/resources/TestQueryLogicFactory.xml | 15 - web-services/rest-api/pom.xml | 2 +- .../rest-api/src/main/webapp/WEB-INF/web.xml | 2 +- web-services/security/pom.xml | 48 +- .../ConditionalRemoteUserOperations.java | 109 --- .../remote/RemoteUserOperationsImpl.java | 11 +- .../test/TestDatawaveUserService.java | 4 +- .../security/cache/CredentialsCacheBean.java | 4 +- .../security/user/UserOperationsBean.java | 10 +- .../ConditionalRemoteUserOperationsTest.java | 11 +- .../RemoteUserOperationsImplHttpTest.java | 2 +- .../test/TestDatawaveUserServiceTest.java | 32 +- .../cache/CredentialsCacheBeanTest.java | 28 +- web-services/web-root/pom.xml | 2 +- 690 files changed, 15535 insertions(+), 8248 deletions(-) create mode 100755 
contrib/datawave-quickstart/bin/modification.sh create mode 160000 contrib/datawave-utils create mode 100644 core/cached-results/pom.xml rename {web-services/query/src/main/java/datawave/webservice => core/cached-results/src/main/java/datawave/core}/query/cachedresults/CacheableQueryRowImpl.java (99%) rename {web-services/query/src/main/java/datawave/webservice => core/cached-results/src/main/java/datawave/core}/query/cachedresults/CacheableQueryRowReader.java (95%) rename web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsParameters.java => core/cached-results/src/main/java/datawave/core/query/cachedresults/CachedResultsQueryParameters.java (96%) create mode 100644 core/common-util/pom.xml rename {web-services/common-util/src/main/java/datawave/webservice => core/common-util/src/main/java/datawave/core}/common/logging/ThreadConfigurableLogger.java (99%) rename {web-services/common-util/src/main/java/datawave/webservice => core/common-util/src/main/java/datawave/core/common}/util/EnvProvider.java (91%) create mode 100644 core/common-util/src/main/java/datawave/core/query/cache/ResultsPage.java rename {web-services => core}/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java (97%) create mode 100644 core/common-util/src/main/resources/META-INF/beans.xml create mode 100644 core/common-util/src/main/resources/META-INF/jboss-ejb3.xml rename {web-services => core}/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java (99%) rename {web-services/common-util/src/test/java/datawave/webservice => core/common-util/src/test/java/datawave/core/common}/util/EnvProviderTest.java (95%) create mode 100644 core/common/pom.xml rename {web-services/common/src/main/java/datawave/webservice => core/common/src/main/java/datawave/core}/common/audit/PrivateAuditConstants.java (80%) create mode 100644 
core/common/src/main/java/datawave/core/common/edgedictionary/EdgeDictionaryProvider.java rename {web-services/common/src/main/java/datawave/webservice => core/common/src/main/java/datawave/core}/common/extjs/ExtJsResponse.java (95%) create mode 100644 core/connection-pool/pom.xml create mode 100644 core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCache.java rename web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCache.java => core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheImpl.java (50%) create mode 100644 core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheProperties.java rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/BaseTableCache.java (96%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedBoolean.java (98%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedBooleanListener.java (90%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedBooleanReader.java (89%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedCacheCoordinator.java (97%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedTriState.java (99%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedTriStateListener.java (85%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/SharedTriStateReader.java (82%) rename 
{web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/cache/TableCache.java (90%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/connection/AccumuloClientPool.java (88%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/connection/AccumuloClientPoolFactory.java (97%) rename {web-services/common/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/connection/AccumuloConnectionFactory.java (52%) create mode 100644 core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactoryImpl.java rename {web-services/common-util/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/result/AccumuloTableCacheStatus.java (92%) rename {web-services/common-util/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/result/Connection.java (98%) rename {web-services/common-util/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/result/ConnectionFactoryResponse.java (99%) rename {web-services/common-util/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/result/ConnectionPool.java (98%) create mode 100644 core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolClientProperties.java create mode 100644 core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolProperties.java create mode 100644 core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolsProperties.java rename {web-services/common-util/src/main/java/datawave/webservice => core/connection-pool/src/main/java/datawave/core}/common/result/ConnectionProperty.java (97%) rename 
web-services/common-util/src/main/java/datawave/webservice/common/result/TableCache.java => core/connection-pool/src/main/java/datawave/core/common/result/TableCacheDescription.java (95%) create mode 100644 core/connection-pool/src/main/java/datawave/core/query/runner/AccumuloConnectionRequestMap.java create mode 100644 core/connection-pool/src/main/resources/META-INF/beans.xml create mode 100644 core/connection-pool/src/main/resources/META-INF/jboss-ejb3.xml rename {web-services/security/src/main/resources/source-templates/datawave/security/cache => core/connection-pool/src/main/resources/source-templates/datawave/core/common/result}/package-info.java (76%) rename {web-services/common/src/test/java/datawave/webservice => core/connection-pool/src/test/java/datawave/core}/common/cache/SharedCacheCoordinatorTest.java (96%) rename {web-services/common/src/test/java/datawave/webservice => core/connection-pool/src/test/java/datawave/core}/common/connection/AccumuloConnectionFactoryTest.java (68%) rename {web-services/common/src/test/java/datawave/webservice => core/connection-pool/src/test/java/datawave/core}/common/curator/TestSharedCacheCoordinator.java (98%) rename {web-services/common-util/src/test/java/datawave => core/connection-pool/src/test/java/datawave/core}/common/result/ConnectionPoolTest.java (94%) create mode 100644 core/connection-pool/src/test/resources/log4j.properties create mode 100644 core/map-reduce/pom.xml create mode 100644 core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/ApplicationContextAwareMapper.java rename {web-services/map-reduce/src/main/java/datawave/webservice/mr => core/map-reduce/src/main/java/datawave/core/mapreduce}/bulkresults/map/BulkResultsFileOutputMapper.java (90%) rename {web-services/map-reduce/src/main/java/datawave/webservice/mr => core/map-reduce/src/main/java/datawave/core/mapreduce}/bulkresults/map/BulkResultsTableOutputMapper.java (82%) create mode 100644 core/modification/pom.xml create mode 100644 
core/modification/src/main/java/datawave/modification/DatawaveModificationException.java create mode 100644 core/modification/src/main/java/datawave/modification/ModificationService.java rename {warehouse/query-core/src/main/java/datawave/webservice => core/modification/src/main/java/datawave}/modification/MutableMetadataHandler.java (94%) rename {warehouse/query-core/src/main/java/datawave/webservice => core/modification/src/main/java/datawave}/modification/MutableMetadataUUIDHandler.java (92%) create mode 100644 core/modification/src/main/java/datawave/modification/cache/ModificationCache.java rename {warehouse/query-core/src/main/java/datawave/webservice => core/modification/src/main/java/datawave}/modification/configuration/ModificationConfiguration.java (95%) rename {warehouse/query-core/src/main/java/datawave/webservice => core/modification/src/main/java/datawave}/modification/configuration/ModificationServiceConfiguration.java (70%) create mode 100644 core/modification/src/main/java/datawave/modification/query/ModificationQueryService.java create mode 100644 core/query/pom.xml rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/cachedresults/CacheableLogic.java (51%) create mode 100644 core/query/src/main/java/datawave/core/query/configuration/CheckpointableQueryConfiguration.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/configuration/GenericQueryConfiguration.java (53%) create mode 100644 core/query/src/main/java/datawave/core/query/configuration/QueryData.java create mode 100644 core/query/src/main/java/datawave/core/query/configuration/Result.java create mode 100644 core/query/src/main/java/datawave/core/query/configuration/ResultContext.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/dashboard/DashboardFields.java (98%) rename 
{web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/dashboard/DashboardSummary.java (99%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/exception/EmptyObjectException.java (81%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/iterator/DatawaveTransformIterator.java (91%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/AbstractQueryLogicTransformer.java (93%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/BaseQueryLogic.java (87%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/BaseQueryLogicTransformer.java (92%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/CheckpointableQueryLogic.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/DelegatingQueryLogic.java (88%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/Flushable.java (84%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/QueryCheckpoint.java create mode 100644 core/query/src/main/java/datawave/core/query/logic/QueryKey.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/QueryLogic.java (91%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/QueryLogicFactory.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/QueryLogicTransformer.java (86%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/ResponseEnricher.java (79%) rename 
{web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/ResponseEnricherBuilder.java (59%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/ResultPostprocessor.java create mode 100644 core/query/src/main/java/datawave/core/query/logic/WritesQueryMetrics.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/WritesResultCardinalities.java (67%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeLogicException.java (85%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryCheckpoint.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeQueryConfiguration.java (81%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeQueryLogic.java (77%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeQueryLogicResults.java (95%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeQueryLogicResultsIterator.java (95%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeQueryLogicTransformer.java (81%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/composite/CompositeUserOperations.java (89%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/filtered/FilteredQueryLogic.java (91%) rename {web-services/query/src/main/java/datawave/webservice => 
core/query/src/main/java/datawave/core}/query/logic/filtered/QueryLogicFilterByAuth.java (91%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/logic/filtered/QueryLogicFilterByParameter.java (89%) create mode 100644 core/query/src/main/java/datawave/core/query/logic/lookup/LookupQueryLogic.java create mode 100644 core/query/src/main/java/datawave/core/query/logic/lookup/uid/LookupUIDQueryLogic.java create mode 100644 core/query/src/main/java/datawave/core/query/logic/lookup/uuid/LookupUUIDQueryLogic.java rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/map/QueryGeometryHandler.java (71%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/metric/QueryMetricHandler.java (96%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/predicate/ProxiedAuthorizationsPredicate.java (95%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/predicate/QueryParameterPredicate.java (92%) create mode 100644 core/query/src/main/java/datawave/core/query/predict/NoOpQueryPredictor.java rename {web-services/query/src/main/java/datawave/webservice/query/runner => core/query/src/main/java/datawave/core/query/predict}/QueryPredictor.java (94%) rename {web-services/common/src/main/java/datawave/webservice/common => core/query/src/main/java/datawave/core/query}/remote/RemoteQueryService.java (67%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/result/event/DefaultResponseObjectFactory.java (79%) rename {web-services/query/src/main/java/datawave/webservice => core/query/src/main/java/datawave/core}/query/util/QueryUtil.java (96%) create mode 100644 core/query/src/main/resources/META-INF/beans.xml create mode 100644 
core/query/src/main/resources/META-INF/jboss-ejb3.xml create mode 100755 docker/cleanup.sh create mode 100755 docker/config/application-cachedresults.yml create mode 100644 docker/config/application-federation.yml create mode 100755 docker/config/application-metricssource.yml create mode 100755 docker/config/application-mrquery.yml create mode 100755 docker/config/application-query.yml create mode 100755 docker/config/application-querymessaging.yml create mode 100755 docker/config/executor-pool1.yml create mode 100755 docker/config/executor-pool2.yml create mode 100755 docker/config/executor.yml create mode 100755 docker/config/modification.yml create mode 100755 docker/config/mrquery.yml create mode 100755 docker/config/query.yml create mode 100644 docker/debug.yml.example create mode 100755 docker/restart.sh create mode 100755 docker/scripts/cachedResultsQuery.sh create mode 100755 docker/scripts/cancel.sh create mode 100755 docker/scripts/close.sh create mode 100755 docker/scripts/common/cachedResultsQuery.sh create mode 100755 docker/scripts/common/edge.sh create mode 100755 docker/scripts/common/mapReduceQuery.sh create mode 100755 docker/scripts/common/oozieQuery.sh create mode 100755 docker/scripts/common/plan.sh create mode 100755 docker/scripts/common/predict.sh create mode 100755 docker/scripts/common/streamingQuery.sh create mode 100755 docker/scripts/connectionFactory.sh create mode 100755 docker/scripts/edge.sh create mode 100755 docker/scripts/edgeEvent.sh create mode 100755 docker/scripts/executorHealth.sh create mode 100755 docker/scripts/executorShutdown.sh create mode 100755 docker/scripts/mapReduceCancel.sh create mode 100755 docker/scripts/mapReduceQuery.sh create mode 100755 docker/scripts/mapReduceRemove.sh create mode 100755 docker/scripts/metrics.sh create mode 100755 docker/scripts/modification.sh create mode 100755 docker/scripts/oozieQuery.sh create mode 100755 docker/scripts/plan.sh create mode 100755 docker/scripts/poundit.sh create 
mode 100755 docker/scripts/predict.sh create mode 100755 docker/scripts/queryHealth.sh create mode 100755 docker/scripts/queryShutdown.sh create mode 100755 docker/scripts/streamingQuery.sh create mode 100755 docker/scripts/termFrequency.sh create mode 100755 docker/scripts/webQuery.sh create mode 160000 microservices/services/mapreduce-query create mode 160000 microservices/services/modification create mode 160000 microservices/services/query create mode 160000 microservices/services/query-executor create mode 160000 microservices/starters/cached-results create mode 160000 microservices/starters/query create mode 100644 warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java delete mode 100644 warehouse/core/src/main/java/datawave/edge/model/EdgeModelAware.java create mode 100644 warehouse/core/src/main/java/datawave/edge/model/EdgeModelFields.java create mode 100644 warehouse/core/src/main/java/datawave/edge/model/EdgeModelFieldsFactory.java rename warehouse/{query-core => data-dictionary-core}/src/main/java/datawave/webservice/datadictionary/DataDictionaryType.java (100%) rename warehouse/{query-core => data-dictionary-core}/src/main/java/datawave/webservice/datadictionary/RemoteDataDictionary.java (100%) create mode 100644 warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryProviderImpl.java rename warehouse/{query-core => edge-dictionary-core}/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryType.java (100%) rename warehouse/{query-core => edge-dictionary-core}/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java (95%) rename {web-services/query/src/main/java/datawave/webservice => warehouse/query-core/src/main/java/datawave/core}/query/iterator/KeyAggregatingTransformIterator.java (99%) rename {web-services/query/src/main/java/datawave/webservice => warehouse/query-core/src/main/java/datawave/core}/query/util/QueryParserUtil.java (74%) rename 
warehouse/query-core/src/main/java/datawave/query/model/util/{LoadModelFromXml.java => LoadModel.java} (84%) create mode 100644 warehouse/query-core/src/main/java/datawave/query/scheduler/SingleRangeQueryDataIterator.java create mode 100644 warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryCheckpoint.java rename warehouse/query-core/src/main/java/datawave/query/tables/content/{ContentQueryTable.java => ContentQueryLogic.java} (75%) create mode 100644 warehouse/query-core/src/main/java/datawave/query/tables/shard/CountResultPostprocessor.java rename {web-services/query/src/test/java/datawave/webservice => warehouse/query-core/src/test/java/datawave/core}/query/iterator/KeyAggregatingTransformIteratorTest.java (99%) create mode 100644 warehouse/query-core/src/test/java/datawave/query/CheckpointableQueryTest.java rename warehouse/query-core/src/test/java/datawave/query/tables/content/{ContentQueryTableTest.java => ContentQueryLogicTest.java} (82%) create mode 100644 warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableEdgeQueryTest.java create mode 100644 warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableExtendedEdgeQueryTest.java delete mode 100644 web-services/client/src/main/java/datawave/webservice/query/Query.java delete mode 100644 web-services/client/src/main/java/datawave/webservice/query/QueryImpl.java delete mode 100644 web-services/client/src/main/java/datawave/webservice/query/QueryParameters.java delete mode 100644 web-services/client/src/main/java/datawave/webservice/query/QueryParametersImpl.java delete mode 100644 web-services/client/src/main/java/datawave/webservice/query/QueryPersistence.java create mode 100644 web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescriptionList.java rename web-services/{common-util/src/main/resources/source-templates/datawave/webservice/common/result => 
client/src/main/resources/source-templates/datawave/webservice/query/map}/package-info.java (74%) delete mode 100644 web-services/client/src/main/resources/source-templates/datawave/webservice/query/result/metadata/package-info.java create mode 100644 web-services/client/src/test/java/datawave/user/UserAuthorizationsTest.java delete mode 100644 web-services/common-util/src/main/java/datawave/security/authorization/UserOperations.java delete mode 100644 web-services/common-util/src/main/java/datawave/webservice/query/cache/ResultsPage.java create mode 100644 web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheBean.java delete mode 100644 web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheConfiguration.java delete mode 100644 web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/ApplicationContextAwareMapper.java delete mode 100644 web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/SerializationFormat.java create mode 100644 web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/WeldBulkResultsFileOutputMapper.java create mode 100644 web-services/metrics/pom.xml rename {warehouse/query-core => web-services/metrics}/src/main/java/datawave/query/map/SimpleQueryGeometryHandler.java (95%) rename {warehouse/query-core => web-services/metrics}/src/main/java/datawave/query/metrics/ShardTableQueryMetricHandler.java (97%) create mode 100644 web-services/metrics/src/main/resources/META-INF/beans.xml create mode 100644 web-services/metrics/src/test/java/datawave/query/map/SimpleQueryGeometryHandlerTest.java create mode 100644 web-services/metrics/src/test/resources/log4j.properties create mode 100644 web-services/modification/src/main/java/datawave/webservice/modification/QueryExecutorBeanService.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationConfiguration.java delete mode 100644 
web-services/query/src/main/java/datawave/webservice/query/configuration/QueryData.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/DatawaveRoleManager.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/EasyRoleManager.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactory.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/RoleManager.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/RoleManagerImpl.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/logic/WritesQueryMetrics.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/predicate/AuthorizationsPredicate.java delete mode 100644 web-services/query/src/main/java/datawave/webservice/query/runner/NoOpQueryPredictor.java create mode 100644 web-services/query/src/main/java/datawave/webservice/query/util/MapUtils.java delete mode 100644 web-services/query/src/test/java/datawave/webservice/query/logic/DatawaveRoleManagerTest.java delete mode 100644 web-services/security/src/main/java/datawave/security/authorization/remote/ConditionalRemoteUserOperations.java diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 37a71b97cb6..ce2cb2b9723 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -171,4 +171,3 @@ jobs: # docker tag ${IMAGE_NAME}:${TAG} ${REMOTE_IMAGE_NAME}:${TAG} # docker images # docker push ${REMOTE_IMAGE_NAME}:${TAG} - diff --git a/.gitmodules b/.gitmodules index c5ed6ab8d72..b7832653b95 100644 --- a/.gitmodules +++ b/.gitmodules @@ -61,3 +61,24 @@ [submodule "microservices/microservice-service-parent"] path = microservices/microservice-service-parent url = git@github.com:NationalSecurityAgency/datawave-service-parent.git +[submodule "contrib/datawave-utils"] + path = contrib/datawave-utils + url 
= git@github.com:NationalSecurityAgency/datawave-utils.git +[submodule "microservices/starters/query"] + path = microservices/starters/query + url = git@github.com:NationalSecurityAgency/datawave-spring-boot-starter-query.git +[submodule "microservices/services/query"] + path = microservices/services/query + url = git@github.com:NationalSecurityAgency/datawave-query-service.git +[submodule "microservices/services/query-executor"] + path = microservices/services/query-executor + url = git@github.com:NationalSecurityAgency/datawave-query-executor-service.git +[submodule "microservices/services/modification"] + path = microservices/services/modification + url = git@github.com:NationalSecurityAgency/datawave-modification-service.git +[submodule "microservices/services/mapreduce-query"] + path = microservices/services/mapreduce-query + url = git@github.com:NationalSecurityAgency/datawave-mapreduce-query-service.git +[submodule "microservices/starters/cached-results"] + path = microservices/starters/cached-results + url = git@github.com:NationalSecurityAgency/datawave-spring-boot-starter-cached-results.git diff --git a/.mvn/maven-build-cache-config.xml b/.mvn/maven-build-cache-config.xml index 8def3e68c94..bf5dbbb1766 100644 --- a/.mvn/maven-build-cache-config.xml +++ b/.mvn/maven-build-cache-config.xml @@ -71,6 +71,22 @@ install + + + build + push + + + + + copy-resources + + + + + repackage + + diff --git a/README.md b/README.md index 09fb2dd4631..2b154209b7b 100644 --- a/README.md +++ b/README.md @@ -56,5 +56,9 @@ mvn -Pdocker,dist -DskipMicroservices clean install -T 1C git submodule deinit --all ``` +### DataWave Microservices + +For more information about deploying the datawave quickstart and microservices, check out the [Docker Readme](docker/README.md#usage) + [li]: http://img.shields.io/badge/license-ASL-blue.svg [ll]: https://www.apache.org/licenses/LICENSE-2.0 diff --git a/common-test/pom.xml b/common-test/pom.xml index 0745d338df6..5230d7f1272 100644 --- 
a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/.gitignore b/contrib/datawave-quickstart/.gitignore index b8db490d380..ba4449d9f5c 100644 --- a/contrib/datawave-quickstart/.gitignore +++ b/contrib/datawave-quickstart/.gitignore @@ -28,4 +28,4 @@ !docker !docker/** -docker/target/** +docker/target/** \ No newline at end of file diff --git a/contrib/datawave-quickstart/bin/common.sh b/contrib/datawave-quickstart/bin/common.sh index 2fc1f9acda5..8c6639f50d8 100644 --- a/contrib/datawave-quickstart/bin/common.sh +++ b/contrib/datawave-quickstart/bin/common.sh @@ -2,6 +2,7 @@ BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source "${BIN_DIR}/logging.sh" source "${BIN_DIR}/query.sh" +source "${BIN_DIR}/modification.sh" # Upon uninstall, tarballs will be preserved in place by default. # To remove them, use DW_UNINSTALL_RM_BINARIES_FLAG_* diff --git a/contrib/datawave-quickstart/bin/modification.sh b/contrib/datawave-quickstart/bin/modification.sh new file mode 100755 index 00000000000..fc6e31c5b61 --- /dev/null +++ b/contrib/datawave-quickstart/bin/modification.sh @@ -0,0 +1,295 @@ +# +# if using feature/queryMicroservices quickstart webserver: +# DW_MODIFICATION_URI=https://localhost:9443/DataWave/Modification +# +# if using feature/queryMicroservices deployed modification service +# DW_BASE_URI=-https://localhost:9443/DataWave +# DW_MODIFICATION_URI=https://localhost:9343/modification/v1 +# + +DW_BASE_URI=${DW_BASE_URI:-https://localhost:8443/DataWave} +DW_MODIFICATION_URI=${DW_MODIFICATION_URI:-${DW_BASE_URI}/Modification} + +function xmlencode() { + local s + s=${1//&/&} + s=${s///>} + s=${s//'"'/"} + printf -- %s "$s" +} + +function datawaveModification() { + + ! datawaveIsInstalled && info "DataWave Web is not installed. Try 'datawaveInstall'" && return 1 + ! 
datawaveWebIsRunning && info "DataWave Web is not running. Try 'datawaveWebStart'" && return 1 + + # Reset + + DW_QUERY_RESPONSE_BODY="" + DW_QUERY_RESPONSE_CODE="" + DW_QUERY_RESPONSE_TYPE="" + DW_QUERY_TOTAL_TIME="" + + # Both 'Content-Type: application/x-www-form-urlencoded' and 'Accept: application/json' + # added by default, but may be overridden, if needed, via --header,-H option + + DW_REQUEST_HEADERS="" + + # Defaults + + DW_MODIFICATION_COMMAND="INSERT" + DW_MODIFICATION_SERVICE="MutableMetadataUUIDService" + DW_MODIFICATION_VIZ="BAR&FOO" + DW_MODIFICATION_VERBOSE=false + + configureUserIdentity || return 1 + configureModification "$@" || return $? + + local curlcmd="/usr/bin/curl \ + --silent --write-out 'HTTP_STATUS_CODE:%{http_code};TOTAL_TIME:%{time_total};CONTENT_TYPE:%{content_type}' \ + --insecure --cert "${DW_CURL_CERT}" --key "${DW_CURL_KEY_RSA}" --cacert "${DW_CURL_CA}" \ + --header 'Content-Type: application/xml;charset=UTF-8' --header 'Accept: application/xml' \ + ${DW_REQUEST_HEADERS} ${DW_CURL_DATA} -X PUT ${DW_MODIFICATION_URI}/${DW_MODIFICATION_SERVICE}/submit" + echo $curlcmd + + local response="$( eval "${curlcmd}" )" + local exitStatus=$? 
+ + if [ "${exitStatus}" != "0" ] ; then + echo + error "Curl command exited with non-zero status: ${exitStatus}" + echo + return 1 + fi + + parseQueryResponse + prettyPrintResponse + printCurlSummary + + return 0 +} + +function configureModification() { + + while [ "${1}" != "" ]; do + case "${1}" in + --uuid | -u) + DW_MODIFICATION_UUID="${2}" + shift + ;; + --type | -t) + DW_MODIFICATION_UUID_TYPE="${2}" + shift + ;; + --field | -f) + DW_MODIFICATION_FIELD="${2}" + shift + ;; + --oldvalue | -o) + DW_MODIFICATION_OLD_VALUE="${2}" + shift + ;; + --newvalue | -n) + DW_MODIFICATION_NEW_VALUE="${2}" + shift + ;; + --visibility | --vis) + DW_MODIFICATION_VIZ="${2}" + shift + ;; + --command | -c) + DW_MODIFICATION_COMMAND="${2}" + shift + ;; + --header | -H) + DW_REQUEST_HEADERS="${DW_REQUEST_HEADERS} ${1} '${2}'" + shift + ;; + --help | -h) + modificationHelp && return 1 + ;; + --verbose | -v) + DW_MODIFICATION_VERBOSE=true + ;; + *) + error "Invalid argument passed to $( basename "$0" ): ${1}" && return 1 + esac + shift + done + + [ -z "${DW_MODIFICATION_UUID}" ] && error "Uuid is required" && return 1 + [ -z "${DW_MODIFICATION_UUID_TYPE}" ] && error "Uuid type (field) is required" && return 1 + [ -z "${DW_MODIFICATION_FIELD}" ] && error "Field is required" && return 1 + [ -z "${DW_MODIFICATION_VIZ}" ] && error "Visibility is required" && return 1 + BODY="${DW_MODIFICATION_UUID}${DW_MODIFICATION_UUID_TYPE}${DW_MODIFICATION_COMMAND}${DW_MODIFICATION_FIELD}${DW_MODIFICATION_NEW_VALUE}$( xmlencode ${DW_MODIFICATION_VIZ} )testUserINSERTTESTABCPUBLIC" + if [ "${DW_MODIFICATION_COMMAND}" == "INSERT" ] ; then + [ -z "${DW_MODIFICATION_NEW_VALUE}" ] && error "New field value is required" && return 1 + elif [ "${DW_MODIFICATION_COMMAND}" == "REPLACE" ] ; then + [ -z "${DW_MODIFICATION_NEW_VALUE}" ] && error "New field value is required" && return 1 + elif [ "${DW_MODIFICATION_COMMAND}" == "UPDATE" ] ; then + [ -z "${DW_MODIFICATION_NEW_VALUE}" ] && error "New field value is 
required" && return 1 + [ -z "${DW_MODIFICATION_OLD_VALUE}" ] && error "Old field value is required" && return 1 + BODY="${DW_MODIFICATION_UUID}${DW_MODIFICATION_UUID_TYPE}${DW_MODIFICATION_COMMAND}${DW_MODIFICATION_FIELD}${DW_MODIFICATION_NEW_VALUE}${DW_MODIFICATION_OLD_VALUE}$( xmlencode ${DW_MODIFICATION_VIZ} )testUserINSERTTESTABCPUBLIC" + elif [ "${DW_MODIFICATION_COMMAND}" == "DELETE" ] ; then + [ -z "${DW_MODIFICATION_OLD_VALUE}" ] && error "Old field value is required" && return 1 + BODY="${DW_MODIFICATION_UUID}${DW_MODIFICATION_UUID_TYPE}${DW_MODIFICATION_COMMAND}${DW_MODIFICATION_FIELD}${DW_MODIFICATION_OLD_VALUE}$( xmlencode ${DW_MODIFICATION_VIZ} )testUserINSERTTESTABCPUBLIC" + else + error "Command set to ${DW_MODIFICATION_COMMAND}. Command must be one of INSERT, UPDATE, DELETE, or REPLACE." && return 1 + fi + + DW_CURL_DATA="-d '$BODY'" +} + +function modificationHelp() { + echo + echo " The $( printGreen "datawaveModification" ) shell function allows you submit modification requests on demand to DataWave's" + echo " Rest API and to inspect the results. 
It automatically configures curl and sets" + echo " reasonable defaults for most required query parameters" + echo + echo " Assuming the following modification entries are in the datawave.metadata:" + echo " REVIEW m:csv []" + echo " REVIEW m:enwiki []" + echo " REVIEW m:tvmaze []" + echo + echo " $( printGreen datawaveModification ) --uuid 09aa3d46-8aa0-49fb-8859-f3add48859b0 --type UUID --field REVIEW -c INSERT --newvalue 'I liked this one'" + echo " $( printGreen datawaveModification ) --uuid 09aa3d46-8aa0-49fb-8859-f3add48859b0 --type UUID --field REVIEW -c DELETE --oldvalue 'I liked this one'" + echo " $( printGreen datawaveModification ) --uuid 09aa3d46-8aa0-49fb-8859-f3add48859b0 --type UUID --field REVIEW -c REPLACE --newvalue 'I really liked this one'" + echo " $( printGreen datawaveModification ) --uuid 09aa3d46-8aa0-49fb-8859-f3add48859b0 --type UUID --field REVIEW -c UPDATE --oldvalue 'I liked this one' --newvalue 'I really liked this one'" + echo + echo " Required:" + echo + echo " $( printGreen "-u" ) | $( printGreen "--uuid" ) \"\"" + echo " The event uuid" + echo + echo " $( printGreen "-t" ) | $( printGreen "--type" ) \"\"" + echo " The event uuid type (field)" + echo + echo " $( printGreen "-f" ) | $( printGreen "--field" ) \"\"" + echo " The field to modify" + echo + echo " Optional:" + echo + echo " $( printGreen "-c" ) | $( printGreen "--command" ) " + echo " The command must be one of INSERT, UPDATE, DELETE, or REPLACE. Defaults to ${DW_MODIFICATION_COMMAND}" + echo + echo " $( printGreen "-n" ) | $( printGreen "--newvalue" ) " + echo " The old value (required for INSERT, UPDATE or REPLACE service)" + echo + echo " $( printGreen "-o" ) | $( printGreen "--oldvalue" ) " + echo " The old value (required for UPDATE or DELETE service)" + echo + echo " $( printGreen "--vis" ) | $( printGreen "--visibility" ) " + echo " Visibility expression to use when logging this query to Accumulo. 
Defaults to '${DW_MODIFICATION_LOG_VIZ}'" + echo + echo " $( printGreen "-H" ) | $( printGreen "--header" ) \"HeaderName: HeaderValue\"" + echo " Adds specified name/value pair to the curl command as an HTTP request header" + echo " Defaults: '$(printGreen "Content-Type"): application/x-www-form-urlencoded' and '$(printGreen "Accept"): application/json'" + echo + echo " $( printGreen "-x" ) | $( printGreen "--xml" )" + echo " Adds '$(printGreen "Accept"): application/xml' as an HTTP request header to override the default JSON" + echo + echo " $( printGreen "-v" ) | $( printGreen "--verbose" )" + echo " Display curl command. Otherwise, only query results and response metadata are displayed" + echo + echo " $( printGreen "-h" ) | $( printGreen "--help" )" + echo " Print this usage information and exit the script" + echo +} + +function listMutableFields() { + + # Reset + + DW_QUERY_RESPONSE_BODY="" + DW_QUERY_RESPONSE_CODE="" + DW_QUERY_RESPONSE_TYPE="" + DW_QUERY_TOTAL_TIME="" + DW_QUERY_EXTRA_PARAMS="" + + configureUserIdentity || return 1 + + local curlcmd="/usr/bin/curl \ + --silent --write-out 'HTTP_STATUS_CODE:%{http_code};TOTAL_TIME:%{time_total};CONTENT_TYPE:%{content_type}' \ + --insecure --cert "${DW_CURL_CERT}" --key "${DW_CURL_KEY_RSA}" --cacert "${DW_CURL_CA}" \ + -X GET ${DW_MODIFICATION_URI}/getMutableFieldList" + + local response="$( eval "${curlcmd}" )" + local exitStatus=$? 
+ if [ "${exitStatus}" != "0" ] ; then + error "Curl command exited with non-zero status: ${exitStatus}" + echo + return 1 + fi + + parseQueryResponse + prettyPrintResponse + printCurlSummary +} + +function reloadMutableFieldCache() { + + local curlcmd="/usr/bin/curl \ + --silent --write-out 'HTTP_STATUS_CODE:%{http_code};TOTAL_TIME:%{time_total};CONTENT_TYPE:%{content_type}' \ + --insecure --cert "${DW_CURL_CERT}" --key "${DW_CURL_KEY_RSA}" --cacert "${DW_CURL_CA}" \ + -X GET ${DW_MODIFICATION_URI}/AccumuloTableCache/reload/datawave.metadata" + local response="$( eval "${curlcmd}" )" + local exitStatus=$? + + if [ "${exitStatus}" != "0" ] ; then + error "Curl command exited with non-zero status: ${exitStatus}. Failed to update table cache: ${dwtable}" + return 1 + fi + + parseQueryResponse + prettyPrintResponse + printCurlSummary + + local curlcmd="/usr/bin/curl \ + --silent --write-out 'HTTP_STATUS_CODE:%{http_code};TOTAL_TIME:%{time_total};CONTENT_TYPE:%{content_type}' \ + --insecure --cert "${DW_CURL_CERT}" --key "${DW_CURL_KEY_RSA}" --cacert "${DW_CURL_CA}" \ + -X GET ${DW_MODIFICATION_URI}/reloadCache" + + local response="$( eval "${curlcmd}" )" + local exitStatus=$? + + if [ "${exitStatus}" != "0" ] ; then + error "Curl command exited with non-zero status: ${exitStatus}. 
Failed to update mutable fields cache: ${dwtable}" + return 1 + fi + + parseQueryResponse + prettyPrintResponse + printCurlSummary +} + +function listModificationConfiguration() { + + # Reset + + DW_QUERY_RESPONSE_BODY="" + DW_QUERY_RESPONSE_CODE="" + DW_QUERY_RESPONSE_TYPE="" + DW_QUERY_TOTAL_TIME="" + DW_QUERY_EXTRA_PARAMS="" + + configureUserIdentity || return 1 + + local curlcmd="/usr/bin/curl \ + --silent --write-out 'HTTP_STATUS_CODE:%{http_code};TOTAL_TIME:%{time_total};CONTENT_TYPE:%{content_type}' \ + --insecure --cert "${DW_CURL_CERT}" --key "${DW_CURL_KEY_RSA}" --cacert "${DW_CURL_CA}" \ + -X GET ${DW_MODIFICATION_URI}/listConfigurations" + + local response="$( eval "${curlcmd}" )" + local exitStatus=$? + if [ "${exitStatus}" != "0" ] ; then + error "Curl command exited with non-zero status: ${exitStatus}" + echo + return 1 + fi + + parseQueryResponse + prettyPrintResponse + printCurlSummary +} + diff --git a/contrib/datawave-quickstart/bin/query.sh b/contrib/datawave-quickstart/bin/query.sh index 7e44656010c..ced895a3d66 100644 --- a/contrib/datawave-quickstart/bin/query.sh +++ b/contrib/datawave-quickstart/bin/query.sh @@ -400,7 +400,7 @@ function getNextPage() { function parseQueryResponse() { DW_QUERY_RESPONSE_BODY=$( echo ${response} | sed -e 's/HTTP_STATUS_CODE\:.*//g' ) DW_QUERY_RESPONSE_CODE=$( echo ${response} | tr -d '\n' | sed -e 's/.*HTTP_STATUS_CODE://' | sed -e 's/;TOTAL_TIME\:.*//' ) - DW_QUERY_RESPONSE_TYPE=$( echo ${response} | tr -d '\n' | sed -e 's/.*CONTENT_TYPE://' ) + DW_QUERY_RESPONSE_TYPE=$( echo ${response} | tr -d '\n' | sed -e 's/.*CONTENT_TYPE://' | sed -e 's/;.*//' ) DW_QUERY_TOTAL_TIME=$( echo ${response} | tr -d '\n' | sed -e 's/.*TOTAL_TIME://' | sed -e 's/;CONTENT_TYPE\:.*//' ) } diff --git a/contrib/datawave-quickstart/bin/services/accumulo/bootstrap.sh b/contrib/datawave-quickstart/bin/services/accumulo/bootstrap.sh index ede63d2d40d..66faa708157 100644 --- 
a/contrib/datawave-quickstart/bin/services/accumulo/bootstrap.sh +++ b/contrib/datawave-quickstart/bin/services/accumulo/bootstrap.sh @@ -42,6 +42,7 @@ syncLimit=5 clientPort=2181 dataDir=${DW_CLOUD_DATA}/zookeeper maxClientCnxns=100 +4lw.commands.whitelist=ruok,wchs admin.serverPort=8089 admin.enableServer=false" diff --git a/contrib/datawave-quickstart/bin/services/accumulo/install.sh b/contrib/datawave-quickstart/bin/services/accumulo/install.sh index ac1c4097dad..b73e5494045 100755 --- a/contrib/datawave-quickstart/bin/services/accumulo/install.sh +++ b/contrib/datawave-quickstart/bin/services/accumulo/install.sh @@ -26,6 +26,8 @@ else mkdir "${DW_ACCUMULO_SERVICE_DIR}/${DW_ZOOKEEPER_BASEDIR}" || fatal "Failed to create ZooKeeper base directory" # Extract ZooKeeper, set symlink, and verify... tar xf "${DW_ACCUMULO_SERVICE_DIR}/${DW_ZOOKEEPER_DIST}" -C "${DW_ACCUMULO_SERVICE_DIR}/${DW_ZOOKEEPER_BASEDIR}" --strip-components=1 || fatal "Failed to extract ZooKeeper tarball" + #symlink the zookeeper jars if needed + ln -s ${DW_ACCUMULO_SERVICE_DIR}/${DW_ZOOKEEPER_BASEDIR}/lib/* ${DW_ACCUMULO_SERVICE_DIR}/${DW_ZOOKEEPER_BASEDIR} ( cd "${DW_CLOUD_HOME}" && ln -s "bin/services/accumulo/${DW_ZOOKEEPER_BASEDIR}" "${DW_ZOOKEEPER_SYMLINK}" ) || fatal "Failed to set ZooKeeper symlink" zookeeperIsInstalled || fatal "ZooKeeper was not installed" @@ -73,7 +75,7 @@ sed -i'' -e "s~\(export ACCUMULO_MONITOR_OPTS=\).*$~\1\"\${POLICY} -Xmx2g -Xms51 # Update Accumulo bind host if it's not set to localhost if [ "${DW_ACCUMULO_BIND_HOST}" != "localhost" ] ; then - sed -i'' -e "s/localhost/${DW_ACCUMULO_BIND_HOST}/g" ${DW_ACCUMULO_CONF_DIR}/cluster.yaml + sed -i'' -e "s/localhost/${DW_ACCUMULO_BIND_HOST}/g" ${DW_ACCUMULO_CONF_DIR}/cluster.yaml fi # Write zoo.cfg file using our settings in DW_ZOOKEEPER_CONF diff --git a/contrib/datawave-quickstart/bin/services/hadoop/bootstrap.sh b/contrib/datawave-quickstart/bin/services/hadoop/bootstrap.sh index 2ba9cf12d4c..5137be4c5a3 100644 --- 
a/contrib/datawave-quickstart/bin/services/hadoop/bootstrap.sh +++ b/contrib/datawave-quickstart/bin/services/hadoop/bootstrap.sh @@ -22,8 +22,8 @@ DW_HADOOP_RESOURCE_MANAGER_ADDRESS_SERVER="${DW_BIND_HOST}:8050" DW_HADOOP_RESOURCE_MANAGER_ADDRESS_CLIENT="${DW_BIND_HOST}:8050" if [ "${DW_BIND_HOST}" == "0.0.0.0" ] ; then - DW_HADOOP_DFS_URI_CLIENT="hdfs://localhost:9000" - DW_HADOOP_RESOURCE_MANAGER_ADDRESS_CLIENT="localhost:8050" + DW_HADOOP_DFS_URI_CLIENT="hdfs://localhost:9000" + DW_HADOOP_RESOURCE_MANAGER_ADDRESS_CLIENT="localhost:8050" fi HADOOP_HOME="${DW_CLOUD_HOME}/${DW_HADOOP_SYMLINK}" diff --git a/contrib/datawave-quickstart/docker/datawave-bootstrap.sh b/contrib/datawave-quickstart/docker/datawave-bootstrap.sh index fbabc9978e4..5cb868b5aff 100755 --- a/contrib/datawave-quickstart/docker/datawave-bootstrap.sh +++ b/contrib/datawave-quickstart/docker/datawave-bootstrap.sh @@ -4,13 +4,13 @@ source ~/.bashrc # If DW_CONTAINER_HOST is defined update Accumulo and Hadoop bind hosts if [ ! 
-z "${DW_CONTAINER_HOST}" ] && [ "${DW_CONTAINER_HOST}" != "localhost" ] ; then - # Update Accumulo bind hosts - sed -i'' -e "s/localhost/${DW_CONTAINER_HOST}/g" ${ACCUMULO_HOME}/conf/cluster.yaml + # Update Accumulo bind hosts + sed -i'' -e "s/localhost/${DW_CONTAINER_HOST}/g" ${ACCUMULO_HOME}/conf/cluster.yaml - # Create hadoop client configs - mkdir -p ${HADOOP_HOME}/client/conf - cp -r ${HADOOP_CONF_DIR}/*-site.xml ${HADOOP_HOME}/client/conf - sed -i'' -e "s/${DW_BIND_HOST}/${DW_CONTAINER_HOST}/g" ${HADOOP_HOME}/client/conf/*-site.xml + # Create hadoop client configs + mkdir -p ${HADOOP_HOME}/client/conf + cp -r ${HADOOP_CONF_DIR}/*-site.xml ${HADOOP_HOME}/client/conf + sed -i'' -e "s/${DW_BIND_HOST}/${DW_CONTAINER_HOST}/g" ${HADOOP_HOME}/client/conf/*-site.xml fi START_AS_DAEMON=true @@ -29,7 +29,7 @@ do --accumulo) START_ACCUMULO=true ;; - --web) + --web) START_WEB=true ;; --webdebug) diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index 597c9f962f0..772c424f5e8 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT ../../../pom.xml quickstart @@ -254,6 +254,7 @@ 8589934592 + none ${project.basedir}/../../../ none ${project.basedir}/Dockerfile diff --git a/contrib/datawave-utils b/contrib/datawave-utils new file mode 160000 index 00000000000..8438bce9c0b --- /dev/null +++ b/contrib/datawave-utils @@ -0,0 +1 @@ +Subproject commit 8438bce9c0ba7f163704838d52927d547ee6efcf diff --git a/core/base-rest-responses b/core/base-rest-responses index d5029a3f83a..f0941b049b0 160000 --- a/core/base-rest-responses +++ b/core/base-rest-responses @@ -1 +1 @@ -Subproject commit d5029a3f83a20835725bf5f15c24d6b3c775e5a9 +Subproject commit f0941b049b0e1a6cef6dd82dd9f2fb86fbefbe78 diff --git a/core/cached-results/pom.xml b/core/cached-results/pom.xml new file mode 100644 index 
00000000000..591bd5d39ce --- /dev/null +++ b/core/cached-results/pom.xml @@ -0,0 +1,48 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-cached-results + ${project.artifactId} + + + + + gov.nsa.datawave.microservice + base-rest-responses + + + gov.nsa.datawave.microservice + query-api + + + jakarta.validation + jakarta.validation-api + + + + + gov.nsa.datawave.microservice + type-utils + + + gov.nsa.datawave.webservices + datawave-ws-client + ${project.version} + + + org.slf4j + * + + + log4j + log4j + + + + + diff --git a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowImpl.java b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowImpl.java similarity index 99% rename from web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowImpl.java rename to core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowImpl.java index 7fa25b368f3..ab4673fb274 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowImpl.java +++ b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowImpl.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.cachedresults; +package datawave.core.query.cachedresults; import java.util.ArrayList; import java.util.Collection; @@ -20,6 +20,7 @@ import datawave.data.type.Type; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctionsFactory; +import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.data.ObjectSizeOf; import datawave.webservice.query.util.TypedValue; @@ -44,8 +45,6 @@ public class CacheableQueryRowImpl extends CacheableQueryRow implements ObjectSi private String queryOrigin = null; private String resultOrigin = null; - private static final MarkingFunctions markingFunctions = 
MarkingFunctionsFactory.createMarkingFunctions(); - public void addColumn(String columnName, String columnValueString, Map markings, String columnVisibility, Long timestamp) { addColumn(columnName, new TypedValue(columnValueString), markings, columnVisibility, timestamp); } diff --git a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowReader.java b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowReader.java similarity index 95% rename from web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowReader.java rename to core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowReader.java index 7b13334b102..ddcbf921264 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRowReader.java +++ b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CacheableQueryRowReader.java @@ -1,5 +1,6 @@ -package datawave.webservice.query.cachedresults; +package datawave.core.query.cachedresults; +import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.HashMap; @@ -8,20 +9,21 @@ import java.util.Set; import java.util.TreeSet; -import javax.sql.rowset.CachedRowSet; - import org.apache.log4j.Logger; import datawave.marking.MarkingFunctions; +import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.result.event.ResponseObjectFactory; public class CacheableQueryRowReader { private static Logger log = Logger.getLogger(CacheableQueryRowReader.class); - public static CacheableQueryRow createRow(CachedRowSet cachedRowSet, Set fixedFieldsInEvent, ResponseObjectFactory responseObjectFactory) { + public static CacheableQueryRow createRow(ResultSet cachedRowSet, Set fixedFieldsInEvent, ResponseObjectFactory responseObjectFactory, + MarkingFunctions markingFunctions) { CacheableQueryRow cqfc = 
responseObjectFactory.getCacheableQueryRow(); + cqfc.setMarkingFunctions(markingFunctions); ResultSetMetaData metadata; try { diff --git a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsParameters.java b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CachedResultsQueryParameters.java similarity index 96% rename from web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsParameters.java rename to core/cached-results/src/main/java/datawave/core/query/cachedresults/CachedResultsQueryParameters.java index 3c5cc6cef89..59d12f3f225 100644 --- a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsParameters.java +++ b/core/cached-results/src/main/java/datawave/core/query/cachedresults/CachedResultsQueryParameters.java @@ -1,4 +1,4 @@ -package datawave.webservice.results.cached; +package datawave.core.query.cachedresults; import java.util.Arrays; import java.util.List; @@ -7,11 +7,10 @@ import com.google.common.base.Preconditions; +import datawave.microservice.query.QueryParameters; import datawave.validation.ParameterValidator; -import datawave.webservice.query.QueryParameters; - -public class CachedResultsParameters implements ParameterValidator { +public class CachedResultsQueryParameters implements ParameterValidator { public static final String QUERY_ID = "queryId"; public static final String ALIAS = "alias"; public static final String VIEW = "view"; diff --git a/core/common-util/pom.xml b/core/common-util/pom.xml new file mode 100644 index 00000000000..5846437d977 --- /dev/null +++ b/core/common-util/pom.xml @@ -0,0 +1,121 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-common-util + ${project.artifactId} + + + + + + + gov.nsa.datawave.core + datawave-core-connection-pool + ${project.version} + + + gov.nsa.datawave.microservice + accumulo-utils + + + gov.nsa.datawave.microservice + 
authorization-api + + + org.apache.accumulo + accumulo-core + provided + + + javax.servlet + servlet-api + + + + + + + + true + src/main/resources + + source-templates/** + + + + test-classes + true + src/test/resources + + + + + org.apache.maven.plugins + maven-jar-plugin + + + META-INF/beans.xml + META-INF/jboss-ejb3.xml + + + + + jboss + + jar + + + jboss + + + + + + + maven-resources-plugin + + + copy-templated-sources + + copy-resources + + validate + + ${project.build.directory}/generated-sources/templated-sources + + + src/main/resources/source-templates + true + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.3.0 + + + add-source + + add-source + + generate-sources + + + target/generated-sources/templated-sources + + + + + + + + diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/logging/ThreadConfigurableLogger.java b/core/common-util/src/main/java/datawave/core/common/logging/ThreadConfigurableLogger.java similarity index 99% rename from web-services/common-util/src/main/java/datawave/webservice/common/logging/ThreadConfigurableLogger.java rename to core/common-util/src/main/java/datawave/core/common/logging/ThreadConfigurableLogger.java index 0e65d99c07f..64a3fbe64e1 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/logging/ThreadConfigurableLogger.java +++ b/core/common-util/src/main/java/datawave/core/common/logging/ThreadConfigurableLogger.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.logging; +package datawave.core.common.logging; import java.util.HashMap; import java.util.Map; diff --git a/web-services/common-util/src/main/java/datawave/webservice/util/EnvProvider.java b/core/common-util/src/main/java/datawave/core/common/util/EnvProvider.java similarity index 91% rename from web-services/common-util/src/main/java/datawave/webservice/util/EnvProvider.java rename to core/common-util/src/main/java/datawave/core/common/util/EnvProvider.java index 99d2e40b6b8..2b08e99c545 
100644 --- a/web-services/common-util/src/main/java/datawave/webservice/util/EnvProvider.java +++ b/core/common-util/src/main/java/datawave/core/common/util/EnvProvider.java @@ -1,9 +1,9 @@ -package datawave.webservice.util; +package datawave.core.common.util; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Logger; -import datawave.webservice.common.logging.ThreadConfigurableLogger; +import datawave.core.common.logging.ThreadConfigurableLogger; public class EnvProvider { diff --git a/core/common-util/src/main/java/datawave/core/query/cache/ResultsPage.java b/core/common-util/src/main/java/datawave/core/query/cache/ResultsPage.java new file mode 100644 index 00000000000..3107497359f --- /dev/null +++ b/core/common-util/src/main/java/datawave/core/query/cache/ResultsPage.java @@ -0,0 +1,45 @@ +package datawave.core.query.cache; + +import java.util.ArrayList; +import java.util.List; + +/** + * + */ +public class ResultsPage { + public enum Status { + NONE, PARTIAL, COMPLETE + } + + private List results; + private Status status; + + public ResultsPage() { + this(new ArrayList<>()); + } + + public ResultsPage(List results) { + this(results, Status.COMPLETE); + } + + public ResultsPage(List results, Status status) { + this.results = results; + this.status = status; + } + + public Status getStatus() { + return status; + } + + public void setStatus(Status status) { + this.status = status; + } + + public List getResults() { + return results; + } + + public void setResults(List results) { + this.results = results; + } +} diff --git a/web-services/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java b/core/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java similarity index 97% rename from web-services/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java rename to core/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java index 
8e798e79be6..1c43eb931da 100644 --- a/web-services/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java +++ b/core/common-util/src/main/java/datawave/security/authorization/DatawavePrincipal.java @@ -17,7 +17,7 @@ import javax.xml.bind.annotation.XmlType; import datawave.security.authorization.DatawaveUser.UserType; -import datawave.security.util.DnUtils; +import datawave.security.util.ProxiedEntityUtils; /** * A {@link Principal} that represents a set of proxied {@link DatawaveUser}s. For example, this proxied user could represent a GUI server acting on behalf of a @@ -108,14 +108,17 @@ static protected List orderProxiedUsers(List datawav return users; } + @Override public Collection getProxiedUsers() { return Collections.unmodifiableCollection(this.proxiedUsers); } + @Override public DatawaveUser getPrimaryUser() { return primaryUser; } + @Override public Collection> getAuthorizations() { // @formatter:off return Collections.unmodifiableCollection( @@ -125,6 +128,7 @@ public Collection> getAuthorizations() { // @formatter:on } + @Override public String[] getDNs() { // @formatter:off return DatawavePrincipal.orderProxiedUsers(this.proxiedUsers).stream() @@ -144,14 +148,16 @@ public String getName() { return this.username; } + @Override public String getShortName() { - return DnUtils.getShortName(getPrimaryUser().getName()); + return ProxiedEntityUtils.getShortName(getPrimaryUser().getName()); } public SubjectIssuerDNPair getUserDN() { return getPrimaryUser().getDn(); } + @Override public List getProxyServers() { // @formatter:off diff --git a/core/common-util/src/main/resources/META-INF/beans.xml b/core/common-util/src/main/resources/META-INF/beans.xml new file mode 100644 index 00000000000..4ca201f8ff2 --- /dev/null +++ b/core/common-util/src/main/resources/META-INF/beans.xml @@ -0,0 +1,9 @@ + + + + \ No newline at end of file diff --git a/core/common-util/src/main/resources/META-INF/jboss-ejb3.xml 
b/core/common-util/src/main/resources/META-INF/jboss-ejb3.xml new file mode 100644 index 00000000000..8cf49db8c87 --- /dev/null +++ b/core/common-util/src/main/resources/META-INF/jboss-ejb3.xml @@ -0,0 +1,16 @@ + + + + + + + * + datawave + + + + \ No newline at end of file diff --git a/web-services/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java b/core/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java similarity index 99% rename from web-services/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java rename to core/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java index 7eb19db2923..6cc9445d7c2 100644 --- a/web-services/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java +++ b/core/common-util/src/main/resources/source-templates/datawave/security/authorization/package-info.java @@ -4,4 +4,3 @@ import javax.xml.bind.annotation.XmlNs; import javax.xml.bind.annotation.XmlNsForm; import javax.xml.bind.annotation.XmlSchema; - diff --git a/web-services/common-util/src/test/java/datawave/webservice/util/EnvProviderTest.java b/core/common-util/src/test/java/datawave/core/common/util/EnvProviderTest.java similarity index 95% rename from web-services/common-util/src/test/java/datawave/webservice/util/EnvProviderTest.java rename to core/common-util/src/test/java/datawave/core/common/util/EnvProviderTest.java index bab9edeeda9..b792f0721e2 100644 --- a/web-services/common-util/src/test/java/datawave/webservice/util/EnvProviderTest.java +++ b/core/common-util/src/test/java/datawave/core/common/util/EnvProviderTest.java @@ -1,4 +1,4 @@ -package datawave.webservice.util; +package datawave.core.common.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; diff --git a/core/common/pom.xml b/core/common/pom.xml 
new file mode 100644 index 00000000000..01f207a1580 --- /dev/null +++ b/core/common/pom.xml @@ -0,0 +1,79 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-common + ${project.artifactId} + + + + + org.apache.accumulo + accumulo-core + ${version.accumulo} + + + org.slf4j + * + + + commons-logging + commons-logging + + + junit + junit + + + log4j + log4j + + + javax.servlet + servlet-api + + + + + org.springframework + spring-core + ${version.spring} + + + + + + gov.nsa.datawave.webservices + datawave-ws-client + ${project.version} + + + org.apache.accumulo + accumulo-core + + + org.springframework + spring-core + provided + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.vintage + junit-vintage-engine + test + + + diff --git a/web-services/common/src/main/java/datawave/webservice/common/audit/PrivateAuditConstants.java b/core/common/src/main/java/datawave/core/common/audit/PrivateAuditConstants.java similarity index 80% rename from web-services/common/src/main/java/datawave/webservice/common/audit/PrivateAuditConstants.java rename to core/common/src/main/java/datawave/core/common/audit/PrivateAuditConstants.java index c11f969b166..1cffc3e0177 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/audit/PrivateAuditConstants.java +++ b/core/common/src/main/java/datawave/core/common/audit/PrivateAuditConstants.java @@ -1,6 +1,7 @@ -package datawave.webservice.common.audit; +package datawave.core.common.audit; -import javax.ws.rs.core.MultivaluedMap; +import java.util.List; +import java.util.Map; /** * Constants marking private parameters that are computed internally and then added at runtime to the incoming query parameters for the purposes of passing them @@ -15,7 +16,7 @@ public class PrivateAuditConstants { public static final String USER_DN = PREFIX + "userDn"; public static final String SELECTORS = PREFIX + "selectors"; - public static void 
stripPrivateParameters(MultivaluedMap queryParameters) { + public static void stripPrivateParameters(Map> queryParameters) { queryParameters.entrySet().removeIf(entry -> entry.getKey().startsWith(PREFIX)); } } diff --git a/core/common/src/main/java/datawave/core/common/edgedictionary/EdgeDictionaryProvider.java b/core/common/src/main/java/datawave/core/common/edgedictionary/EdgeDictionaryProvider.java new file mode 100644 index 00000000000..b473fbb8051 --- /dev/null +++ b/core/common/src/main/java/datawave/core/common/edgedictionary/EdgeDictionaryProvider.java @@ -0,0 +1,9 @@ +package datawave.core.common.edgedictionary; + +import datawave.microservice.query.Query; +import datawave.webservice.dictionary.edge.EdgeDictionaryBase; +import datawave.webservice.dictionary.edge.MetadataBase; + +public interface EdgeDictionaryProvider { + EdgeDictionaryBase> getEdgeDictionary(Query settings, String metadataTableName); +} diff --git a/web-services/common/src/main/java/datawave/webservice/common/extjs/ExtJsResponse.java b/core/common/src/main/java/datawave/core/common/extjs/ExtJsResponse.java similarity index 95% rename from web-services/common/src/main/java/datawave/webservice/common/extjs/ExtJsResponse.java rename to core/common/src/main/java/datawave/core/common/extjs/ExtJsResponse.java index e9760dcc039..d92feee8cbb 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/extjs/ExtJsResponse.java +++ b/core/common/src/main/java/datawave/core/common/extjs/ExtJsResponse.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.extjs; +package datawave.core.common.extjs; import java.util.List; diff --git a/core/connection-pool/pom.xml b/core/connection-pool/pom.xml new file mode 100644 index 00000000000..bbeab199f17 --- /dev/null +++ b/core/connection-pool/pom.xml @@ -0,0 +1,161 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-connection-pool + ${project.artifactId} + + 2.1.8 + + + + + dnsjava + dnsjava + 
${version.dnsjava} + + + + + + com.fasterxml.jackson.module + jackson-module-jaxb-annotations + + + commons-configuration + commons-configuration + + + commons-lang + commons-lang + + + dnsjava + dnsjava + + + gov.nsa.datawave + datawave-in-memory-accumulo + + + gov.nsa.datawave.microservice + accumulo-api + + + junit + junit + + + org.apache.curator + curator-recipes + + + org.apache.curator + curator-test + + + org.easymock + easymock + + + com.fasterxml.woodstox + woodstox-core + provided + + + org.apache.commons + commons-configuration2 + provided + + + org.apache.hadoop.thirdparty + hadoop-shaded-guava + provided + + + org.powermock + powermock-reflect + test + + + + ${project.artifactId} + + + true + src/main/resources + + source-templates/** + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + META-INF/beans.xml + META-INF/jboss-ejb3.xml + + + + + jboss + + jar + + + jboss + + + + + + + maven-resources-plugin + + + copy-templated-sources + + copy-resources + + validate + + ${project.build.directory}/generated-sources/templated-sources + + + src/main/resources/source-templates + true + + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.3.0 + + + add-source + + add-source + + generate-sources + + + target/generated-sources/templated-sources + + + + + + + + diff --git a/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCache.java b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCache.java new file mode 100644 index 00000000000..674dfe3183c --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCache.java @@ -0,0 +1,28 @@ +package datawave.core.common.cache; + +import java.util.List; + +import org.apache.accumulo.core.client.security.tokens.PasswordToken; + +import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.result.TableCacheDescription; + +/** + * 
Object that caches data from Accumulo tables. + */ +public interface AccumuloTableCache extends AutoCloseable { + + String MOCK_USERNAME = ""; + PasswordToken MOCK_PASSWORD = new PasswordToken(new byte[0]); + + void setConnectionFactory(AccumuloConnectionFactory connectionFactory); + + InMemoryInstance getInstance(); + + void submitReloadTasks(); + + public void reloadTableCache(String tableName); + + public List getTableCaches(); +} diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCache.java b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheImpl.java similarity index 50% rename from web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCache.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheImpl.java index 9ffe3cf48b8..582e72c4e17 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCache.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheImpl.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.util.ArrayList; import java.util.Date; @@ -6,125 +6,78 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; +import java.util.concurrent.LinkedBlockingDeque; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.concurrent.TimeUnit; -import javax.annotation.PostConstruct; -import javax.annotation.PreDestroy; -import javax.annotation.Resource; -import javax.annotation.security.DeclareRoles; -import javax.annotation.security.RolesAllowed; -import javax.annotation.security.RunAs; -import javax.ejb.EJBException; -import javax.ejb.LocalBean; -import javax.ejb.Lock; -import javax.ejb.LockType; -import javax.ejb.Schedule; -import javax.ejb.Singleton; -import javax.ejb.Startup; 
-import javax.enterprise.concurrent.ManagedExecutorService; -import javax.inject.Inject; -import javax.interceptor.Interceptors; -import javax.jms.Destination; -import javax.jms.JMSContext; -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.PathParam; -import javax.ws.rs.Produces; - -import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.shared.SharedCountListener; import org.apache.curator.framework.recipes.shared.SharedCountReader; import org.apache.curator.framework.state.ConnectionState; -import org.apache.deltaspike.core.api.config.ConfigProperty; -import org.apache.deltaspike.core.api.exclude.Exclude; import org.apache.log4j.Logger; -import org.jboss.resteasy.annotations.GZIP; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; import datawave.accumulo.inmemory.InMemoryInstance; -import datawave.annotation.Required; -import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.interceptor.RequiredInterceptor; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.exception.DatawaveWebApplicationException; -import datawave.webservice.common.result.AccumuloTableCacheStatus; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.result.VoidResponse; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.result.TableCacheDescription; /** * Object that caches data from Accumulo tables. 
*/ -@Path("/Common/AccumuloTableCache") -@RunAs("InternalUser") -@RolesAllowed({"AuthorizedUser", "AuthorizedQueryServer", "AuthorizedServer", "InternalUser", "Administrator", "JBossAdministrator"}) -@DeclareRoles({"AuthorizedUser", "AuthorizedQueryServer", "AuthorizedServer", "InternalUser", "Administrator", "JBossAdministrator"}) -@LocalBean -@Startup -// tells the container to initialize on startup -@Singleton -// this is a singleton bean in the container -@Lock(LockType.READ) -@Exclude(ifProjectStage = DatawaveEmbeddedProjectStageHolder.DatawaveEmbedded.class) -public class AccumuloTableCache { - +public class AccumuloTableCacheImpl implements AccumuloTableCache { private final Logger log = Logger.getLogger(this.getClass()); - @Inject - private JMSContext jmsContext; - - @Resource(mappedName = "java:/topic/AccumuloTableCache") - private Destination cacheTopic; - - @Resource - private ManagedExecutorService executorService; - - @Inject - private AccumuloTableCacheConfiguration accumuloTableCacheConfiguration; - - @Inject - @ConfigProperty(name = "dw.cacheCoordinator.evictionReaperIntervalSeconds", defaultValue = "30") - private int evictionReaperIntervalInSeconds; - @Inject - @ConfigProperty(name = "dw.cacheCoordinator.numLocks", defaultValue = "300") - private int numLocks; - @Inject - @ConfigProperty(name = "dw.cacheCoordinator.maxRetries", defaultValue = "10") - private int maxRetries; - - public static final String MOCK_USERNAME = ""; - public static final PasswordToken MOCK_PASSWORD = new PasswordToken(new byte[0]); - + private final ExecutorService executorService; + private final AccumuloTableCacheProperties accumuloTableCacheProperties; private InMemoryInstance instance; private Map details; private List cacheCoordinators; private boolean connectionFactoryProvided = false; - public AccumuloTableCache() { - log.debug("Called AccumuloTableCacheBean and accumuloTableCacheConfiguration = " + accumuloTableCacheConfiguration); + public 
AccumuloTableCacheImpl(ExecutorService executorService, AccumuloTableCacheProperties accumuloTableCacheProperties) { + log.debug("Called AccumuloTableCacheImpl with accumuloTableCacheConfiguration = " + accumuloTableCacheProperties); + this.executorService = executorService; + this.accumuloTableCacheProperties = accumuloTableCacheProperties; + setup(); + } + + public AccumuloTableCacheImpl(AccumuloTableCacheProperties accumuloTableCacheProperties) { + this(getThreadPoolExecutor(accumuloTableCacheProperties), accumuloTableCacheProperties); } - @PostConstruct - private void setup() { - log.debug("accumuloTableCacheConfiguration was setup as: " + accumuloTableCacheConfiguration); + private static ExecutorService getThreadPoolExecutor(AccumuloTableCacheProperties accumuloTableCacheProperties) { + return new ThreadPoolExecutor(Math.max(accumuloTableCacheProperties.getTableNames().size() / 2, 1), + Math.max(accumuloTableCacheProperties.getTableNames().size(), 1), 5, TimeUnit.MINUTES, new LinkedBlockingDeque<>(), + new ThreadFactoryBuilder().setNameFormat("TableCacheReloader %d").build()); + } + public void setup() { + log.debug("accumuloTableCacheConfiguration was setup as: " + accumuloTableCacheProperties); instance = new InMemoryInstance(); details = new HashMap<>(); cacheCoordinators = new ArrayList<>(); - String zookeepers = accumuloTableCacheConfiguration.getZookeepers(); + String zookeepers = accumuloTableCacheProperties.getZookeepers(); + + for (String tableName : accumuloTableCacheProperties.getTableNames()) { + BaseTableCache detail = new BaseTableCache(); + detail.setTableName(tableName); + detail.setConnectionPoolName(accumuloTableCacheProperties.getPoolName()); + detail.setReloadInterval(accumuloTableCacheProperties.getReloadInterval()); - for (Entry entry : accumuloTableCacheConfiguration.getCaches().entrySet()) { - final String tableName = entry.getKey(); - TableCache detail = entry.getValue(); detail.setInstance(instance); - final SharedCacheCoordinator 
cacheCoordinator = new SharedCacheCoordinator(tableName, zookeepers, evictionReaperIntervalInSeconds, numLocks, - maxRetries); + final SharedCacheCoordinator cacheCoordinator = new SharedCacheCoordinator(tableName, zookeepers, + accumuloTableCacheProperties.getEvictionReaperIntervalInSeconds(), accumuloTableCacheProperties.getNumLocks(), + accumuloTableCacheProperties.getMaxRetries()); cacheCoordinators.add(cacheCoordinator); try { cacheCoordinator.start(); } catch (Exception e) { - throw new EJBException("Error starting AccumuloTableCache", e); + throw new RuntimeException("Error starting AccumuloTableCache", e); } try { @@ -165,24 +118,24 @@ public void countHasChanged(SharedCountReader sharedCount, int newCount) throws throw new RuntimeException("table:" + tableName + " Unable to create shared counters: " + e.getMessage(), e); } detail.setWatcher(cacheCoordinator); - details.put(entry.getKey(), entry.getValue()); - + details.put(tableName, detail); } } public void setConnectionFactory(AccumuloConnectionFactory connectionFactory) { - for (Entry entry : accumuloTableCacheConfiguration.getCaches().entrySet()) { + for (Entry entry : details.entrySet()) { TableCache detail = entry.getValue(); detail.setConnectionFactory(connectionFactory); } connectionFactoryProvided = true; } + @Override public InMemoryInstance getInstance() { return this.instance; } - @Schedule(hour = "*", minute = "*", second = "1", persistent = false) + @Override public void submitReloadTasks() { if (!connectionFactoryProvided) { log.trace("NOT submitting reload tasks since our connection factory hasn't been provided yet."); @@ -217,8 +170,8 @@ public void submitReloadTasks() { } } - @PreDestroy - public void stop() { + @Override + public void close() { for (Entry entry : details.entrySet()) { Future ref = entry.getValue().getReference(); if (null != ref) @@ -230,47 +183,25 @@ public void stop() { } /** - * JBossAdministrator or Administrator credentials required. 
+ * Reload a table cache * * @param tableName * the name of the table for which the cached version is to be reloaded - * @return datawave.webservice.result.VoidResponse - * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user - * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain - * @RequestHeader query-session-id session id value used for load balancing purposes. query-session-id can be placed in the request in a Cookie header or as - * a query parameter - * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization - * - * @HTTP 200 success - * @HTTP 404 queries not found using {@code id} - * @HTTP 500 internal server error */ - @GET - @Path("/reload/{tableName}") - @Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", - "application/x-protostuff"}) - @GZIP - @Interceptors(RequiredInterceptor.class) - public VoidResponse reloadCache(@Required("tableName") @PathParam("tableName") String tableName) { - VoidResponse response = new VoidResponse(); + @Override + public void reloadTableCache(String tableName) { if (null == details.get(tableName)) { - return response; + return; } + log.debug("Reloading table cache for " + tableName); // send an eviction notice to the cluster try { details.get(tableName).getWatcher().incrementCounter(tableName); } catch (Exception e) { - response.addException(new QueryException(e).getBottomQueryException()); - throw new DatawaveWebApplicationException(e, response); - } - try { - this.sendCacheReloadMessage(tableName); - } catch (Exception e) { - log.error("Unable to send message about cache reload"); + throw new RuntimeException(e); } handleReload(tableName); handleReloadTypeMetadata(tableName); - return response; } private void 
handleReloadTypeMetadata(String tableName) { @@ -291,26 +222,13 @@ private void handleReload(String tableName) { } /** - * JBossAdministrator or Administrator credentials required. - * - * @return datawave.webservice.common.result.AccumuloTableCacheStatus - * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user - * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain - * @RequestHeader query-session-id session id value used for load balancing purposes. query-session-id can be placed in the request in a Cookie header or as - * a query parameter - * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization - * - * @HTTP 200 success + * Get the table caches */ - @GET - @Path("/") - @Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", - "application/x-protostuff", "text/html"}) - @GZIP - public AccumuloTableCacheStatus getStatus() { - AccumuloTableCacheStatus response = new AccumuloTableCacheStatus(); + @Override + public List getTableCaches() { + List tableCaches = new ArrayList<>(); for (Entry entry : details.entrySet()) { - datawave.webservice.common.result.TableCache t = new datawave.webservice.common.result.TableCache(); + TableCacheDescription t = new TableCacheDescription(); t.setTableName(entry.getValue().getTableName()); t.setConnectionPoolName(entry.getValue().getConnectionPoolName()); t.setAuthorizations(entry.getValue().getAuths()); @@ -318,14 +236,8 @@ public AccumuloTableCacheStatus getStatus() { t.setMaxRows(entry.getValue().getMaxRows()); t.setLastRefresh(entry.getValue().getLastRefresh()); t.setCurrentlyRefreshing((entry.getValue().getReference() != null)); - response.getCaches().add(t); + tableCaches.add(t); } - return response; - } - - private void 
sendCacheReloadMessage(String tableName) { - log.warn("table:" + tableName + " sending cache reload message about table " + tableName); - - jmsContext.createProducer().send(cacheTopic, tableName); + return tableCaches; } } diff --git a/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheProperties.java b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheProperties.java new file mode 100644 index 00000000000..3be5b90b666 --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/AccumuloTableCacheProperties.java @@ -0,0 +1,129 @@ +package datawave.core.common.cache; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +public class AccumuloTableCacheProperties { + private String zookeepers = null; + private List tableNames = new ArrayList<>(); + private String poolName; + private long reloadInterval; + private int evictionReaperIntervalInSeconds; + private int numLocks; + private int maxRetries; + private long tableCacheReloadTaskLease = TimeUnit.MINUTES.toMillis(10); + private TimeUnit tableCacheReloadTaskLeaseTimeUnit = TimeUnit.MILLISECONDS; + + public String getZookeepers() { + return zookeepers; + } + + public AccumuloTableCacheProperties withZookeepers(String zookeepers) { + this.zookeepers = zookeepers; + return this; + } + + public List getTableNames() { + return tableNames; + } + + public AccumuloTableCacheProperties withTableNames(List tableNames) { + this.tableNames = tableNames; + return this; + } + + public String getPoolName() { + return poolName; + } + + public AccumuloTableCacheProperties withPoolName(String poolName) { + this.poolName = poolName; + return this; + } + + public long getReloadInterval() { + return reloadInterval; + } + + public AccumuloTableCacheProperties withReloadInterval(long reloadInterval) { + this.reloadInterval = reloadInterval; + return this; + } + + public int getEvictionReaperIntervalInSeconds() { + 
return evictionReaperIntervalInSeconds; + } + + public AccumuloTableCacheProperties withEvictionReaperIntervalInSeconds(int evictionReaperIntervalInSeconds) { + this.evictionReaperIntervalInSeconds = evictionReaperIntervalInSeconds; + return this; + } + + public int getNumLocks() { + return numLocks; + } + + public AccumuloTableCacheProperties withNumLocks(int numLocks) { + this.numLocks = numLocks; + return this; + } + + public int getMaxRetries() { + return maxRetries; + } + + public AccumuloTableCacheProperties withMaxRetries(int maxRetries) { + this.maxRetries = maxRetries; + return this; + } + + public void setZookeepers(String zookeepers) { + this.zookeepers = zookeepers; + } + + public void setTableNames(List tableNames) { + this.tableNames = tableNames; + } + + public void setPoolName(String poolName) { + this.poolName = poolName; + } + + public void setReloadInterval(long reloadInterval) { + this.reloadInterval = reloadInterval; + } + + public void setEvictionReaperIntervalInSeconds(int evictionReaperIntervalInSeconds) { + this.evictionReaperIntervalInSeconds = evictionReaperIntervalInSeconds; + } + + public void setNumLocks(int numLocks) { + this.numLocks = numLocks; + } + + public void setMaxRetries(int maxRetries) { + this.maxRetries = maxRetries; + } + + public long getTableCacheReloadTaskLease() { + return tableCacheReloadTaskLease; + } + + public void setTableCacheReloadTaskLease(long tableCacheReloadTaskLease) { + this.tableCacheReloadTaskLease = tableCacheReloadTaskLease; + } + + public long getTableCacheReloadTaskLeaseMillis() { + return tableCacheReloadTaskLeaseTimeUnit.toMillis(tableCacheReloadTaskLease); + } + + public TimeUnit getTableCacheReloadTaskLeaseTimeUnit() { + return tableCacheReloadTaskLeaseTimeUnit; + } + + public void setTableCacheReloadTaskLeaseTimeUnit(TimeUnit tableCacheReloadTaskLeaseTimeUnit) { + this.tableCacheReloadTaskLeaseTimeUnit = tableCacheReloadTaskLeaseTimeUnit; + } + +} diff --git 
a/web-services/common/src/main/java/datawave/webservice/common/cache/BaseTableCache.java b/core/connection-pool/src/main/java/datawave/core/common/cache/BaseTableCache.java similarity index 96% rename from web-services/common/src/main/java/datawave/webservice/common/cache/BaseTableCache.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/BaseTableCache.java index 2f90cfa77ce..e662b70f4fb 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/BaseTableCache.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/BaseTableCache.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.io.Serializable; import java.util.Date; @@ -33,8 +33,7 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.connection.WrappedAccumuloClient; public class BaseTableCache implements Serializable, TableCache { @@ -171,7 +170,7 @@ public Boolean call() throws Exception { String tempTableName = tableName + "Temp"; try { Map map = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - accumuloClient = connectionFactory.getClient(connectionPoolName, Priority.ADMIN, map); + accumuloClient = connectionFactory.getClient(null, null, connectionPoolName, AccumuloConnectionFactory.Priority.ADMIN, map); if (accumuloClient instanceof WrappedAccumuloClient) { accumuloClient = ((WrappedAccumuloClient) accumuloClient).getReal(); } diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBoolean.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBoolean.java similarity index 98% rename from 
web-services/common/src/main/java/datawave/webservice/common/cache/SharedBoolean.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedBoolean.java index 1e6bd7b9644..ec2965f76a4 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBoolean.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBoolean.java @@ -3,7 +3,7 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. */ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.io.Closeable; import java.io.IOException; diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanListener.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanListener.java similarity index 90% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanListener.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanListener.java index fa20bd7228b..e9300b7072b 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanListener.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanListener.java @@ -3,7 +3,7 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. 
*/ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import org.apache.curator.framework.state.ConnectionStateListener; diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanReader.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanReader.java similarity index 89% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanReader.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanReader.java index 6703b03e73b..31721333f42 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedBooleanReader.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedBooleanReader.java @@ -3,7 +3,7 @@ * To change this template file, choose Tools | Templates * and open the template in the editor. */ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import org.apache.curator.framework.listen.Listenable; diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedCacheCoordinator.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedCacheCoordinator.java similarity index 97% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedCacheCoordinator.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedCacheCoordinator.java index 363da12ef16..ac322004e26 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedCacheCoordinator.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedCacheCoordinator.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.io.IOException; import java.io.Serializable; @@ -11,10 +11,6 @@ import java.util.Timer; import java.util.TimerTask; -import javax.annotation.PostConstruct; -import 
javax.annotation.PreDestroy; -import javax.inject.Inject; - import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; import org.apache.curator.framework.recipes.cache.ChildData; @@ -31,17 +27,14 @@ import org.apache.curator.framework.state.ConnectionStateListener; import org.apache.curator.retry.BoundedExponentialBackoffRetry; import org.apache.curator.utils.ZKPaths; -import org.apache.deltaspike.core.api.config.ConfigProperty; +import org.apache.log4j.Logger; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZKUtil; import org.apache.zookeeper.data.Stat; -import org.jboss.logging.Logger; import com.google.common.base.Preconditions; -import datawave.common.util.ArgumentChecker; - /** * Coordinates operations on a shared cache. That is, this coordinates operations where an in-memory cache may be running on multiple servers and each in-memory * cache is using a shared backing store (e.g., shared filesystem, Accumulo, etc). 
There are helper methods to handle distributed locking, notification of @@ -58,7 +51,7 @@ public interface EvictionCallback { private static final String LIVE_SERVERS = "/liveServers"; private static final long EVICT_MESSAGE_TIMEOUT = 60 * 1000L; - private Logger log = Logger.getLogger(getClass()); + private static final Logger log = Logger.getLogger(SharedCacheCoordinator.class); private transient CuratorFramework curatorClient; private String localName; private String serverIdentifierPath; @@ -98,12 +91,7 @@ public interface EvictionCallback { * @param numLocks * number of locks */ - @Inject - public SharedCacheCoordinator(@ConfigProperty(name = "dw.cache.coordinator.namespace") String namespace, - @ConfigProperty(name = "dw.warehouse.zookeepers") String zookeeperConnectionString, - @ConfigProperty(name = "dw.cacheCoordinator.evictionReaperIntervalSeconds", defaultValue = "30") int evictionReaperIntervalInSeconds, - @ConfigProperty(name = "dw.cacheCoordinator.numLocks", defaultValue = "300") int numLocks, - @ConfigProperty(name = "dw.cacheCoordinator.maxRetries", defaultValue = "10") int maxRetries) { + public SharedCacheCoordinator(String namespace, String zookeeperConnectionString, int evictionReaperIntervalInSeconds, int numLocks, int maxRetries) { ArgumentChecker.notNull(namespace, zookeeperConnectionString); locks = new HashMap<>(); @@ -130,7 +118,6 @@ public SharedCacheCoordinator(@ConfigProperty(name = "dw.cache.coordinator.names evictionReaper = new Timer("cache-eviction-reaper-" + namespace, true); } - @PostConstruct public void start() { curatorClient.start(); @@ -284,7 +271,6 @@ private void restartTriStates() { } } - @PreDestroy public void stop() { evictionReaper.cancel(); @@ -748,4 +734,18 @@ protected void reapEvictions() { log.warn("Error cleaning up eviction notices: " + e.getMessage(), e); } } + + public static class ArgumentChecker { + private static final String NULL_ARG_MSG = "argument was null"; + + public static final void notNull(final 
Object arg1) { + if (arg1 == null) + throw new IllegalArgumentException(NULL_ARG_MSG + ":Is null- arg1? true"); + } + + public static final void notNull(final Object arg1, final Object arg2) { + if (arg1 == null || arg2 == null) + throw new IllegalArgumentException(NULL_ARG_MSG + ":Is null- arg1? " + (arg1 == null) + " arg2? " + (arg2 == null)); + } + } } diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriState.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriState.java similarity index 99% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriState.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriState.java index f1aa73e7f36..e98be8527ef 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriState.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriState.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.io.Closeable; import java.io.IOException; diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateListener.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateListener.java similarity index 85% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateListener.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateListener.java index 78ec9012cc7..363277c4db4 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateListener.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateListener.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import org.apache.curator.framework.state.ConnectionStateListener; diff --git 
a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateReader.java b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateReader.java similarity index 82% rename from web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateReader.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateReader.java index 02101aeb2f0..019e28af88d 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/SharedTriStateReader.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/SharedTriStateReader.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import org.apache.curator.framework.listen.Listenable; diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/TableCache.java b/core/connection-pool/src/main/java/datawave/core/common/cache/TableCache.java similarity index 90% rename from web-services/common/src/main/java/datawave/webservice/common/cache/TableCache.java rename to core/connection-pool/src/main/java/datawave/core/common/cache/TableCache.java index 808f50726ef..5296146c1d5 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/TableCache.java +++ b/core/connection-pool/src/main/java/datawave/core/common/cache/TableCache.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import java.io.Serializable; import java.util.Date; @@ -6,7 +6,7 @@ import java.util.concurrent.Future; import datawave.accumulo.inmemory.InMemoryInstance; -import datawave.webservice.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory; public interface TableCache extends Callable, Serializable { diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPool.java 
b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPool.java similarity index 88% rename from web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPool.java rename to core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPool.java index 978880fd932..2032b783f19 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPool.java +++ b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPool.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.connection; +package datawave.core.common.connection; import java.util.ArrayList; import java.util.Arrays; @@ -34,9 +34,9 @@ public AccumuloClient borrowObject(Map trackingMap) throws Except Long threadId = Thread.currentThread().getId(); AccumuloClient o; try { - trackingMap.put("connection.state.start", Long.valueOf(System.currentTimeMillis()).toString()); - trackingMap.put("state", AccumuloConnectionFactory.State.WAITING.toString()); - trackingMap.put("thread.name", Thread.currentThread().getName()); + trackingMap.put(AccumuloConnectionFactory.START_TIME, Long.valueOf(System.currentTimeMillis()).toString()); + trackingMap.put(AccumuloConnectionFactory.STATE, AccumuloConnectionFactory.State.WAITING.toString()); + trackingMap.put(AccumuloConnectionFactory.THREAD_NAME, Thread.currentThread().getName()); threadToTrackingMapMap.put(threadId, trackingMap); o = super.borrowObject(); log.debug(System.currentTimeMillis() + " thread: " + threadId + " borrowed connector: " + o); @@ -47,8 +47,8 @@ public AccumuloClient borrowObject(Map trackingMap) throws Except // connection being moved from the threadToTrackingMapMap to the connectorToTrackingMapMap if (o != null) { - trackingMap.put("connection.state.start", Long.valueOf(System.currentTimeMillis()).toString()); - trackingMap.put("state", AccumuloConnectionFactory.State.CONNECTED.toString()); + 
trackingMap.put(AccumuloConnectionFactory.START_TIME, Long.valueOf(System.currentTimeMillis()).toString()); + trackingMap.put(AccumuloConnectionFactory.STATE, AccumuloConnectionFactory.State.CONNECTED.toString()); connectorToTrackingMapMap.put(o, trackingMap); } diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPoolFactory.java b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPoolFactory.java similarity index 97% rename from web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPoolFactory.java rename to core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPoolFactory.java index 0b7b246b3b2..bbc192ee193 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloClientPoolFactory.java +++ b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloClientPoolFactory.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.connection; +package datawave.core.common.connection; import org.apache.accumulo.core.client.Accumulo; import org.apache.accumulo.core.client.AccumuloClient; diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactory.java b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactory.java similarity index 52% rename from web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactory.java rename to core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactory.java index 66a652baba9..80d67e6184d 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactory.java +++ b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactory.java @@ -1,11 +1,26 @@ -package datawave.webservice.common.connection; +package 
datawave.core.common.connection; +import java.util.Collection; +import java.util.List; import java.util.Map; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.clientImpl.ClientContext; -public interface AccumuloConnectionFactory { +import datawave.core.common.result.ConnectionPool; +import datawave.webservice.common.connection.WrappedAccumuloClient; + +public interface AccumuloConnectionFactory extends AutoCloseable { + + String USER_DN = "user.dn"; + String PROXY_SERVERS = "proxyServers"; + String REQUEST_LOCATION = "request.location"; + String START_TIME = "connection.state.start"; + String STATE = "state"; + String THREAD_NAME = "thread.name"; + String QUERY_USER = "query.user"; + String QUERY_ID = "query.id"; + String QUERY = "query.query"; enum Priority { @@ -17,17 +32,10 @@ enum State { WAITING, CONNECTED } - /** - * @param poolName - * the name of the pool to query - * @return name of the user used in the connection pools - */ - String getConnectionUserName(String poolName); - /** * Gets a connection from the pool with the assigned priority * - * Deprecated in 2.2.3, use {@link #getClient(Priority, Map)} + * Deprecated in 2.2.3, use {@link #getClient(String, Collection, String, Priority, Map)} * * @param priority * the connection's Priority @@ -35,9 +43,9 @@ enum State { * the tracking map * @return accumulo connection * @throws Exception - * if there are issues + * on failure */ - AccumuloClient getClient(Priority priority, Map trackingMap) throws Exception; + AccumuloClient getClient(String userDN, Collection proxyServers, Priority priority, Map trackingMap) throws Exception; /** * Gets a connection from the named pool with the assigned priority @@ -50,20 +58,47 @@ enum State { * the tracking map * @return Accumulo connection * @throws Exception - * if there are issues + * on failure */ - AccumuloClient getClient(String poolName, Priority priority, Map trackingMap) throws Exception; + AccumuloClient getClient(String 
userDN, Collection proxyServers, String poolName, Priority priority, Map trackingMap) + throws Exception; /** * Returns the connection to the pool with the associated priority. * * @param client - * The client to return + * The connection to return * @throws Exception - * if there are issues + * on failure */ void returnClient(AccumuloClient client) throws Exception; + /** + * Return a report of the current connection factory usage + */ + String report(); + + /** + * Get a description of the current pools + * + * @return A list of connection pools + */ + List getConnectionPools(); + + /** + * Get the current connection usage percentage. This can be used for balancing purposes. + * + * @return The usage percentage (0 - 100) + */ + int getConnectionUsagePercent(); + + /** + * Get a tracking map to be used in the getConnection calls + * + * @param stackTrace + * The callers stack trace + * @return A map representation + */ Map getTrackingMap(StackTraceElement[] stackTrace); /** diff --git a/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactoryImpl.java b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactoryImpl.java new file mode 100644 index 00000000000..09d117697fd --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/connection/AccumuloConnectionFactoryImpl.java @@ -0,0 +1,381 @@ +package datawave.core.common.connection; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.Date; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.accumulo.core.client.AccumuloClient; +import org.apache.accumulo.core.client.admin.SecurityOperations; +import org.apache.accumulo.core.client.security.tokens.PasswordToken; +import org.apache.accumulo.core.util.Pair; +import 
org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.mutable.MutableInt; +import org.apache.log4j.Logger; + +import datawave.accumulo.inmemory.InMemoryAccumuloClient; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.result.Connection; +import datawave.core.common.result.ConnectionPool; +import datawave.core.common.result.ConnectionPoolProperties; +import datawave.core.common.result.ConnectionPoolsProperties; +import datawave.webservice.common.connection.WrappedAccumuloClient; + +/** + * An accumulo connection factory + */ +public class AccumuloConnectionFactoryImpl implements AccumuloConnectionFactory { + + private Logger log = Logger.getLogger(this.getClass()); + + private final AccumuloTableCache cache; + private final ConnectionPoolsProperties connectionPoolsConfiguration; + + private Map> pools; + + private String defaultPoolName = null; + + private static AccumuloConnectionFactoryImpl factory = null; + + public static AccumuloConnectionFactory getInstance(AccumuloTableCache cache, ConnectionPoolsProperties config) { + if (factory == null) { + synchronized (AccumuloConnectionFactoryImpl.class) { + if (factory == null) { + setFactory(new AccumuloConnectionFactoryImpl(cache, config)); + } + } + } + return factory; + } + + private AccumuloConnectionFactoryImpl(AccumuloTableCache cache, ConnectionPoolsProperties config) { + this.cache = cache; + this.connectionPoolsConfiguration = config; + log.info("Initializing AccumuloConnectionFactoryImpl with " + config.getDefaultPool() + " and " + config.getPoolNames()); + init(); + } + + public void init() { + this.pools = new HashMap<>(); + + if (this.connectionPoolsConfiguration == null) { + log.error("connectionPoolsConfiguration was null - aborting init()"); + return; + } + HashMap> instances = new HashMap<>(); + this.defaultPoolName = connectionPoolsConfiguration.getDefaultPool(); + for (Entry entry : connectionPoolsConfiguration.getPools().entrySet()) { + Map p = new 
HashMap<>(); + ConnectionPoolProperties conf = entry.getValue(); + p.put(Priority.ADMIN, createConnectionPool(conf, conf.getAdminPriorityPoolSize())); + p.put(Priority.HIGH, createConnectionPool(conf, conf.getHighPriorityPoolSize())); + p.put(Priority.NORMAL, createConnectionPool(conf, conf.getNormalPriorityPoolSize())); + p.put(Priority.LOW, createConnectionPool(conf, conf.getLowPriorityPoolSize())); + this.pools.put(entry.getKey(), Collections.unmodifiableMap(p)); + try { + setupMockAccumuloUser(conf, p.get(AccumuloConnectionFactory.Priority.NORMAL), instances); + } catch (Exception e) { + log.error("Error configuring mock accumulo user for AccumuloConnectionFactoryBean.", e); + } + + // Initialize the distributed tracing system. This needs to be done once at application startup. Since + // it is tied to Accumulo connections, we do it here in this singleton bean. + String appName = "datawave_ws"; + try { + appName = System.getProperty("app", "datawave_ws"); + } catch (SecurityException e) { + log.warn("Unable to retrieve system property \"app\": " + e.getMessage()); + } + } + + cache.setConnectionFactory(this); + } + + private AccumuloClientPool createConnectionPool(ConnectionPoolProperties conf, int limit) { + AccumuloClientPoolFactory factory = new AccumuloClientPoolFactory(conf.getUsername(), conf.getPassword(), conf.getZookeepers(), conf.getInstance()); + AccumuloClientPool pool = new AccumuloClientPool(factory); + pool.setTestOnBorrow(true); + pool.setTestOnReturn(true); + pool.setMaxTotal(limit); + pool.setMaxIdle(-1); + + try { + pool.addObject(); + } catch (Exception e) { + log.error("Error pre-populating connection pool", e); + } + + return pool; + } + + private void setupMockAccumuloUser(ConnectionPoolProperties conf, AccumuloClientPool pool, HashMap> instances) + throws Exception { + AccumuloClient c = null; + try { + c = pool.borrowObject(new HashMap<>()); + + Pair pair = instances.get(cache.getInstance().getInstanceID()); + String user = "root"; + 
PasswordToken password = new PasswordToken(new byte[0]); + if (pair != null && user.equals(pair.getFirst())) + password = pair.getSecond(); + SecurityOperations security = cache.getInstance().getConnector(user, password).securityOperations(); + Set users = security.listLocalUsers(); + if (!users.contains(conf.getUsername())) { + security.createLocalUser(conf.getUsername(), new PasswordToken(conf.getPassword())); + security.changeUserAuthorizations(conf.getUsername(), c.securityOperations().getUserAuthorizations(conf.getUsername())); + } else { + PasswordToken newPassword = new PasswordToken(conf.getPassword()); + // If we're changing root's password, and trying to change then keep track of that. If we have multiple instances + // that specify mismatching passwords, then throw an error. + if (user.equals(conf.getUsername())) { + if (pair != null && !newPassword.equals(pair.getSecond())) + throw new IllegalStateException( + "Invalid AccumuloConnectionFactoryBean configuration--multiple pools are configured with different root passwords!"); + instances.put(cache.getInstance().getInstanceID(), new Pair<>(conf.getUsername(), newPassword)); + } + // match root's password on mock to the password on the actual Accumulo instance + security.changeLocalUserPassword(conf.getUsername(), newPassword); + } + } finally { + pool.returnObject(c); + } + } + + private static void setFactory(AccumuloConnectionFactoryImpl factory) { + AccumuloConnectionFactoryImpl.factory = factory; + } + + @Override + public void close() { + synchronized (AccumuloConnectionFactoryImpl.class) { + setFactory(null); + for (Entry> entry : this.pools.entrySet()) { + for (Entry poolEntry : entry.getValue().entrySet()) { + try { + poolEntry.getValue().close(); + } catch (Exception e) { + log.error("Error closing Accumulo Connection Pool: " + e); + } + } + } + } + } + + /** + * Gets a connection from the pool with the assigned priority + * + * Deprecated in 2.2.3, use getConnection(UserContext context, String 
poolName, Priority priority, {@code Map trackingMap)} + * + * @param priority + * the connection's Priority + * @return accumulo connection + * @throws Exception + */ + @Override + public AccumuloClient getClient(final String userDN, final Collection proxyServers, Priority priority, Map trackingMap) + throws Exception { + return getClient(userDN, proxyServers, defaultPoolName, priority, trackingMap); + } + + /** + * Gets a connection from the named pool with the assigned priority + * + * @param cpn + * the name of the pool to retrieve the connection from + * @param priority + * the priority of the connection + * @param tm + * the tracking map + * @return Accumulo connection + * @throws Exception + */ + @Override + public AccumuloClient getClient(final String userDN, final Collection proxyServers, final String cpn, final Priority priority, + final Map tm) throws Exception { + final Map trackingMap = (tm != null) ? tm : new HashMap<>(); + final String poolName = (cpn != null) ? cpn : defaultPoolName; + + if (!priority.equals(Priority.ADMIN)) { + if (userDN != null) + trackingMap.put(USER_DN, userDN); + if (proxyServers != null) + trackingMap.put(PROXY_SERVERS, StringUtils.join(proxyServers, " -> ")); + } + log.info("Getting pool from " + poolName + " for priority " + priority); + log.info("Pools = " + pools); + log.info("Pools.get(poolName) = " + pools.get(poolName)); + AccumuloClientPool pool = pools.get(poolName).get(priority); + AccumuloClient c = pool.borrowObject(trackingMap); + AccumuloClient mock = new InMemoryAccumuloClient(pool.getFactory().getUsername(), cache.getInstance()); + WrappedAccumuloClient wrappedAccumuloClient = new WrappedAccumuloClient(c, mock); + if (connectionPoolsConfiguration.getClientConfiguration(poolName) != null) { + wrappedAccumuloClient.setClientConfig(connectionPoolsConfiguration.getClientConfiguration(poolName).getConfiguration()); + } + String classLoaderContext = System.getProperty("dw.accumulo.classLoader.context"); + if 
(classLoaderContext != null) { + wrappedAccumuloClient.setScannerClassLoaderContext(classLoaderContext); + } + String timeout = System.getProperty("dw.accumulo.scan.batch.timeout.seconds"); + if (timeout != null) { + wrappedAccumuloClient.setScanBatchTimeoutSeconds(Long.parseLong(timeout)); + } + return wrappedAccumuloClient; + } + + /** + * Returns the connection to the pool with the associated priority. + * + * @param client + * The connection to return + * @throws Exception + */ + @Override + public void returnClient(AccumuloClient client) throws Exception { + if (client instanceof WrappedAccumuloClient) { + WrappedAccumuloClient wrappedAccumuloClient = (WrappedAccumuloClient) client; + wrappedAccumuloClient.clearScannerClassLoaderContext(); + client = wrappedAccumuloClient.getReal(); + } + for (Entry> entry : this.pools.entrySet()) { + for (Entry poolEntry : entry.getValue().entrySet()) { + if (poolEntry.getValue().connectorCameFromHere(client)) { + poolEntry.getValue().returnObject(client); + log.info("Returning connection to pool " + entry.getKey() + " for priority " + poolEntry.getKey()); + return; + } + } + } + log.info("returnConnection called with connection that did not come from any AccumuloConnectionPool"); + } + + @Override + public String report() { + StringBuilder buf = new StringBuilder(); + for (Entry> entry : this.pools.entrySet()) { + buf.append("**** ").append(entry.getKey()).append(" ****\n"); + buf.append("ADMIN: ").append(entry.getValue().get(Priority.ADMIN)).append("\n"); + buf.append("HIGH: ").append(entry.getValue().get(Priority.HIGH)).append("\n"); + buf.append("NORMAL: ").append(entry.getValue().get(Priority.NORMAL)).append("\n"); + buf.append("LOW: ").append(entry.getValue().get(Priority.LOW)).append("\n"); + } + + return buf.toString(); + } + + /** + * Returns metrics for the AccumuloConnectionFactory + * + * @return list of ConnectionPool (connection pool metrics) + */ + @Override + public List getConnectionPools() { + ArrayList 
connectionPools = new ArrayList<>(); + + Set exclude = new HashSet<>(); + exclude.add("connection.state.start"); + exclude.add("state"); + exclude.add("request.location"); + + SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); + + for (Entry> entry : this.pools.entrySet()) { + for (Entry entry2 : entry.getValue().entrySet()) { + String poolName = entry.getKey(); + Priority priority = entry2.getKey(); + AccumuloClientPool p = entry2.getValue(); + + Long now = System.currentTimeMillis(); + MutableInt maxActive = new MutableInt(); + MutableInt numActive = new MutableInt(); + MutableInt maxIdle = new MutableInt(); + MutableInt numIdle = new MutableInt(); + MutableInt numWaiting = new MutableInt(); + // getConnectionPoolStats will collect the tracking maps and maxActive, numActive, maxIdle, numIdle while synchronized + // to ensure consistency between the GenericObjectPool and the tracking maps + List> requestingConnectionsMap = p.getConnectionPoolStats(maxActive, numActive, maxIdle, numIdle, numWaiting); + + ConnectionPool poolInfo = new ConnectionPool(); + poolInfo.setPriority(priority.name()); + poolInfo.setMaxActive(maxActive.toInteger()); + poolInfo.setNumActive(numActive.toInteger()); + poolInfo.setNumWaiting(numWaiting.toInteger()); + poolInfo.setMaxIdle(maxIdle.toInteger()); + poolInfo.setNumIdle(numIdle.toInteger()); + poolInfo.setPoolName(poolName); + + List requestingConnections = new ArrayList<>(); + for (Map m : requestingConnectionsMap) { + Connection c = new Connection(); + String state = m.get("state"); + if (state != null) { + c.setState(state); + } + String requestLocation = m.get("request.location"); + if (requestLocation != null) { + c.setRequestLocation(requestLocation); + } + String stateStart = m.get("connection.state.start"); + if (stateStart != null) { + Long stateStartLong = Long.valueOf(stateStart); + c.setTimeInState((now - stateStartLong)); + Date stateStartDate = new Date(stateStartLong); + 
c.addProperty("connection.state.start", sdf.format(stateStartDate)); + } + for (Entry e : m.entrySet()) { + if (!exclude.contains(e.getKey())) { + c.addProperty(e.getKey(), e.getValue()); + } + } + requestingConnections.add(c); + } + Collections.sort(requestingConnections); + poolInfo.setConnectionRequests(requestingConnections); + connectionPools.add(poolInfo); + } + } + return connectionPools; + } + + @Override + public int getConnectionUsagePercent() { + double maxPercentage = 0.0; + for (Entry> entry : pools.entrySet()) { + for (Entry poolEntry : entry.getValue().entrySet()) { + // Don't include ADMIN priority connections when computing a usage percentage + if (Priority.ADMIN.equals(poolEntry.getKey())) + continue; + + MutableInt maxActive = new MutableInt(); + MutableInt numActive = new MutableInt(); + MutableInt numWaiting = new MutableInt(); + MutableInt unused = new MutableInt(); + poolEntry.getValue().getConnectionPoolStats(maxActive, numActive, unused, unused, numWaiting); + + double percentage = (numActive.doubleValue() + numWaiting.doubleValue()) / maxActive.doubleValue(); + if (percentage > maxPercentage) { + maxPercentage = percentage; + } + } + } + return (int) (maxPercentage * 100); + } + + @Override + public Map getTrackingMap(StackTraceElement[] stackTrace) { + HashMap trackingMap = new HashMap<>(); + if (stackTrace != null) { + StackTraceElement ste = stackTrace[1]; + trackingMap.put(REQUEST_LOCATION, ste.getClassName() + "." 
+ ste.getMethodName() + ":" + ste.getLineNumber()); + } + + return trackingMap; + } +} diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/result/AccumuloTableCacheStatus.java b/core/connection-pool/src/main/java/datawave/core/common/result/AccumuloTableCacheStatus.java similarity index 92% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/AccumuloTableCacheStatus.java rename to core/connection-pool/src/main/java/datawave/core/common/result/AccumuloTableCacheStatus.java index db7bc955185..ac7a0f52798 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/AccumuloTableCacheStatus.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/AccumuloTableCacheStatus.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.util.LinkedList; import java.util.List; @@ -24,7 +24,7 @@ public class AccumuloTableCacheStatus extends BaseResponse implements HtmlProvid @XmlElementWrapper(name = "TableCaches") @XmlElement(name = "TableCache") - private List caches = new LinkedList<>(); + private List caches = new LinkedList<>(); @Override public String getTitle() { @@ -57,7 +57,7 @@ public String getMainContent() { builder.append("
"); builder.append(""); builder.append(""); - for (TableCache cache : caches) { + for (TableCacheDescription cache : caches) { builder.append(""); builder.append(""); builder.append(""); @@ -72,7 +72,7 @@ public String getMainContent() { return builder.toString(); } - public List getCaches() { + public List getCaches() { return caches; } } diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/result/Connection.java b/core/connection-pool/src/main/java/datawave/core/common/result/Connection.java similarity index 98% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/Connection.java rename to core/connection-pool/src/main/java/datawave/core/common/result/Connection.java index daf21c74e4b..ad234db4cb1 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/Connection.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/Connection.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.io.Serializable; import java.util.Set; diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionFactoryResponse.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionFactoryResponse.java similarity index 99% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionFactoryResponse.java rename to core/connection-pool/src/main/java/datawave/core/common/result/ConnectionFactoryResponse.java index 12a518b9000..613d79697f8 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionFactoryResponse.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionFactoryResponse.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.text.NumberFormat; import java.util.LinkedList; diff --git 
a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionPool.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPool.java similarity index 98% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionPool.java rename to core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPool.java index d9b66d0623e..fc453a9ef8d 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionPool.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPool.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.io.Serializable; import java.util.List; diff --git a/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolClientProperties.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolClientProperties.java new file mode 100644 index 00000000000..7bd1a77136d --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolClientProperties.java @@ -0,0 +1,25 @@ +package datawave.core.common.result; + +import org.apache.log4j.Logger; + +import datawave.webservice.common.connection.AccumuloClientConfiguration; + +/** + * The configuration for the connection pool clients of the form derived from properties as follows: + * + * dw.{pool}.client.{tableName}.consistency = IMMEDIATE|EVENTUAL dw.{pool}.client.{tableName}.{hintName} = {hintValue} + * + */ +public class ConnectionPoolClientProperties { + + private static final Logger log = Logger.getLogger(ConnectionPoolClientProperties.class); + protected AccumuloClientConfiguration config = new AccumuloClientConfiguration(); + + public AccumuloClientConfiguration getConfiguration() { + return config; + } + + public void setConfiguration(AccumuloClientConfiguration config) { + this.config = config; + } +} diff --git 
a/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolProperties.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolProperties.java new file mode 100644 index 00000000000..980a0a75af3 --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolProperties.java @@ -0,0 +1,77 @@ +package datawave.core.common.result; + +public class ConnectionPoolProperties { + protected String username; + protected String password; + protected String instance; + protected String zookeepers; + protected int lowPriorityPoolSize; + protected int normalPriorityPoolSize; + protected int highPriorityPoolSize; + protected int adminPriorityPoolSize; + + public String getUsername() { + return username; + } + + public String getPassword() { + return password; + } + + public String getInstance() { + return instance; + } + + public String getZookeepers() { + return zookeepers; + } + + public int getLowPriorityPoolSize() { + return lowPriorityPoolSize; + } + + public int getNormalPriorityPoolSize() { + return normalPriorityPoolSize; + } + + public int getHighPriorityPoolSize() { + return highPriorityPoolSize; + } + + public int getAdminPriorityPoolSize() { + return adminPriorityPoolSize; + } + + public void setUsername(String username) { + this.username = username; + } + + public void setPassword(String password) { + this.password = password; + } + + public void setInstance(String instance) { + this.instance = instance; + } + + public void setZookeepers(String zookeepers) { + this.zookeepers = zookeepers; + } + + public void setLowPriorityPoolSize(int lowPriorityPoolSize) { + this.lowPriorityPoolSize = lowPriorityPoolSize; + } + + public void setNormalPriorityPoolSize(int normalPriorityPoolSize) { + this.normalPriorityPoolSize = normalPriorityPoolSize; + } + + public void setHighPriorityPoolSize(int highPriorityPoolSize) { + this.highPriorityPoolSize = highPriorityPoolSize; + } + + public void 
setAdminPriorityPoolSize(int adminPriorityPoolSize) { + this.adminPriorityPoolSize = adminPriorityPoolSize; + } + +} diff --git a/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolsProperties.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolsProperties.java new file mode 100644 index 00000000000..d57fc39d3d8 --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionPoolsProperties.java @@ -0,0 +1,49 @@ +package datawave.core.common.result; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ConnectionPoolsProperties { + protected String defaultPool; + protected Map pools = new HashMap<>(); + protected Map configs = new HashMap<>(); + + public String getDefaultPool() { + return defaultPool; + } + + public Map getPools() { + return Collections.unmodifiableMap(pools); + } + + public ConnectionPoolProperties getConfiguration(String pool) { + return pools.get(pool); + } + + public List getPoolNames() { + return Collections.unmodifiableList(new ArrayList<>(pools.keySet())); + } + + public Map getClientConfiguration() { + return Collections.unmodifiableMap(configs); + } + + public ConnectionPoolClientProperties getClientConfiguration(String pool) { + return configs.get(pool); + } + + public void setDefaultPool(String defaultPool) { + this.defaultPool = defaultPool; + } + + public void setPools(Map pools) { + this.pools = pools; + } + + public void setClientConfiguration(Map configs) { + this.configs = configs; + } +} diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionProperty.java b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionProperty.java similarity index 97% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionProperty.java rename to 
core/connection-pool/src/main/java/datawave/core/common/result/ConnectionProperty.java index eb09d1bca66..937aeea7e07 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/ConnectionProperty.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/ConnectionProperty.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.io.Serializable; diff --git a/web-services/common-util/src/main/java/datawave/webservice/common/result/TableCache.java b/core/connection-pool/src/main/java/datawave/core/common/result/TableCacheDescription.java similarity index 95% rename from web-services/common-util/src/main/java/datawave/webservice/common/result/TableCache.java rename to core/connection-pool/src/main/java/datawave/core/common/result/TableCacheDescription.java index bf2502a8a32..ced34faed6c 100644 --- a/web-services/common-util/src/main/java/datawave/webservice/common/result/TableCache.java +++ b/core/connection-pool/src/main/java/datawave/core/common/result/TableCacheDescription.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.result; +package datawave.core.common.result; import java.io.Serializable; import java.util.Date; @@ -10,8 +10,7 @@ @XmlRootElement @XmlAccessorType(XmlAccessType.NONE) -public class TableCache implements Serializable { - +public class TableCacheDescription implements Serializable { private static final long serialVersionUID = 1L; @XmlAttribute diff --git a/core/connection-pool/src/main/java/datawave/core/query/runner/AccumuloConnectionRequestMap.java b/core/connection-pool/src/main/java/datawave/core/query/runner/AccumuloConnectionRequestMap.java new file mode 100644 index 00000000000..6e346f431d6 --- /dev/null +++ b/core/connection-pool/src/main/java/datawave/core/query/runner/AccumuloConnectionRequestMap.java @@ -0,0 +1,105 @@ +package datawave.core.query.runner; + +import java.util.ArrayList; +import java.util.HashMap; +import 
java.util.Iterator; +import java.util.List; +import java.util.Map; + +import org.apache.accumulo.core.util.Pair; +import org.apache.log4j.Logger; + +import datawave.core.common.connection.AccumuloConnectionFactory; + +/** + * For storing a map of queryId to Thread that is requesting an AccumuloConnection + */ +public class AccumuloConnectionRequestMap { + + private static Logger log = Logger.getLogger(AccumuloConnectionRequestMap.class); + + /** + * This maps the query-id to a pair containing the tracking map (see the AccumuloConnectionFactory) and the thread handling the request + */ + private Map,Thread>>> connectionThreadMap = new HashMap<>(); + + public boolean cancelConnectionRequest(String id, String userDn) { + // this call checks that the Principal used for the connection request and the connection cancel are the same + // if query is waiting for an accumulo connection in create or reset, then interrupt it + boolean connectionRequestCanceled = false; + synchronized (connectionThreadMap) { + List,Thread>> connectionRequestPairs = connectionThreadMap.get(id); + if (connectionRequestPairs != null) { + for (Pair,Thread> connectionRequestPair : connectionRequestPairs) { + try { + if (connectionRequestPair != null && connectionRequestPair.getFirst() != null) { + String connectionRequestPrincipalName = connectionRequestPair.getFirst().get(AccumuloConnectionFactory.USER_DN); + String connectionCancelPrincipalName = userDn; + if (connectionRequestPrincipalName.equals(connectionCancelPrincipalName)) { + connectionRequestPair.getSecond().interrupt(); + connectionRequestCanceled = true; + } + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + } + } + return connectionRequestCanceled; + } + + public boolean adminCancelConnectionRequest(String id) { + // it is assumed that admin status is already checked, so this call does not check the calling Principals + // if query is waiting for an accumulo connection in create or reset, then interrupt it + 
boolean connectionRequestCanceled = false; + List,Thread>> connectionRequestPairs = connectionThreadMap.get(id); + if (connectionRequestPairs != null) { + for (Pair,Thread> connectionRequestPair : connectionRequestPairs) { + try { + if (connectionRequestPair != null && connectionRequestPair.getFirst() != null) { + connectionRequestPair.getSecond().interrupt(); + connectionRequestCanceled = true; + } + } catch (Exception e) { + log.error(e.getMessage(), e); + } + } + } + + return connectionRequestCanceled; + } + + public void requestBegin(String id, String userDN, Map trackingMap) { + synchronized (connectionThreadMap) { + List,Thread>> connectionRequestPairs = connectionThreadMap.get(id); + if (connectionRequestPairs == null) { + connectionRequestPairs = new ArrayList<>(); + connectionThreadMap.put(id, connectionRequestPairs); + } + Pair,Thread> connectionRequestPair = new Pair<>(trackingMap, Thread.currentThread()); + if (userDN != null && trackingMap != null) + trackingMap.put(AccumuloConnectionFactory.USER_DN, userDN); + connectionRequestPairs.add(connectionRequestPair); + } + } + + public void requestEnd(String id) { + synchronized (connectionThreadMap) { + List,Thread>> connectionRequestPairs = connectionThreadMap.get(id); + Thread t = Thread.currentThread(); + Iterator,Thread>> it = connectionRequestPairs.iterator(); + boolean found = false; + while (!found && it.hasNext()) { + Pair,Thread> connectionRequestPair = it.next(); + if (connectionRequestPair.getSecond().equals(t)) { + it.remove(); + found = true; + } + } + if (connectionRequestPairs.isEmpty()) { + connectionThreadMap.remove(id); + } + } + } +} diff --git a/core/connection-pool/src/main/resources/META-INF/beans.xml b/core/connection-pool/src/main/resources/META-INF/beans.xml new file mode 100644 index 00000000000..4ca201f8ff2 --- /dev/null +++ b/core/connection-pool/src/main/resources/META-INF/beans.xml @@ -0,0 +1,9 @@ + + + + \ No newline at end of file diff --git 
a/core/connection-pool/src/main/resources/META-INF/jboss-ejb3.xml b/core/connection-pool/src/main/resources/META-INF/jboss-ejb3.xml new file mode 100644 index 00000000000..8cf49db8c87 --- /dev/null +++ b/core/connection-pool/src/main/resources/META-INF/jboss-ejb3.xml @@ -0,0 +1,16 @@ + + + + + + + * + datawave + + + + \ No newline at end of file diff --git a/web-services/security/src/main/resources/source-templates/datawave/security/cache/package-info.java b/core/connection-pool/src/main/resources/source-templates/datawave/core/common/result/package-info.java similarity index 76% rename from web-services/security/src/main/resources/source-templates/datawave/security/cache/package-info.java rename to core/connection-pool/src/main/resources/source-templates/datawave/core/common/result/package-info.java index c364a3b87ec..366079fbbb5 100644 --- a/web-services/security/src/main/resources/source-templates/datawave/security/cache/package-info.java +++ b/core/connection-pool/src/main/resources/source-templates/datawave/core/common/result/package-info.java @@ -1,7 +1,6 @@ @XmlSchema(namespace="${datawave.webservice.namespace}", elementFormDefault=XmlNsForm.QUALIFIED, xmlns={@XmlNs(prefix = "", namespaceURI = "${datawave.webservice.namespace}")}) -package datawave.security.cache; +package datawave.core.common.result; import javax.xml.bind.annotation.XmlNs; import javax.xml.bind.annotation.XmlNsForm; -import javax.xml.bind.annotation.XmlSchema; - +import javax.xml.bind.annotation.XmlSchema; \ No newline at end of file diff --git a/web-services/common/src/test/java/datawave/webservice/common/cache/SharedCacheCoordinatorTest.java b/core/connection-pool/src/test/java/datawave/core/common/cache/SharedCacheCoordinatorTest.java similarity index 96% rename from web-services/common/src/test/java/datawave/webservice/common/cache/SharedCacheCoordinatorTest.java rename to core/connection-pool/src/test/java/datawave/core/common/cache/SharedCacheCoordinatorTest.java index 
f38395e070a..d466442cab6 100644 --- a/web-services/common/src/test/java/datawave/webservice/common/cache/SharedCacheCoordinatorTest.java +++ b/core/connection-pool/src/test/java/datawave/core/common/cache/SharedCacheCoordinatorTest.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.cache; +package datawave.core.common.cache; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; @@ -124,8 +124,9 @@ public void stateChanged(CuratorFramework client, ConnectionState newState) { break; Thread.sleep(200L); } - assertEquals("Client never reconnected.", ConnectionState.RECONNECTED, state[0]); + // unfortunately curator does not always propogate the RECONNECTED state to the listener + // assertEquals("Client never reconnected.", ConnectionState.RECONNECTED, state[0]); newCount = 42; oldCount = counter.getVersionedValue(); counter.trySetCount(oldCount, newCount); @@ -181,7 +182,7 @@ public void stateChanged(CuratorFramework client, ConnectionState newState) {} testingZooKeeperServer.kill(); - for (int i = 0; i < 15; ++i) { + for (int i = 0; i < 20; ++i) { if (ConnectionState.LOST.equals(state[0])) break; Thread.sleep(3000L); @@ -190,7 +191,7 @@ public void stateChanged(CuratorFramework client, ConnectionState newState) {} testingZooKeeperServer.restart(); - for (int i = 0; i < 15; ++i) { + for (int i = 0; i < 20; ++i) { if (ConnectionState.RECONNECTED.equals(state[0])) break; Thread.sleep(3000L); diff --git a/web-services/common/src/test/java/datawave/webservice/common/connection/AccumuloConnectionFactoryTest.java b/core/connection-pool/src/test/java/datawave/core/common/connection/AccumuloConnectionFactoryTest.java similarity index 68% rename from web-services/common/src/test/java/datawave/webservice/common/connection/AccumuloConnectionFactoryTest.java rename to core/connection-pool/src/test/java/datawave/core/common/connection/AccumuloConnectionFactoryTest.java index 3d516b01f5b..8d891c689e7 100644 --- 
a/web-services/common/src/test/java/datawave/webservice/common/connection/AccumuloConnectionFactoryTest.java +++ b/core/connection-pool/src/test/java/datawave/core/common/connection/AccumuloConnectionFactoryTest.java @@ -1,6 +1,5 @@ -package datawave.webservice.common.connection; +package datawave.core.common.connection; -import static org.easymock.MockType.STRICT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; @@ -14,10 +13,12 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.commons.pool2.PooledObject; import org.apache.commons.pool2.impl.DefaultPooledObject; +import org.apache.log4j.Logger; import org.easymock.EasyMock; import org.easymock.EasyMockRunner; import org.easymock.EasyMockSupport; import org.easymock.Mock; +import org.easymock.MockType; import org.easymock.TestSubject; import org.junit.After; import org.junit.Before; @@ -25,30 +26,27 @@ import org.junit.runner.RunWith; import org.powermock.reflect.Whitebox; -import com.google.common.collect.Lists; - import datawave.accumulo.inmemory.InMemoryInstance; -import datawave.webservice.common.cache.AccumuloTableCache; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.common.connection.config.ConnectionPoolConfiguration; -import datawave.webservice.common.connection.config.ConnectionPoolsConfiguration; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.result.ConnectionPoolProperties; +import datawave.core.common.result.ConnectionPoolsProperties; +import datawave.webservice.common.connection.WrappedAccumuloClient; @RunWith(EasyMockRunner.class) public class AccumuloConnectionFactoryTest extends EasyMockSupport { - @TestSubject - private AccumuloConnectionFactoryBean bean = createMockBuilder(AccumuloConnectionFactoryBean.class) - .addMockedMethods("getCurrentUserDN", 
"getCurrentProxyServers").createStrictMock(); - - @Mock(type = STRICT) + @Mock(type = MockType.STRICT) private AccumuloTableCache cache; private InMemoryInstance instance = new InMemoryInstance(); - @Mock(type = STRICT) + @TestSubject + private AccumuloConnectionFactoryImpl factory = createMockBuilder(AccumuloConnectionFactoryImpl.class).createStrictMock(); + + @Mock(type = MockType.STRICT) private WrappedAccumuloClient warehouseClient; - @Mock(type = STRICT) + @Mock(type = MockType.STRICT) private WrappedAccumuloClient metricsClient; @Before @@ -63,19 +61,17 @@ public void setup() throws Exception { warehouseFactory.setClient(warehouseClient); metricsFactory.setClient(metricsClient); - Map configs = new HashMap<>(); + Map configs = new HashMap<>(); configs.put("WAREHOUSE", null); configs.put("METRICS", null); - ConnectionPoolsConfiguration conf = new ConnectionPoolsConfiguration(); + ConnectionPoolsProperties conf = new ConnectionPoolsProperties(); Whitebox.setInternalState(conf, "defaultPool", "WAREHOUSE"); - Whitebox.setInternalState(conf, "poolNames", Lists.newArrayList("WAREHOUSE", "METRICS")); Whitebox.setInternalState(conf, "pools", configs); - String defaultPoolName = conf.getDefaultPool(); - HashMap> pools = new HashMap<>(); + HashMap> pools = new HashMap<>(); MyAccumuloClientPool warehousePool = new MyAccumuloClientPool(warehouseFactory); MyAccumuloClientPool metricsPool = new MyAccumuloClientPool(metricsFactory); - for (Entry entry : conf.getPools().entrySet()) { + for (Entry entry : conf.getPools().entrySet()) { AccumuloClientPool acp = null; switch (entry.getKey()) { case "METRICS": @@ -87,16 +83,18 @@ public void setup() throws Exception { default: fail("Unknown pool name " + entry.getKey()); } - Map p = new HashMap<>(); - p.put(Priority.ADMIN, acp); - p.put(Priority.HIGH, acp); - p.put(Priority.NORMAL, acp); - p.put(Priority.LOW, acp); + Map p = new HashMap<>(); + p.put(AccumuloConnectionFactory.Priority.ADMIN, acp); + 
p.put(AccumuloConnectionFactory.Priority.HIGH, acp); + p.put(AccumuloConnectionFactory.Priority.NORMAL, acp); + p.put(AccumuloConnectionFactory.Priority.LOW, acp); pools.put(entry.getKey(), Collections.unmodifiableMap(p)); } - Whitebox.setInternalState(bean, ConnectionPoolsConfiguration.class, conf); - Whitebox.setInternalState(bean, "defaultPoolName", defaultPoolName); - Whitebox.setInternalState(bean, "pools", pools); + Whitebox.setInternalState(factory, "log", Logger.getLogger(AccumuloConnectionFactoryImpl.class)); + Whitebox.setInternalState(factory, ConnectionPoolsProperties.class, conf); + Whitebox.setInternalState(factory, "defaultPoolName", conf.getDefaultPool()); + Whitebox.setInternalState(factory, "pools", pools); + Whitebox.setInternalState(factory, "cache", cache); } @After @@ -108,10 +106,8 @@ public void cleanup() { public void testGetConnection() throws Exception { resetAll(); EasyMock.expect(cache.getInstance()).andReturn(instance); - EasyMock.expect(bean.getCurrentUserDN()).andReturn(null); - EasyMock.expect(bean.getCurrentProxyServers()).andReturn(null); replayAll(); - AccumuloClient con = bean.getClient(Priority.HIGH, new HashMap<>()); + AccumuloClient con = factory.getClient(null, null, AccumuloConnectionFactory.Priority.HIGH, new HashMap<>()); verifyAll(); assertNotNull(con); assertEquals(warehouseClient, ((WrappedAccumuloClient) con).getReal()); @@ -122,10 +118,8 @@ public void testGetConnection() throws Exception { public void testGetWarehouseConnection() throws Exception { resetAll(); EasyMock.expect(cache.getInstance()).andReturn(new InMemoryInstance()); - EasyMock.expect(bean.getCurrentUserDN()).andReturn(null); - EasyMock.expect(bean.getCurrentProxyServers()).andReturn(null); replayAll(); - AccumuloClient con = bean.getClient("WAREHOUSE", Priority.HIGH, new HashMap<>()); + AccumuloClient con = factory.getClient(null, null, "WAREHOUSE", AccumuloConnectionFactory.Priority.HIGH, new HashMap<>()); verifyAll(); assertNotNull(con); 
assertEquals(warehouseClient, ((WrappedAccumuloClient) con).getReal()); @@ -136,10 +130,8 @@ public void testGetContextConnection() throws Exception { System.setProperty("dw.accumulo.classLoader.context", "alternateContext"); resetAll(); EasyMock.expect(cache.getInstance()).andReturn(new InMemoryInstance()); - EasyMock.expect(bean.getCurrentUserDN()).andReturn(null); - EasyMock.expect(bean.getCurrentProxyServers()).andReturn(null); replayAll(); - AccumuloClient con = bean.getClient("WAREHOUSE", Priority.HIGH, new HashMap<>()); + AccumuloClient con = factory.getClient(null, null, "WAREHOUSE", AccumuloConnectionFactory.Priority.HIGH, new HashMap<>()); verifyAll(); assertNotNull(con); assertEquals(warehouseClient, ((WrappedAccumuloClient) con).getReal()); @@ -150,10 +142,8 @@ public void testGetContextConnection() throws Exception { public void testGetMetricsConnection() throws Exception { resetAll(); EasyMock.expect(cache.getInstance()).andReturn(new InMemoryInstance()); - EasyMock.expect(bean.getCurrentUserDN()).andReturn(null); - EasyMock.expect(bean.getCurrentProxyServers()).andReturn(null); replayAll(); - AccumuloClient con = bean.getClient("METRICS", Priority.HIGH, new HashMap<>()); + AccumuloClient con = factory.getClient(null, null, "METRICS", AccumuloConnectionFactory.Priority.HIGH, new HashMap<>()); verifyAll(); assertNotNull(con); assertEquals(metricsClient, ((WrappedAccumuloClient) con).getReal()); diff --git a/web-services/common/src/test/java/datawave/webservice/common/curator/TestSharedCacheCoordinator.java b/core/connection-pool/src/test/java/datawave/core/common/curator/TestSharedCacheCoordinator.java similarity index 98% rename from web-services/common/src/test/java/datawave/webservice/common/curator/TestSharedCacheCoordinator.java rename to core/connection-pool/src/test/java/datawave/core/common/curator/TestSharedCacheCoordinator.java index 33386d31cf6..8d7a7d7dea6 100644 --- 
a/web-services/common/src/test/java/datawave/webservice/common/curator/TestSharedCacheCoordinator.java +++ b/core/connection-pool/src/test/java/datawave/core/common/curator/TestSharedCacheCoordinator.java @@ -1,4 +1,4 @@ -package datawave.webservice.common.curator; +package datawave.core.common.curator; import java.io.IOException; import java.io.Serializable; @@ -23,16 +23,16 @@ import org.apache.curator.framework.recipes.shared.SharedCountListener; import org.apache.curator.retry.ExponentialBackoffRetry; import org.apache.curator.utils.ZKPaths; +import org.apache.log4j.Logger; import org.apache.zookeeper.CreateMode; import org.apache.zookeeper.ZKUtil; import org.apache.zookeeper.data.Stat; -import org.jboss.logging.Logger; import com.google.common.base.Preconditions; -import datawave.common.util.ArgumentChecker; -import datawave.webservice.common.cache.SharedBoolean; -import datawave.webservice.common.cache.SharedBooleanListener; +import datawave.core.common.cache.SharedBoolean; +import datawave.core.common.cache.SharedBooleanListener; +import datawave.core.common.cache.SharedCacheCoordinator.ArgumentChecker; /** * Coordinates operations on a shared cache. 
That is, this coordinates operations where an in-memory cache may be running on multiple servers and each in-memory diff --git a/web-services/common-util/src/test/java/datawave/common/result/ConnectionPoolTest.java b/core/connection-pool/src/test/java/datawave/core/common/result/ConnectionPoolTest.java similarity index 94% rename from web-services/common-util/src/test/java/datawave/common/result/ConnectionPoolTest.java rename to core/connection-pool/src/test/java/datawave/core/common/result/ConnectionPoolTest.java index 33af8b795f0..6c5171908ac 100644 --- a/web-services/common-util/src/test/java/datawave/common/result/ConnectionPoolTest.java +++ b/core/connection-pool/src/test/java/datawave/core/common/result/ConnectionPoolTest.java @@ -1,4 +1,4 @@ -package datawave.common.result; +package datawave.core.common.result; import java.util.Iterator; import java.util.LinkedList; @@ -9,8 +9,7 @@ import org.junit.Before; import org.junit.Test; -import datawave.webservice.common.result.ConnectionPool; -import datawave.webservice.common.result.ConnectionPool.Priority; +import datawave.core.common.result.ConnectionPool.Priority; /** * diff --git a/core/connection-pool/src/test/resources/log4j.properties b/core/connection-pool/src/test/resources/log4j.properties new file mode 100644 index 00000000000..cacd01b436c --- /dev/null +++ b/core/connection-pool/src/test/resources/log4j.properties @@ -0,0 +1,6 @@ +log4j.rootCategory=INFO, CONSOLE + +log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender +log4j.appender.CONSOLE.Threshold=INFO +log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout +log4j.appender.CONSOLE.layout.ConversionPattern=%-5p [%C{1}:%M] %m%n diff --git a/core/in-memory-accumulo b/core/in-memory-accumulo index 7cc068bb7e0..ab81e784581 160000 --- a/core/in-memory-accumulo +++ b/core/in-memory-accumulo @@ -1 +1 @@ -Subproject commit 7cc068bb7e0f09c1ba9f2e859828998f581b134d +Subproject commit ab81e784581d6cef04622fbf8f9a689d5aa4b616 diff --git 
a/core/map-reduce/pom.xml b/core/map-reduce/pom.xml new file mode 100644 index 00000000000..998c9ff23da --- /dev/null +++ b/core/map-reduce/pom.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-map-reduce + ${project.artifactId} + + + gov.nsa.datawave.core + datawave-core-common-util + ${project.version} + + + gov.nsa.datawave.core + datawave-core-query + ${project.version} + + + gov.nsa.datawave.microservice + mapreduce-query-api + + + gov.nsa.datawave.webservices + datawave-ws-client + ${project.version} + + + org.jboss.resteasy + resteasy-jaxrs + provided + + + diff --git a/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/ApplicationContextAwareMapper.java b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/ApplicationContextAwareMapper.java new file mode 100644 index 00000000000..5a451a0b437 --- /dev/null +++ b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/ApplicationContextAwareMapper.java @@ -0,0 +1,64 @@ +package datawave.core.mapreduce.bulkresults.map; + +import java.io.IOException; + +import org.apache.hadoop.mapreduce.Mapper; +import org.apache.log4j.Logger; +import org.springframework.context.ApplicationContext; +import org.springframework.context.annotation.AnnotationConfigApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.core.io.ClassPathResource; +import org.springframework.core.io.support.ResourcePropertySource; + +public class ApplicationContextAwareMapper extends Mapper { + + private static Logger log = Logger.getLogger(ApplicationContextAwareMapper.class); + + public static final String SPRING_CONFIG_LOCATIONS = "spring.config.locations"; + public static final String SPRING_CONFIG_BASE_PACKAGES = "spring.config.base-packages"; + public static final String SPRING_CONFIG_STARTING_CLASS = "spring.config.starting-class"; + + protected 
ApplicationContext applicationContext; + + /** + * Create a Spring Application Context + * + * @param contextPath + * is a possibly CSV of spring config file locations + * @param basePackages + * is a possibly CSV of base packages to scan + * @param startingClass + * the annotated starting class to be processes + */ + protected void setApplicationContext(String contextPath, String basePackages, String startingClass) { + AnnotationConfigApplicationContext annotationApplicationContext = new AnnotationConfigApplicationContext(); + + try { + annotationApplicationContext.getEnvironment().getPropertySources() + .addLast(new ResourcePropertySource(new ClassPathResource("application.properties"))); + } catch (IOException e) { + log.error("application.properties could not be loaded", e); + throw new RuntimeException(e); + } + + if (basePackages != null && !basePackages.isEmpty()) { + annotationApplicationContext.scan(basePackages.split(",")); + } + + if (startingClass != null && !startingClass.isEmpty()) { + try { + annotationApplicationContext.register(Class.forName(startingClass)); + } catch (ClassNotFoundException e) { + throw new RuntimeException("Could not find starting class: " + startingClass, e); + } + } + + annotationApplicationContext.refresh(); + + if (contextPath != null && !contextPath.isEmpty()) { + this.applicationContext = new ClassPathXmlApplicationContext(contextPath.split(","), annotationApplicationContext); + } else { + this.applicationContext = annotationApplicationContext; + } + } +} diff --git a/web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsFileOutputMapper.java b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsFileOutputMapper.java similarity index 90% rename from web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsFileOutputMapper.java rename to 
core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsFileOutputMapper.java index 08e6d834c21..62b7f2d403e 100644 --- a/web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsFileOutputMapper.java +++ b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsFileOutputMapper.java @@ -1,4 +1,4 @@ -package datawave.webservice.mr.bulkresults.map; +package datawave.core.mapreduce.bulkresults.map; import java.io.ByteArrayOutputStream; import java.io.IOException; @@ -22,15 +22,14 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.NullWritable; import org.apache.log4j.Logger; -import org.jboss.weld.environment.se.Weld; import org.springframework.util.Assert; -import datawave.webservice.query.Query; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.mapreduce.bulkresults.map.SerializationFormat; +import datawave.microservice.query.Query; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.util.ProtostuffMessageBodyWriter; @@ -62,17 +61,9 @@ public class BulkResultsFileOutputMapper extends ApplicationContextAwareMapper entries = new HashMap<>(); private Map> responseClassMap = new HashMap<>(); private SerializationFormat format = SerializationFormat.XML; - private Weld weld; @Override protected void setup(org.apache.hadoop.mapreduce.Mapper.Context context) throws IOException, InterruptedException { - if (System.getProperty("ignore.weld.startMain") == null) { - 
System.setProperty("com.sun.jersey.server.impl.cdi.lookupExtensionInBeanManager", "true"); // Disable CDI extensions in Jersey libs - - weld = new Weld("STATIC_INSTANCE"); - weld.initialize(); - } - super.setup(context); Query query; try { @@ -85,8 +76,8 @@ protected void setup(org.apache.hadoop.mapreduce.Mapper.Con } final Configuration configuration = context.getConfiguration(); - this.setApplicationContext(configuration.get(SPRING_CONFIG_LOCATIONS)); - + this.setApplicationContext(configuration.get(SPRING_CONFIG_LOCATIONS), configuration.get(SPRING_CONFIG_BASE_PACKAGES), + configuration.get(SPRING_CONFIG_STARTING_CLASS)); String logicName = context.getConfiguration().get(QUERY_LOGIC_NAME); QueryLogic logic = (QueryLogic) super.applicationContext.getBean(logicName); @@ -100,10 +91,6 @@ protected void setup(org.apache.hadoop.mapreduce.Mapper.Con @Override protected void cleanup(Context context) throws IOException, InterruptedException { super.cleanup(context); - - if (weld != null) { - weld.shutdown(); - } } @Override diff --git a/web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsTableOutputMapper.java b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsTableOutputMapper.java similarity index 82% rename from web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsTableOutputMapper.java rename to core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsTableOutputMapper.java index 67904139b61..4f965dc1728 100644 --- a/web-services/map-reduce/src/main/java/datawave/webservice/mr/bulkresults/map/BulkResultsTableOutputMapper.java +++ b/core/map-reduce/src/main/java/datawave/core/mapreduce/bulkresults/map/BulkResultsTableOutputMapper.java @@ -1,4 +1,4 @@ -package datawave.webservice.mr.bulkresults.map; +package datawave.core.mapreduce.bulkresults.map; import java.io.IOException; import java.util.Collections; @@ -12,14 +12,16 @@ import 
org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.ColumnVisibility; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; +import org.springframework.util.Assert; -import datawave.webservice.query.Query; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.mapreduce.bulkresults.map.SerializationFormat; +import datawave.microservice.query.Query; import datawave.webservice.result.BaseQueryResponse; public class BulkResultsTableOutputMapper extends ApplicationContextAwareMapper { @@ -48,9 +50,17 @@ protected void setup(org.apache.hadoop.mapreduce.Mapper throw new RuntimeException("Error instantiating query impl class " + context.getConfiguration().get(BulkResultsFileOutputMapper.QUERY_IMPL_CLASS), e); } - QueryLogic logic = (QueryLogic) super.applicationContext.getBean(QUERY_LOGIC_NAME); - t = logic.getEnrichedTransformer(query); + final Configuration configuration = context.getConfiguration(); + + this.setApplicationContext(configuration.get(SPRING_CONFIG_LOCATIONS), configuration.get(SPRING_CONFIG_BASE_PACKAGES), + configuration.get(SPRING_CONFIG_STARTING_CLASS)); + String logicName = context.getConfiguration().get(QUERY_LOGIC_NAME); + + QueryLogic logic = (QueryLogic) super.applicationContext.getBean(logicName); + t = logic.getEnrichedTransformer(query); + Assert.notNull(logic.getMarkingFunctions()); + Assert.notNull(logic.getResponseObjectFactory()); this.tableName = new 
Text(context.getConfiguration().get(TABLE_NAME)); this.format = SerializationFormat.valueOf(context.getConfiguration().get(BulkResultsFileOutputMapper.RESULT_SERIALIZATION_FORMAT)); diff --git a/core/metrics-reporter b/core/metrics-reporter index 74e1e202de8..cdf1d8d474a 160000 --- a/core/metrics-reporter +++ b/core/metrics-reporter @@ -1 +1 @@ -Subproject commit 74e1e202de82827362fb381a915cba4642179d7c +Subproject commit cdf1d8d474a98faa592c9acac8e7817f1faf2af2 diff --git a/core/modification/pom.xml b/core/modification/pom.xml new file mode 100644 index 00000000000..553f3194d32 --- /dev/null +++ b/core/modification/pom.xml @@ -0,0 +1,53 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-modification + ${project.artifactId} + + + + gov.nsa.datawave + datawave-query-core + ${project.version} + + + gov.nsa.datawave.core + datawave-core-common + ${project.version} + + + gov.nsa.datawave.core + datawave-core-common-util + ${project.version} + + + gov.nsa.datawave.core + datawave-core-connection-pool + ${project.version} + + + gov.nsa.datawave.microservice + base-rest-responses + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.vintage + junit-vintage-engine + test + + + diff --git a/core/modification/src/main/java/datawave/modification/DatawaveModificationException.java b/core/modification/src/main/java/datawave/modification/DatawaveModificationException.java new file mode 100644 index 00000000000..baa24c156e4 --- /dev/null +++ b/core/modification/src/main/java/datawave/modification/DatawaveModificationException.java @@ -0,0 +1,30 @@ +package datawave.modification; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import datawave.webservice.query.exception.QueryException; + +public class DatawaveModificationException extends RuntimeException { + + private List exceptions = new ArrayList<>(); + + public 
DatawaveModificationException(QueryException qe) { + super(qe); + exceptions.add(qe); + } + + public DatawaveModificationException(String msg, QueryException qe) { + super(msg, qe); + exceptions.add(qe); + } + + public void addException(QueryException e) { + exceptions.add(e); + } + + public List getExceptions() { + return Collections.unmodifiableList(exceptions); + } +} diff --git a/core/modification/src/main/java/datawave/modification/ModificationService.java b/core/modification/src/main/java/datawave/modification/ModificationService.java new file mode 100644 index 00000000000..0668dd4ae28 --- /dev/null +++ b/core/modification/src/main/java/datawave/modification/ModificationService.java @@ -0,0 +1,142 @@ +package datawave.modification; + +import static java.util.Map.Entry; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.apache.accumulo.core.client.AccumuloClient; +import org.apache.accumulo.core.security.Authorizations; +import org.apache.log4j.Logger; + +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.modification.cache.ModificationCache; +import datawave.modification.configuration.ModificationConfiguration; +import datawave.modification.configuration.ModificationServiceConfiguration; +import datawave.modification.query.ModificationQueryService; +import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.modification.ModificationRequestBase; +import datawave.webservice.query.exception.BadRequestQueryException; +import datawave.webservice.query.exception.DatawaveErrorCode; +import datawave.webservice.query.exception.QueryException; +import datawave.webservice.query.exception.UnauthorizedQueryException; +import datawave.webservice.result.VoidResponse; 
+import datawave.webservice.results.modification.ModificationConfigurationResponse; + +public class ModificationService { + + private static final Logger log = Logger.getLogger(ModificationService.class); + + private final AccumuloConnectionFactory connectionFactory; + + private final ModificationCache cache; + + private final ModificationQueryService.ModificationQueryServiceFactory queryServiceFactory; + + private final ModificationConfiguration modificationConfiguration; + + public ModificationService(ModificationConfiguration modificationConfiguration, ModificationCache cache, AccumuloConnectionFactory connectionFactory, + ModificationQueryService.ModificationQueryServiceFactory queryServiceFactory) { + this.modificationConfiguration = modificationConfiguration; + this.cache = cache; + this.connectionFactory = connectionFactory; + this.queryServiceFactory = queryServiceFactory; + } + + /** + * Returns a list of the Modification service names and their configurations + * + * @return datawave.webservice.results.modification.ModificationConfigurationResponse + */ + public List listConfigurations() { + List configs = new ArrayList<>(); + for (Entry entry : this.modificationConfiguration.getConfigurations().entrySet()) { + ModificationConfigurationResponse r = new ModificationConfigurationResponse(); + r.setName(entry.getKey()); + r.setRequestClass(entry.getValue().getRequestClass().getName()); + r.setDescription(entry.getValue().getDescription()); + r.setAuthorizedRoles(entry.getValue().getAuthorizedRoles()); + configs.add(r); + } + return configs; + } + + /** + * Execute a Modification service with the given name and runtime parameters + * + * @param userDetails + * The proxied user list + * @param modificationServiceName + * Name of the modification service configuration + * @param request + * object type specified in listConfigurations response. 
+ * @return datawave.webservice.result.VoidResponse + */ + public VoidResponse submit(ProxiedUserDetails userDetails, String modificationServiceName, ModificationRequestBase request) { + VoidResponse response = new VoidResponse(); + + // Find out who/what called this method + DatawaveUser primaryUser = userDetails.getPrimaryUser(); + String userDn = primaryUser.getDn().subjectDN(); + Collection proxyServers = userDetails.getProxiedUsers().stream().map(u -> u.getDn().subjectDN()).collect(Collectors.toList()); + Collection userRoles = primaryUser.getRoles(); + Set cbAuths = userDetails.getProxiedUsers().stream().map(u -> new Authorizations(u.getAuths().toArray(new String[0]))) + .collect(Collectors.toSet()); + + AccumuloClient client = null; + AccumuloConnectionFactory.Priority priority; + try { + // Get the Modification Service from the configuration + ModificationServiceConfiguration service = modificationConfiguration.getConfiguration(modificationServiceName); + if (!request.getClass().equals(service.getRequestClass())) { + BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.INVALID_REQUEST_CLASS, + MessageFormat.format("Requires: {0} but got {1}", service.getRequestClass().getName(), request.getClass().getName())); + throw new DatawaveModificationException(qe); + } + + priority = service.getPriority(); + + // Ensure that the user is in the list of authorized roles + if (null != service.getAuthorizedRoles()) { + boolean authorized = !Collections.disjoint(userRoles, service.getAuthorizedRoles()); + if (!authorized) { + // Then the user does not have any of the authorized roles + UnauthorizedQueryException qe = new UnauthorizedQueryException(DatawaveErrorCode.JOB_EXECUTION_UNAUTHORIZED, + MessageFormat.format("Requires one of: {0}", service.getAuthorizedRoles())); + throw new DatawaveModificationException(qe); + } + } + + // Process the modification + Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); 
+ client = connectionFactory.getClient(userDn, proxyServers, modificationConfiguration.getPoolName(), priority, trackingMap); + service.setQueryServiceFactory(queryServiceFactory); + log.info("Processing modification request from user=" + userDetails.getShortName() + ": \n" + request); + service.process(client, request, cache.getCachedMutableFieldList(), cbAuths, userDetails); + } catch (DatawaveModificationException e) { + throw e; + } catch (Exception e) { + QueryException qe = new QueryException(DatawaveErrorCode.MODIFICATION_ERROR, e); + log.error(qe); + throw new DatawaveModificationException(qe); + } finally { + if (null != client) { + try { + connectionFactory.returnClient(client); + } catch (Exception e) { + log.error("Error returning connection", e); + } + } + } + + return response; + } + +} diff --git a/warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataHandler.java b/core/modification/src/main/java/datawave/modification/MutableMetadataHandler.java similarity index 94% rename from warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataHandler.java rename to core/modification/src/main/java/datawave/modification/MutableMetadataHandler.java index fd360a166ca..1a2bd92fce6 100644 --- a/warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataHandler.java +++ b/core/modification/src/main/java/datawave/modification/MutableMetadataHandler.java @@ -1,4 +1,4 @@ -package datawave.webservice.modification; +package datawave.modification; import java.util.ArrayList; import java.util.Arrays; @@ -15,7 +15,6 @@ import java.util.concurrent.TimeUnit; import javax.annotation.Nullable; -import javax.ws.rs.core.MultivaluedMap; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.AccumuloException; @@ -38,33 +37,37 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; -import 
org.jboss.resteasy.specimpl.MultivaluedMapImpl; import com.google.common.base.Function; import com.google.common.collect.HashMultimap; import com.google.common.collect.Iterators; import com.google.common.collect.Multimap; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.core.iterators.FieldIndexDocumentFilter; import datawave.data.ColumnFamilyConstants; import datawave.data.type.Type; import datawave.ingest.protobuf.Uid; import datawave.ingest.protobuf.Uid.List.Builder; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.QueryPersistence; +import datawave.modification.configuration.ModificationServiceConfiguration; +import datawave.modification.query.ModificationQueryService; import datawave.query.data.parsers.DatawaveKey; import datawave.query.data.parsers.DatawaveKey.KeyType; import datawave.query.util.MetadataHelper; import datawave.query.util.MetadataHelperFactory; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.util.ScannerHelper; import datawave.util.TextUtil; import datawave.util.time.DateHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory; +import datawave.webservice.modification.DefaultModificationRequest; +import datawave.webservice.modification.EventIdentifier; +import datawave.webservice.modification.ModificationOperation; +import datawave.webservice.modification.ModificationRequestBase; import datawave.webservice.modification.ModificationRequestBase.MODE; -import datawave.webservice.modification.configuration.ModificationServiceConfiguration; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.result.event.EventBase; -import datawave.webservice.query.runner.QueryExecutorBean; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.EventQueryResponseBase; 
import datawave.webservice.result.GenericResponse; @@ -301,12 +304,14 @@ public Class getRequestClass() { // Default the insert history option to true so that the call remains backwards compatible. @Override public void process(AccumuloClient client, ModificationRequestBase request, Map> mutableFieldList, Set userAuths, - String user) throws Exception { - this.process(client, request, mutableFieldList, userAuths, user, false, true); + ProxiedUserDetails userDetails) throws Exception { + this.process(client, request, mutableFieldList, userAuths, userDetails, false, true); } public void process(AccumuloClient client, ModificationRequestBase request, Map> mutableFieldList, Set userAuths, - String user, boolean purgeIndex, boolean insertHistory) throws Exception { + ProxiedUserDetails userDetails, boolean purgeIndex, boolean insertHistory) throws Exception { + + String user = userDetails.getShortName(); DefaultModificationRequest mr = DefaultModificationRequest.class.cast(request); @@ -703,7 +708,6 @@ protected void delete(MultiTableBatchWriter writer, AccumuloClient client, Set findMatchingEventUuid(String uuid, String uuidType, Set userAuths, ModificationOperation operation) - throws Exception { - + protected EventBase findMatchingEventUuid(String uuid, String uuidType, Set userAuths, ModificationOperation operation, + ProxiedUserDetails userDetails) throws Exception { String field = operation.getFieldName(); String columnVisibility = operation.getColumnVisibility(); @@ -1008,8 +1011,7 @@ protected EventBase findMatchingEventUuid(String uuid, String uuidType, Set String logicName = "LuceneUUIDEventQuery"; EventBase e = null; - QueryExecutorBean queryService = this.getQueryService(); - + ModificationQueryService queryService = this.getQueryService(userDetails); String id = null; HashSet auths = new HashSet<>(); for (Authorizations a : userAuths) @@ -1019,11 +1021,10 @@ protected EventBase findMatchingEventUuid(String uuid, String uuidType, Set expiration = new 
Date(expiration.getTime() + (1000 * 60 * 60 * 24)); try { - MultivaluedMap paramsMap = new MultivaluedMapImpl<>(); - paramsMap.putAll(QueryParametersImpl.paramsToMap(logicName, query.toString(), "Query to find matching records for metadata modification", - columnVisibility, new Date(0), new Date(), StringUtils.join(auths, ','), expiration, 2, -1, null, QueryPersistence.TRANSIENT, null, - queryOptions.toString(), false)); - GenericResponse createResponse = queryService.createQuery(logicName, paramsMap); + GenericResponse createResponse = queryService.createQuery(logicName, + DefaultQueryParameters.paramsToMap(logicName, query.toString(), "Query to find matching records for metadata modification", + columnVisibility, new Date(0), new Date(), StringUtils.join(auths, ','), expiration, 2, -1, null, + QueryPersistence.TRANSIENT, null, queryOptions.toString(), false)); id = createResponse.getResult(); BaseQueryResponse response = queryService.next(id); @@ -1120,29 +1121,38 @@ protected static class FieldIndexIterable implements Iterable, AutoCloseabl public FieldIndexIterable(AccumuloClient client, String shardTable, String eventUid, String datatype, Set userAuths, List ranges) throws TableNotFoundException { - scanner = ScannerHelper.createBatchScanner(client, shardTable, userAuths, ranges.size()); - scanner.setRanges(ranges); - Map options = new HashMap(); - options.put(FieldIndexDocumentFilter.DATA_TYPE_OPT, datatype); - options.put(FieldIndexDocumentFilter.EVENT_UID_OPT, eventUid); - IteratorSetting settings = new IteratorSetting(100, FieldIndexDocumentFilter.class, options); - scanner.addScanIterator(settings); + if (!ranges.isEmpty()) { + scanner = ScannerHelper.createBatchScanner(client, shardTable, userAuths, ranges.size()); + scanner.setRanges(ranges); + Map options = new HashMap(); + options.put(FieldIndexDocumentFilter.DATA_TYPE_OPT, datatype); + options.put(FieldIndexDocumentFilter.EVENT_UID_OPT, eventUid); + IteratorSetting settings = new IteratorSetting(100, 
FieldIndexDocumentFilter.class, options); + scanner.addScanIterator(settings); + } } @Override public Iterator iterator() { - return Iterators.transform(scanner.iterator(), new Function,Key>() { - @Nullable - @Override - public Key apply(@Nullable Entry keyValueEntry) { - return keyValueEntry.getKey(); - } - }); + if (scanner != null) { + return Iterators.transform(scanner.iterator(), new Function,Key>() { + @Nullable + @Override + public Key apply(@Nullable Entry keyValueEntry) { + return keyValueEntry.getKey(); + } + }); + } else { + List list = Collections.emptyList(); + return list.iterator(); + } } @Override public void close() throws Exception { - scanner.close(); + if (scanner != null) { + scanner.close(); + } } } diff --git a/warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataUUIDHandler.java b/core/modification/src/main/java/datawave/modification/MutableMetadataUUIDHandler.java similarity index 92% rename from warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataUUIDHandler.java rename to core/modification/src/main/java/datawave/modification/MutableMetadataUUIDHandler.java index fc4a32ba8ba..23e28276552 100644 --- a/warehouse/query-core/src/main/java/datawave/webservice/modification/MutableMetadataUUIDHandler.java +++ b/core/modification/src/main/java/datawave/modification/MutableMetadataUUIDHandler.java @@ -1,4 +1,4 @@ -package datawave.webservice.modification; +package datawave.modification; import java.util.ArrayList; import java.util.Collections; @@ -16,14 +16,20 @@ import org.apache.log4j.Logger; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.exception.BadRequestException; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.modification.DefaultModificationRequest; +import datawave.webservice.modification.DefaultUUIDModificationRequest; +import datawave.webservice.modification.EventIdentifier; +import 
datawave.webservice.modification.ModificationEvent; +import datawave.webservice.modification.ModificationOperation; import datawave.webservice.modification.ModificationOperation.OPERATIONMODE; +import datawave.webservice.modification.ModificationOperationImpl; +import datawave.webservice.modification.ModificationRequestBase; import datawave.webservice.modification.ModificationRequestBase.MODE; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; -import datawave.webservice.result.VoidResponse; /** * Class that handles requests for modification requests (INSERT, UPDATE, DELETE, REPLACE) for metadata. From a DefaultUUIDModificationRequest it performs
@@ -207,8 +213,10 @@ public void ResetValues() { @Override public void process(AccumuloClient client, ModificationRequestBase request, Map> mutableFieldList, Set userAuths, - String user) throws BadRequestException, AccumuloException, AccumuloSecurityException, TableNotFoundException, ExecutionException { - VoidResponse response = new VoidResponse(); + ProxiedUserDetails userDetails) + throws DatawaveModificationException, AccumuloException, AccumuloSecurityException, TableNotFoundException, ExecutionException { + String user = userDetails.getShortName(); + ArrayList exceptions = new ArrayList<>(); MetadataHelper mHelper = getMetadataHelper(client); @@ -246,8 +254,7 @@ public void process(AccumuloClient client, ModificationRequestBase request, Map< } // perform the lookupUUID - EventBase> idEvent = findMatchingEventUuid(event.getId(), event.getIdType(), userAuths, operation); - + EventBase> idEvent = findMatchingEventUuid(event.getId(), event.getIdType(), userAuths, operation, userDetails); // extract contents from lookupUUID necessary for modification List> fields = idEvent.getFields(); if (operation.getOldFieldValue() != null) @@ -321,14 +328,7 @@ else if (f.getName().equalsIgnoreCase(event.getIdType()) && fieldCount < 1 && co if (log != null) log.trace("Submitting request to MutableMetadataHandler from MutableMetadataUUIDHandler: " + modReq); - // make sure user isn't null or empty - if (eventUser == null || eventUser.equals("")) { - if (log != null) - log.trace("No user provided for event. Using caller: " + user); - super.process(client, modReq, mutableFieldList, userAuths, user); - } else { - super.process(client, modReq, mutableFieldList, userAuths, event.getUser()); - } + super.process(client, modReq, mutableFieldList, userAuths, userDetails); } } // log exceptions that occur for each modification request. 
Let as many requests work as possible before returning @@ -353,12 +353,16 @@ else if (f.getName().equalsIgnoreCase(event.getIdType()) && fieldCount < 1 && co // If any errors occurred, return them in the response to the user if (!exceptions.isEmpty()) { - for (Exception e : exceptions) { - QueryException qe = new QueryException(DatawaveErrorCode.MODIFICATION_ERROR, e); - response.addException(qe.getBottomQueryException()); + if (exceptions.size() == 1) { + throw new DatawaveModificationException(new QueryException(DatawaveErrorCode.MODIFICATION_ERROR, exceptions.get(0))); + } else { + DatawaveModificationException exception = new DatawaveModificationException(new QueryException(DatawaveErrorCode.MODIFICATION_ERROR)); + for (Exception e : exceptions) { + QueryException qe = new QueryException(DatawaveErrorCode.MODIFICATION_ERROR, e); + exception.addException(qe); + } + throw exception; } - QueryException e = new QueryException(DatawaveErrorCode.MODIFICATION_ERROR); - throw new BadRequestException(e, response); } } diff --git a/core/modification/src/main/java/datawave/modification/cache/ModificationCache.java b/core/modification/src/main/java/datawave/modification/cache/ModificationCache.java new file mode 100644 index 00000000000..2258be9e061 --- /dev/null +++ b/core/modification/src/main/java/datawave/modification/cache/ModificationCache.java @@ -0,0 +1,120 @@ +package datawave.modification.cache; + +import static datawave.core.common.connection.AccumuloConnectionFactory.Priority; + +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Map.Entry; +import java.util.Set; + +import org.apache.accumulo.core.client.AccumuloClient; +import org.apache.accumulo.core.client.BatchScanner; +import org.apache.accumulo.core.data.Key; +import org.apache.accumulo.core.data.Range; +import org.apache.accumulo.core.data.Value; +import org.apache.hadoop.io.Text; +import org.apache.log4j.Logger; + +import 
datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.modification.configuration.ModificationConfiguration; +import datawave.security.util.ScannerHelper; + +public class ModificationCache { + private static Logger log = Logger.getLogger(ModificationCache.class); + + private static final Text MODIFICATION_COLUMN = new Text("m"); + + private Map> cache = new HashMap<>(); + + private final AccumuloConnectionFactory connectionFactory; + + private ModificationConfiguration modificationConfiguration; + + public ModificationCache(AccumuloConnectionFactory connectionFactory, ModificationConfiguration modificationConfiguration) { + this.connectionFactory = connectionFactory; + this.modificationConfiguration = modificationConfiguration; + if (modificationConfiguration != null) { + reloadMutableFieldCache(); + } else { + log.error("modificationConfiguration was null"); + } + } + + /** + * Reload the cache + */ + public void reloadMutableFieldCache() { + Map> cache = new HashMap<>(); + AccumuloClient client = null; + BatchScanner s = null; + try { + Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); + log.trace("getting mutable list from table " + this.modificationConfiguration.getTableName()); + log.trace("modificationConfiguration.getPoolName() = " + modificationConfiguration.getPoolName()); + client = connectionFactory.getClient(null, null, modificationConfiguration.getPoolName(), Priority.ADMIN, trackingMap); + log.trace("got connection"); + s = ScannerHelper.createBatchScanner(client, this.modificationConfiguration.getTableName(), + Collections.singleton(client.securityOperations().getUserAuthorizations(client.whoami())), 8); + s.setRanges(Collections.singleton(new Range())); + s.fetchColumnFamily(MODIFICATION_COLUMN); + for (Entry e : s) { + // Field name is in the row and datatype is in the colq. 
+ String datatype = e.getKey().getColumnQualifier().toString(); + log.trace("datatype = " + datatype); + String fieldName = e.getKey().getRow().toString(); + log.trace("fieldname = " + fieldName); + if (null == cache.get(datatype)) + cache.put(datatype, new HashSet<>()); + cache.get(datatype).add(fieldName); + } + log.trace("cache size = " + cache.size()); + for (Entry> e : cache.entrySet()) { + log.trace("datatype = " + e.getKey() + ", fieldcount = " + e.getValue().size()); + } + // now atomically replace the cache + this.cache = cache; + } catch (Exception e) { + log.error("Error during initialization of ModificationCacheBean", e); + throw new RuntimeException("Error during initialization of ModificationCacheBean", e); + } finally { + if (null != s) + s.close(); + try { + connectionFactory.returnClient(client); + } catch (Exception e) { + log.error("Error returning connection to pool", e); + } + } + } + + /** + * List the mutable fields in the cache + */ + public String listMutableFields() { + return cache.toString(); + } + + /** + * Check to see if field for specified datatype is mutable + * + * @param datatype + * @param field + * name of field + * @return true if field is mutable for the given datatype + */ + public boolean isFieldMutable(String datatype, String field) { + log.trace("datatype = " + datatype + ", field = " + field); + return cache.get(datatype).contains(field); + } + + public Map> getCachedMutableFieldList() { + log.trace("cache = " + cache); + return Collections.unmodifiableMap(cache); + } + + public ModificationConfiguration getModificationConfiguration() { + return modificationConfiguration; + } +} diff --git a/warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationConfiguration.java b/core/modification/src/main/java/datawave/modification/configuration/ModificationConfiguration.java similarity index 95% rename from 
warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationConfiguration.java rename to core/modification/src/main/java/datawave/modification/configuration/ModificationConfiguration.java index bd23b94c27a..218ec2becea 100644 --- a/warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationConfiguration.java +++ b/core/modification/src/main/java/datawave/modification/configuration/ModificationConfiguration.java @@ -1,4 +1,4 @@ -package datawave.webservice.modification.configuration; +package datawave.modification.configuration; import java.util.Map; diff --git a/warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationServiceConfiguration.java b/core/modification/src/main/java/datawave/modification/configuration/ModificationServiceConfiguration.java similarity index 70% rename from warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationServiceConfiguration.java rename to core/modification/src/main/java/datawave/modification/configuration/ModificationServiceConfiguration.java index 271d087a9fa..ccc5acf828a 100644 --- a/warehouse/query-core/src/main/java/datawave/webservice/modification/configuration/ModificationServiceConfiguration.java +++ b/core/modification/src/main/java/datawave/modification/configuration/ModificationServiceConfiguration.java @@ -1,4 +1,4 @@ -package datawave.webservice.modification.configuration; +package datawave.modification.configuration; import java.util.List; import java.util.Map; @@ -7,9 +7,10 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.modification.query.ModificationQueryService; +import datawave.security.authorization.ProxiedUserDetails; import 
datawave.webservice.modification.ModificationRequestBase; -import datawave.webservice.query.runner.QueryExecutorBean; public abstract class ModificationServiceConfiguration { @@ -17,7 +18,7 @@ public abstract class ModificationServiceConfiguration { protected String description = null; protected List authorizedRoles = null; - protected QueryExecutorBean queryService = null; + protected ModificationQueryService.ModificationQueryServiceFactory queryServiceFactory = null; protected List securityMarkingExemptFields = null; public String getDescription() { @@ -47,14 +48,18 @@ public void setSecurityMarkingExemptFields(List securityMarkingExemptFie /** * Handle to query service in case the modification service needs to run queries. * - * @return RemoteQueryExecutor + * @return ModificationQueryService */ - public QueryExecutorBean getQueryService() { - return queryService; + public ModificationQueryService getQueryService(ProxiedUserDetails userDetails) { + return queryServiceFactory.createService(userDetails); } - public void setQueryService(QueryExecutorBean queryService) { - this.queryService = queryService; + public ModificationQueryService.ModificationQueryServiceFactory getQueryServiceFactory() { + return queryServiceFactory; + } + + public void setQueryServiceFactory(ModificationQueryService.ModificationQueryServiceFactory queryServiceFactory) { + this.queryServiceFactory = queryServiceFactory; } /** @@ -74,13 +79,13 @@ public void setQueryService(QueryExecutorBean queryService) { * map of datatype to set of fields that are mutable * @param userAuths * authorizations of user making the call - * @param user - * user identifier + * @param userDetails + * user details * @throws Exception * if there is an issue */ public abstract void process(AccumuloClient client, ModificationRequestBase request, Map> mutableFieldList, - Set userAuths, String user) throws Exception; + Set userAuths, ProxiedUserDetails userDetails) throws Exception; /** * diff --git 
a/core/modification/src/main/java/datawave/modification/query/ModificationQueryService.java b/core/modification/src/main/java/datawave/modification/query/ModificationQueryService.java new file mode 100644 index 00000000000..8cbba14ca47 --- /dev/null +++ b/core/modification/src/main/java/datawave/modification/query/ModificationQueryService.java @@ -0,0 +1,21 @@ +package datawave.modification.query; + +import java.util.List; +import java.util.Map; + +import datawave.query.exceptions.DatawaveQueryException; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.result.BaseQueryResponse; +import datawave.webservice.result.GenericResponse; + +public interface ModificationQueryService { + GenericResponse createQuery(String logicName, Map> paramsToMap) throws DatawaveQueryException; + + BaseQueryResponse next(String id) throws DatawaveQueryException; + + void close(String id) throws DatawaveQueryException; + + public interface ModificationQueryServiceFactory { + ModificationQueryService createService(ProxiedUserDetails userDetails); + } +} diff --git a/core/pom.xml b/core/pom.xml index 7fef6cb47d5..a4980953cb3 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -4,13 +4,20 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT gov.nsa.datawave.core datawave-core-parent pom ${project.artifactId} + cached-results + common + common-util + connection-pool + map-reduce + modification + query utils @@ -38,6 +45,11 @@ junit test + + org.junit.jupiter + junit-jupiter-engine + test + diff --git a/core/query/pom.xml b/core/query/pom.xml new file mode 100644 index 00000000000..cb4e29e6556 --- /dev/null +++ b/core/query/pom.xml @@ -0,0 +1,102 @@ + + + 4.0.0 + + gov.nsa.datawave.core + datawave-core-parent + 7.0.0-SNAPSHOT + + datawave-core-query + ${project.artifactId} + + + + gov.nsa.datawave.core + datawave-core-cached-results + ${project.version} + + + gov.nsa.datawave.core + datawave-core-common + ${project.version} + + + 
gov.nsa.datawave.core + datawave-core-common-util + ${project.version} + + + gov.nsa.datawave.core + datawave-core-connection-pool + ${project.version} + + + gov.nsa.datawave.microservice + audit-api + + + gov.nsa.datawave.microservice + base-rest-responses + + + gov.nsa.datawave.microservice + query-metric-api + + + gov.nsa.datawave.webservices + datawave-ws-client + ${project.version} + + + org.slf4j + * + + + log4j + log4j + + + + + junit + junit + test + + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.vintage + junit-vintage-engine + test + + + + + + org.apache.maven.plugins + maven-jar-plugin + + + META-INF/beans.xml + META-INF/jboss-ejb3.xml + + + + + jboss + + jar + + + jboss + + + + + + + + diff --git a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableLogic.java b/core/query/src/main/java/datawave/core/query/cachedresults/CacheableLogic.java similarity index 51% rename from web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableLogic.java rename to core/query/src/main/java/datawave/core/query/cachedresults/CacheableLogic.java index 85a5fb82972..bb145dac89d 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cachedresults/CacheableLogic.java +++ b/core/query/src/main/java/datawave/core/query/cachedresults/CacheableLogic.java @@ -1,15 +1,13 @@ -package datawave.webservice.query.cachedresults; - -import java.util.List; +package datawave.core.query.cachedresults; +import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; public interface CacheableLogic { - List writeToCache(Object o) throws QueryException; + CacheableQueryRow writeToCache(Object o) throws QueryException; // CachedRowSet is passed pointing to the current row // This method must create the objects that will later be passed to createResponse - List readFromCache(List row); - + Object readFromCache(CacheableQueryRow row); } diff --git 
a/core/query/src/main/java/datawave/core/query/configuration/CheckpointableQueryConfiguration.java b/core/query/src/main/java/datawave/core/query/configuration/CheckpointableQueryConfiguration.java new file mode 100644 index 00000000000..8a243afcbaa --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/configuration/CheckpointableQueryConfiguration.java @@ -0,0 +1,12 @@ +package datawave.core.query.configuration; + +public interface CheckpointableQueryConfiguration { + + /** + * Create an instance of this configuration suitable for a checkpoint. Basically ensure that everything is copied that is required to continue execution of + * the query post create. + * + * @return The configuration + */ + GenericQueryConfiguration checkpoint(); +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/configuration/GenericQueryConfiguration.java b/core/query/src/main/java/datawave/core/query/configuration/GenericQueryConfiguration.java similarity index 53% rename from web-services/query/src/main/java/datawave/webservice/query/configuration/GenericQueryConfiguration.java rename to core/query/src/main/java/datawave/core/query/configuration/GenericQueryConfiguration.java index ac8cfc96bd1..a8dd9cfbd6e 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/configuration/GenericQueryConfiguration.java +++ b/core/query/src/main/java/datawave/core/query/configuration/GenericQueryConfiguration.java @@ -1,22 +1,25 @@ -package datawave.webservice.query.configuration; +package datawave.core.query.configuration; +import java.io.Serializable; +import java.nio.charset.StandardCharsets; +import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.Iterator; +import java.util.Objects; import java.util.Set; +import java.util.stream.Collectors; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.security.Authorizations; 
-import org.apache.log4j.Logger; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.collect.Iterators; +import datawave.core.common.util.EnvProvider; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.microservice.query.Query; import datawave.util.TableName; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.util.EnvProvider; /** *

@@ -29,13 +32,19 @@ *

* */ -public class GenericQueryConfiguration { +public class GenericQueryConfiguration implements Serializable { + // is this execution expected to be checkpointable (changes how we allocate ranges to scanners) + private boolean checkpointable = false; - private static final Logger log = ThreadConfigurableLogger.getLogger(GenericQueryConfiguration.class); - - @JsonIgnore private transient AccumuloClient client = null; + + // This is just used for (de)serialization + private Set auths = Collections.emptySet(); + private Set authorizations = Collections.singleton(Authorizations.EMPTY); + + private Query query = null; + // Leave in a top-level query for backwards-compatibility purposes private String queryString = null; @@ -50,14 +59,17 @@ public class GenericQueryConfiguration { // Table name private String tableName = TableName.SHARD; - @JsonIgnore - private transient Iterator queries = Collections.emptyIterator(); + private Collection queries = Collections.emptyList(); + private transient Iterator queriesIter = Collections.emptyIterator(); protected boolean bypassAccumulo; // use a value like 'env:PASS' to pull from the environment private String accumuloPassword = ""; + // Whether or not this query emits every result or performs some kind of result reduction + protected boolean reduceResults = false; + /** * Empty default constructor */ @@ -76,6 +88,8 @@ public GenericQueryConfiguration(BaseQueryLogic configuredLogic) { } public GenericQueryConfiguration(GenericQueryConfiguration genericConfig) { + this.setQuery(genericConfig.getQuery()); + this.setCheckpointable(genericConfig.isCheckpointable()); this.setBaseIteratorPriority(genericConfig.getBaseIteratorPriority()); this.setBypassAccumulo(genericConfig.getBypassAccumulo()); this.setAccumuloPassword(genericConfig.getAccumuloPassword()); @@ -85,27 +99,49 @@ public GenericQueryConfiguration(GenericQueryConfiguration genericConfig) { this.setEndDate(genericConfig.getEndDate()); 
this.setMaxWork(genericConfig.getMaxWork()); this.setQueries(genericConfig.getQueries()); + this.setQueriesIter(genericConfig.getQueriesIter()); this.setQueryString(genericConfig.getQueryString()); this.setTableName(genericConfig.getTableName()); + this.setReduceResults(genericConfig.isReduceResults()); + } + + public Collection getQueries() { + return queries; + } + + public void setQueries(Collection queries) { + this.queries = queries; } /** * Return the configured {@code Iterator} * - * @return the configured iterator + * @return An iterator of query ranges */ - public Iterator getQueries() { - return Iterators.unmodifiableIterator(this.queries); + public Iterator getQueriesIter() { + if ((queriesIter == null || !queriesIter.hasNext()) && queries != null) { + return Iterators.unmodifiableIterator(queries.iterator()); + } else { + return Iterators.unmodifiableIterator(this.queriesIter); + } } /** * Set the queries to be run. * - * @param queries - * the queries + * @param queriesIter + * An iterator of query ranges */ - public void setQueries(Iterator queries) { - this.queries = queries; + public void setQueriesIter(Iterator queriesIter) { + this.queriesIter = queriesIter; + } + + public boolean isCheckpointable() { + return checkpointable; + } + + public void setCheckpointable(boolean checkpointable) { + this.checkpointable = checkpointable; } public AccumuloClient getClient() { @@ -116,6 +152,14 @@ public void setClient(AccumuloClient client) { this.client = client; } + public Query getQuery() { + return query; + } + + public void setQuery(Query query) { + this.query = query; + } + public void setQueryString(String query) { this.queryString = query; } @@ -124,12 +168,32 @@ public String getQueryString() { return queryString; } + public Set getAuths() { + if (auths == null && authorizations != null) { + auths = authorizations.stream().flatMap(a -> a.getAuthorizations().stream()).map(b -> new String(b, StandardCharsets.UTF_8)) + .collect(Collectors.toSet()); + } 
+ return auths; + } + + public void setAuths(Set auths) { + this.auths = auths; + this.authorizations = null; + getAuthorizations(); + } + public Set getAuthorizations() { + if (authorizations == null && auths != null) { + authorizations = Collections + .singleton(new Authorizations(auths.stream().map(a -> a.getBytes(StandardCharsets.UTF_8)).collect(Collectors.toList()))); + } return authorizations; } - public void setAuthorizations(Set auths) { - this.authorizations = auths; + public void setAuthorizations(Set authorizations) { + this.authorizations = authorizations; + this.auths = null; + getAuths(); } public int getBaseIteratorPriority() { @@ -187,6 +251,14 @@ public String getAccumuloPassword() { return this.accumuloPassword; } + public boolean isReduceResults() { + return reduceResults; + } + + public void setReduceResults(boolean reduceResults) { + this.reduceResults = reduceResults; + } + /** * Sets configured password for accumulo access * @@ -219,10 +291,32 @@ public boolean canRunQuery() { } // At least one QueryData was provided - if (null == this.queries) { + if (null == this.getQueriesIter()) { return false; } return true; } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + GenericQueryConfiguration that = (GenericQueryConfiguration) o; + return isCheckpointable() == that.isCheckpointable() && getBaseIteratorPriority() == that.getBaseIteratorPriority() + && getBypassAccumulo() == that.getBypassAccumulo() && Objects.equals(getAuthorizations(), that.getAuthorizations()) + && Objects.equals(getQuery(), that.getQuery()) && Objects.equals(getQueryString(), that.getQueryString()) + && Objects.equals(getBeginDate(), that.getBeginDate()) && Objects.equals(getEndDate(), that.getEndDate()) + && Objects.equals(getMaxWork(), that.getMaxWork()) && Objects.equals(getTableName(), that.getTableName()) + && Objects.equals(getQueries(), that.getQueries()) && 
Objects.equals(getAccumuloPassword(), that.getAccumuloPassword()) + && Objects.equals(isReduceResults(), that.isReduceResults()); + } + + @Override + public int hashCode() { + return Objects.hash(isCheckpointable(), getAuthorizations(), getQuery(), getQueryString(), getBeginDate(), getEndDate(), getMaxWork(), + getBaseIteratorPriority(), getTableName(), getQueries(), getBypassAccumulo(), getAccumuloPassword(), isReduceResults()); + } } diff --git a/core/query/src/main/java/datawave/core/query/configuration/QueryData.java b/core/query/src/main/java/datawave/core/query/configuration/QueryData.java new file mode 100644 index 00000000000..564e3eabea7 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/configuration/QueryData.java @@ -0,0 +1,585 @@ +package datawave.core.query.configuration; + +import java.io.Externalizable; +import java.io.IOException; +import java.io.ObjectInput; +import java.io.ObjectOutput; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; + +import org.apache.accumulo.core.client.IteratorSetting; +import org.apache.accumulo.core.data.Key; +import org.apache.accumulo.core.data.Range; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; +import org.apache.hadoop.io.Text; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.ObjectCodec; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.annotation.JsonSerialize; +import 
com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.ser.std.StdSerializer; + +/** + * Class to encapsulate all required information to run a query. + */ +public class QueryData implements ResultContext, Externalizable { + private String tableName; + private String query; + @JsonSerialize(using = RangeListSerializer.class) + @JsonDeserialize(using = RangeListDeserializer.class) + private Collection ranges = new HashSet<>(); + private Collection columnFamilies = new HashSet<>(); + @JsonDeserialize(using = IteratorSettingListDeserializer.class) + private List settings = new ArrayList<>(); + @JsonSerialize(using = KeySerializer.class) + @JsonDeserialize(using = KeyDeserializer.class) + private Key lastResult; + private boolean rebuildHashCode = true; + private int hashCode = -1; + boolean finished = false; + + public QueryData() { + // empty constructor + } + + /** + * Full constructor + * + * @param tableName + * the table name + * @param query + * the query string + * @param ranges + * a collection of ranges + * @param columnFamilies + * a collection of column families + * @param settings + * a list of IteratorSetting + */ + public QueryData(String tableName, String query, Collection ranges, Collection columnFamilies, List settings) { + this.tableName = tableName; + this.query = query; + this.ranges = ranges; + this.columnFamilies = columnFamilies; + this.settings = settings; + } + + /** + * Copy constructor + * + * @param other + * another instance of QueryData + */ + public QueryData(QueryData other) { + this.tableName = other.tableName; + this.query = other.query; + this.ranges = new HashSet<>(other.ranges); + this.columnFamilies = new HashSet<>(other.columnFamilies); + this.settings = new ArrayList<>(other.settings); + this.hashCode = other.hashCode; + this.rebuildHashCode = other.rebuildHashCode; + this.lastResult = other.lastResult; + this.finished = other.finished; + } + + @Deprecated(since = "6.5.0", forRemoval = 
true) + public QueryData(String tableName, String query, Collection ranges, List settings) { + setTableName(tableName); + setQuery(query); + setRanges(ranges); + setSettings(settings); + } + + /** + * Weak copy constructor that updates the ranges + * + * @param other + * another QueryData + * @param ranges + * a collection of updated ranges + * @deprecated + */ + @Deprecated(since = "6.5.0", forRemoval = true) + public QueryData(QueryData other, Collection ranges) { + this(other); + setRanges(ranges); + } + + @Deprecated(since = "6.5.0", forRemoval = true) + public QueryData(String tableName, String queryString, List ranges, List settings, Collection columnFamilies) { + this(tableName, queryString, ranges, settings); + this.columnFamilies.addAll(columnFamilies); + } + + // builder style methods + + public QueryData withTableName(String tableName) { + this.tableName = tableName; + resetHashCode(); + return this; + } + + public QueryData withQuery(String query) { + this.query = query; + resetHashCode(); + return this; + } + + public QueryData withRanges(Collection ranges) { + this.ranges = ranges; + resetHashCode(); + return this; + } + + public QueryData withColumnFamilies(Collection columnFamilies) { + this.columnFamilies = columnFamilies; + resetHashCode(); + return this; + } + + public QueryData withSettings(List settings) { + this.settings = settings; + resetHashCode(); + return this; + } + + public void setSettings(List settings) { + this.settings.clear(); + if (settings != null) { + this.settings.addAll(settings); + } + resetHashCode(); + } + + public List getSettings() { + return settings; + } + + public void setQuery(String query) { + this.query = query; + resetHashCode(); + } + + public String getQuery() { + return query; + } + + public String getTableName() { + return tableName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + resetHashCode(); + } + + public Collection getRanges() { + if (isFinished()) { + return 
Collections.emptySet(); + } else if (lastResult != null) { + List newRanges = new ArrayList<>(); + for (Range range : ranges) { + if (range.contains(lastResult)) { + newRanges.add(new Range(lastResult, false, range.getEndKey(), range.isEndKeyInclusive())); + } else { + newRanges.add(range); + } + } + return newRanges; + } + return ranges; + } + + public Collection getColumnFamilies() { + return columnFamilies; + } + + public void setColumnFamilies(Collection columnFamilies) { + this.columnFamilies.clear(); + if (columnFamilies != null) { + this.columnFamilies.addAll(columnFamilies); + } + resetHashCode(); + } + + public void addColumnFamily(String cf) { + this.columnFamilies.add(cf); + resetHashCode(); + } + + public void addColumnFamily(Text cf) { + this.columnFamilies.add(cf.toString()); + resetHashCode(); + } + + public void setRanges(Collection ranges) { + this.ranges.clear(); + if (null != ranges) { + this.ranges.addAll(ranges); + } + resetHashCode(); + } + + public void addRange(Range range) { + this.ranges.add(range); + resetHashCode(); + } + + public void addIterator(IteratorSetting cfg) { + this.settings.add(cfg); + resetHashCode(); + } + + public void setLastResult(Key result) { + this.lastResult = result; + if (this.lastResult == null) { + this.finished = true; + } + resetHashCode(); + } + + public boolean isFinished() { + return this.finished; + } + + public Key getLastResult() { + return lastResult; + } + + @Override + public String toString() { + // @formatter:off + return new StringBuilder() + .append("Query: '").append(this.query) + .append("', Ranges: ").append(this.ranges) + .append(", lastResult: ").append(this.lastResult) + .append(", Settings: ").append(this.settings) + .toString(); + // @formatter:on + } + + @Override + public void writeExternal(ObjectOutput out) throws IOException { + out.writeUTF(tableName); + out.writeInt(settings.size()); + for (IteratorSetting setting : settings) { + setting.write(out); + } + if (query != null) { + 
out.writeBoolean(true); + out.writeUTF(query); + } else { + out.writeBoolean(false); + } + out.writeInt(ranges.size()); + for (Range range : ranges) { + range.write(out); + } + out.writeInt(columnFamilies.size()); + for (String cf : columnFamilies) { + out.writeUTF(cf); + } + if (lastResult != null) { + out.writeBoolean(true); + lastResult.write(out); + } else { + out.writeBoolean(false); + } + out.writeBoolean(finished); + } + + @Override + public void readExternal(ObjectInput in) throws IOException { + tableName = in.readUTF(); + settings.clear(); + int count = in.readInt(); + for (int i = 0; i < count; i++) { + settings.add(new IteratorSetting(in)); + } + boolean exists = in.readBoolean(); + if (exists) { + query = in.readUTF(); + } + ranges.clear(); + count = in.readInt(); + for (int i = 0; i < count; i++) { + Range range = new Range(); + range.readFields(in); + ranges.add(range); + } + count = in.readInt(); + for (int i = 0; i < count; i++) { + columnFamilies.add(in.readUTF()); + } + exists = in.readBoolean(); + if (exists) { + lastResult = new Key(); + lastResult.readFields(in); + } + finished = in.readBoolean(); + } + + public QueryData(ObjectInput in) throws IOException { + readExternal(in); + } + + @Override + public int hashCode() { + if (rebuildHashCode) { + // @formatter:off + hashCode = new HashCodeBuilder() + .append(tableName) + .append(query) + .append(ranges) + .append(columnFamilies) + .append(settings) + .append(lastResult) + .append(finished) + .hashCode(); + rebuildHashCode = false; + // @formatter:on + } + return hashCode; + } + + public boolean equals(Object o) { + if (o instanceof QueryData) { + QueryData other = (QueryData) o; + // @formatter:off + return new EqualsBuilder() + .append(tableName, other.tableName) + .append(query, other.query) + .append(ranges, other.ranges) + .append(columnFamilies, other.columnFamilies) + .append(settings, other.settings) + .append(lastResult, other.lastResult) + .append(finished, other.finished) + 
.isEquals(); + // @formatter:on + } + return false; + } + + /** + * Method to reset the hashcode when an internal variable is updated + */ + private void resetHashCode() { + rebuildHashCode = true; + } + + /** + * A json deserializer for a list of IteratorSetting which handles the json deserialization issues. The accumulo IteratorSetting does not have a default + * constructor. + */ + public static class IteratorSettingListDeserializer extends StdDeserializer> { + private ObjectMapper mapper = new ObjectMapper(); + + public IteratorSettingListDeserializer() { + this(null); + } + + public IteratorSettingListDeserializer(Class valueClass) { + super(valueClass); + } + + @Override + public List deserialize(JsonParser parser, DeserializationContext deserializer) throws IOException, JsonProcessingException { + List list = new ArrayList<>(); + ObjectCodec codec = parser.getCodec(); + JsonNode node = codec.readTree(parser); + + for (int i = 0; i < node.size(); i++) { + list.add(getIteratorSetting(node.get(i))); + } + + return list; + } + + private IteratorSetting getIteratorSetting(JsonNode node) throws JsonProcessingException { + int priority = -1; + String name = null; + String iteratorClass = null; + Map options = null; + JsonNode child = node.get("priority"); + if (child != null) { + priority = child.asInt(); + } + child = node.get("name"); + if (child != null) { + name = child.asText(); + } + child = node.get("iteratorClass"); + if (child != null) { + iteratorClass = child.asText(); + } + child = node.get("options"); + if (child == null) { + child = node.get("properties"); + } + if (child != null) { + options = mapper.treeToValue(child, HashMap.class); + } + IteratorSetting setting = new IteratorSetting(priority, name, iteratorClass); + if (options != null) { + setting.addOptions(options); + } + return setting; + } + } + + /** + * A json deserializer for a list of Range which handles the json deserialization issues. 
The accumulo Range and Key classes do not have appropriate + * setters. + */ + public static class RangeListSerializer extends StdSerializer> { + private ObjectMapper mapper = new ObjectMapper(); + + public RangeListSerializer() { + this(null); + } + + public RangeListSerializer(Class> type) { + super(type); + } + + @Override + public void serialize(Collection ranges, JsonGenerator jgen, SerializerProvider provider) throws IOException { + jgen.writeStartArray(ranges == null ? 0 : ranges.size()); + if (ranges != null) { + for (Range range : ranges) { + serialize(range, jgen, provider); + } + } + jgen.writeEndArray(); + } + + public void serialize(Range range, JsonGenerator jgen, SerializerProvider provider) throws IOException { + jgen.writeStartObject(); + if (range.getStartKey() != null) { + jgen.writeFieldName("startKey"); + new KeySerializer().serialize(range.getStartKey(), jgen, provider); + } + jgen.writeBooleanField("startKeyInclusive", range.isStartKeyInclusive()); + if (range.getEndKey() != null) { + jgen.writeFieldName("endKey"); + new KeySerializer().serialize(range.getEndKey(), jgen, provider); + } + jgen.writeBooleanField("endKeyInclusive", range.isEndKeyInclusive()); + jgen.writeEndObject(); + } + } + + /** + * A json deserializer for a list of Range which handles the json deserialization issues. The accumulo Range and Key classes do not have appropriate + * setters. 
+ */ + public static class RangeListDeserializer extends StdDeserializer> { + public RangeListDeserializer() { + this(null); + } + + public RangeListDeserializer(Class valueClass) { + super(valueClass); + } + + @Override + public Collection deserialize(JsonParser parser, DeserializationContext deserializer) throws IOException { + ObjectCodec codec = parser.getCodec(); + JsonNode node = codec.readTree(parser); + return deserialize(node); + } + + public Collection deserialize(JsonNode node) throws IOException { + Collection list = new ArrayList<>(); + for (int i = 0; i < node.size(); i++) { + list.add(getRange(node.get(i))); + } + return list; + } + + private Range getRange(JsonNode node) throws IOException { + JsonNode start = node.get("startKey"); + JsonNode startInclusive = node.get("startKeyInclusive"); + JsonNode end = node.get("endKey"); + JsonNode endInclusive = node.get("endKeyInclusive"); + return new Range(getKey(start), startInclusive.asBoolean(), getKey(end), endInclusive.asBoolean()); + } + + private Key getKey(JsonNode node) throws IOException { + return new KeyDeserializer().deserialize(node); + } + } + + /** + * A json deserializer for a list of Range which handles the json deserialization issues. The accumulo Range and Key classes do not have appropriate + * setters. 
+ */ + public static class KeySerializer extends StdSerializer { + public KeySerializer() { + this(null); + } + + public KeySerializer(Class type) { + super(type); + } + + @Override + public void serialize(Key key, JsonGenerator jgen, SerializerProvider provider) throws IOException { + jgen.writeStartObject(); + jgen.writeBinaryField("row", key.getRowData().getBackingArray()); + jgen.writeBinaryField("cf", key.getColumnFamilyData().getBackingArray()); + jgen.writeBinaryField("cq", key.getColumnQualifierData().getBackingArray()); + jgen.writeBinaryField("cv", key.getColumnVisibility().getBytes()); + jgen.writeNumberField("ts", key.getTimestamp()); + jgen.writeBooleanField("d", key.isDeleted()); + jgen.writeEndObject(); + } + } + + /** + * A json deserializer for a list of Range which handles the json deserialization issues. The accumulo Range and Key classes do not have appropriate + * setters. + */ + public static class KeyDeserializer extends StdDeserializer { + public KeyDeserializer() { + this(null); + } + + public KeyDeserializer(Class type) { + super(type); + } + + @Override + public Key deserialize(JsonParser parser, DeserializationContext deserializer) throws IOException, JsonProcessingException { + ObjectCodec codec = parser.getCodec(); + JsonNode node = codec.readTree(parser); + return deserialize(node); + } + + public Key deserialize(JsonNode node) throws IOException { + if (node == null) { + return null; + } + JsonNode row = node.get("row"); + JsonNode cf = node.get("cf"); + JsonNode cq = node.get("cq"); + JsonNode cv = node.get("cv"); + JsonNode ts = node.get("ts"); + JsonNode d = node.get("d"); + return new Key(row.binaryValue(), cf.binaryValue(), cq.binaryValue(), cv.binaryValue(), ts.longValue(), d.booleanValue()); + } + } +} diff --git a/core/query/src/main/java/datawave/core/query/configuration/Result.java b/core/query/src/main/java/datawave/core/query/configuration/Result.java new file mode 100644 index 00000000000..37930e50c54 --- /dev/null +++ 
b/core/query/src/main/java/datawave/core/query/configuration/Result.java @@ -0,0 +1,109 @@ +package datawave.core.query.configuration; + +import java.util.Iterator; +import java.util.Map; + +import javax.annotation.Nullable; + +import org.apache.accumulo.core.data.Key; +import org.apache.accumulo.core.data.Value; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import com.google.common.base.Function; +import com.google.common.base.Predicate; +import com.google.common.collect.Iterators; +import com.google.common.collect.Maps; + +public class Result implements Map.Entry { + private final T context; + private final Key key; + private Value value; + + public Result(Key k, Value v) { + this(null, k, v); + } + + public Result(T context, Key k, Value v) { + this.context = context; + this.key = k; + this.value = v; + } + + public T getContext() { + return context; + } + + @Override + public Key getKey() { + return key; + } + + @Override + public Value getValue() { + return value; + } + + @Override + public Value setValue(Value value) { + throw new UnsupportedOperationException("This value is immutable"); + } + + @Override + public boolean equals(Object o) { + if (o instanceof Result) { + Result other = (Result) o; + return new EqualsBuilder().append(context, other.context).append(key, other.key).append(value, other.value).isEquals(); + } + return false; + } + + @Override + public int hashCode() { + return new HashCodeBuilder().append(context).append(key).append(value).toHashCode(); + } + + public Map.Entry returnKeyValue() { + Map.Entry entry = (key == null ? null : Maps.immutableEntry(key, value)); + if (context != null) { + context.setLastResult(entry == null ? 
null : entry.getKey()); + } + return entry; + } + + public static Iterator> keyValueIterator(Iterator it) { + return Iterators.filter(Iterators.transform(it, new Function>() { + @Override + public Map.Entry apply(@Nullable Result input) { + if (input == null) { + return null; + } + return input.returnKeyValue(); + } + }), new Predicate>() { + @Override + public boolean apply(@Nullable Map.Entry keyValueEntry) { + return keyValueEntry != null; + } + }); + } + + public static Iterator resultIterator(final ResultContext context, Iterator> it) { + return Iterators.filter(Iterators.transform(it, new Function,Result>() { + @Nullable + @Override + public Result apply(@Nullable Map.Entry keyValueEntry) { + if (keyValueEntry == null) { + return null; + } + return new Result(context, keyValueEntry.getKey(), keyValueEntry.getValue()); + } + }), new Predicate() { + + @Override + public boolean apply(@Nullable Result result) { + return result != null; + } + }); + } +} diff --git a/core/query/src/main/java/datawave/core/query/configuration/ResultContext.java b/core/query/src/main/java/datawave/core/query/configuration/ResultContext.java new file mode 100644 index 00000000000..72cc4e5ea05 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/configuration/ResultContext.java @@ -0,0 +1,15 @@ +package datawave.core.query.configuration; + +import org.apache.accumulo.core.data.Key; + +public interface ResultContext { + /** + * Set the last result returned. Setting a result of null denotes this scan is finished. 
+ * + * @param result + * The last result + */ + void setLastResult(Key result); + + boolean isFinished(); +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardFields.java b/core/query/src/main/java/datawave/core/query/dashboard/DashboardFields.java similarity index 98% rename from web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardFields.java rename to core/query/src/main/java/datawave/core/query/dashboard/DashboardFields.java index 60a423ecf7d..fdf21e7f221 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardFields.java +++ b/core/query/src/main/java/datawave/core/query/dashboard/DashboardFields.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.dashboard; +package datawave.core.query.dashboard; import java.util.Arrays; import java.util.List; diff --git a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardSummary.java b/core/query/src/main/java/datawave/core/query/dashboard/DashboardSummary.java similarity index 99% rename from web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardSummary.java rename to core/query/src/main/java/datawave/core/query/dashboard/DashboardSummary.java index 43490002463..ca650cde5e8 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardSummary.java +++ b/core/query/src/main/java/datawave/core/query/dashboard/DashboardSummary.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.dashboard; +package datawave.core.query.dashboard; import java.util.Date; import java.util.Objects; diff --git a/web-services/query/src/main/java/datawave/webservice/query/exception/EmptyObjectException.java b/core/query/src/main/java/datawave/core/query/exception/EmptyObjectException.java similarity index 81% rename from web-services/query/src/main/java/datawave/webservice/query/exception/EmptyObjectException.java rename to 
core/query/src/main/java/datawave/core/query/exception/EmptyObjectException.java index a85d2bae3cf..4aba8872adf 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/exception/EmptyObjectException.java +++ b/core/query/src/main/java/datawave/core/query/exception/EmptyObjectException.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.exception; +package datawave.core.query.exception; // used when a transformer gets a non-null empty object // and the TransformIterator should call next instead of returning null diff --git a/web-services/query/src/main/java/datawave/webservice/query/iterator/DatawaveTransformIterator.java b/core/query/src/main/java/datawave/core/query/iterator/DatawaveTransformIterator.java similarity index 91% rename from web-services/query/src/main/java/datawave/webservice/query/iterator/DatawaveTransformIterator.java rename to core/query/src/main/java/datawave/core/query/iterator/DatawaveTransformIterator.java index 3e667c77720..05e658369ee 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/iterator/DatawaveTransformIterator.java +++ b/core/query/src/main/java/datawave/core/query/iterator/DatawaveTransformIterator.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.iterator; +package datawave.core.query.iterator; import java.util.Iterator; @@ -6,8 +6,8 @@ import org.apache.commons.collections4.iterators.TransformIterator; import org.apache.log4j.Logger; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.Flushable; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.Flushable; public class DatawaveTransformIterator extends TransformIterator { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/AbstractQueryLogicTransformer.java b/core/query/src/main/java/datawave/core/query/logic/AbstractQueryLogicTransformer.java similarity index 93% rename from 
web-services/query/src/main/java/datawave/webservice/query/logic/AbstractQueryLogicTransformer.java rename to core/query/src/main/java/datawave/core/query/logic/AbstractQueryLogicTransformer.java index 132829b9280..70418539fc1 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/AbstractQueryLogicTransformer.java +++ b/core/query/src/main/java/datawave/core/query/logic/AbstractQueryLogicTransformer.java @@ -1,8 +1,8 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; import java.util.List; -import datawave.webservice.query.cache.ResultsPage; +import datawave.core.query.cache.ResultsPage; import datawave.webservice.result.BaseQueryResponse; public abstract class AbstractQueryLogicTransformer implements QueryLogicTransformer { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/BaseQueryLogic.java similarity index 87% rename from web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogic.java rename to core/query/src/main/java/datawave/core/query/logic/BaseQueryLogic.java index b2249c2eda2..652d02536bb 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogic.java +++ b/core/query/src/main/java/datawave/core/query/logic/BaseQueryLogic.java @@ -1,6 +1,6 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; -import java.security.Principal; +import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -15,14 +15,14 @@ import org.springframework.beans.factory.annotation.Required; import datawave.audit.SelectorExtractor; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import 
datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.webservice.common.audit.Auditor.AuditType; import datawave.webservice.common.connection.AccumuloClientConfiguration; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.iterator.DatawaveTransformIterator; import datawave.webservice.query.result.event.ResponseObjectFactory; public abstract class BaseQueryLogic implements QueryLogic { @@ -34,6 +34,7 @@ public abstract class BaseQueryLogic implements QueryLogic { private Map dnResultLimits = null; private Map systemFromResultLimits = null; protected long maxResults = -1L; + protected int maxConcurrentTasks = -1; protected ScannerBase scanner; @SuppressWarnings("unchecked") protected Iterator iterator = (Iterator) Collections.emptyList().iterator(); @@ -42,8 +43,11 @@ public abstract class BaseQueryLogic implements QueryLogic { private boolean collectQueryMetrics = true; private String _connPoolName; private Set authorizedDNs; - protected Principal principal; - protected RoleManager roleManager; + + protected ProxiedUserDetails currentUser; + protected ProxiedUserDetails serverUser; + + protected Set requiredRoles; protected MarkingFunctions markingFunctions; protected ResponseObjectFactory responseObjectFactory; protected SelectorExtractor selectorExtractor; @@ -76,9 +80,10 @@ public BaseQueryLogic(BaseQueryLogic other) { setCollectQueryMetrics(other.getCollectQueryMetrics()); this.authorizedDNs = other.authorizedDNs; setConnPoolName(other.getConnPoolName()); - setPrincipal(other.getPrincipal()); - setRoleManager(other.getRoleManager()); + setRequiredRoles(other.getRequiredRoles()); setSelectorExtractor(other.getSelectorExtractor()); + setCurrentUser(other.getCurrentUser()); + setServerUser(other.getServerUser()); 
setResponseEnricherBuilder(other.getResponseEnricherBuilder()); } @@ -113,12 +118,28 @@ public void setResponseObjectFactory(ResponseObjectFactory responseObjectFactory this.responseObjectFactory = responseObjectFactory; } - public Principal getPrincipal() { - return principal; + public ProxiedUserDetails getCurrentUser() { + return currentUser; + } + + public void setCurrentUser(ProxiedUserDetails currentUser) { + this.currentUser = currentUser; + } + + public ProxiedUserDetails getServerUser() { + return serverUser; } - public void setPrincipal(Principal principal) { - this.principal = principal; + public void setServerUser(ProxiedUserDetails serverUser) { + this.serverUser = serverUser; + } + + public Set getRequiredRoles() { + return requiredRoles; + } + + public void setRequiredRoles(Set requiredRoles) { + this.requiredRoles = requiredRoles; } @Override @@ -131,6 +152,11 @@ public long getMaxResults() { return this.maxResults; } + @Override + public int getMaxConcurrentTasks() { + return this.maxConcurrentTasks; + } + @Override @Deprecated public long getMaxRowsToScan() { @@ -152,6 +178,11 @@ public void setMaxResults(long maxResults) { this.maxResults = maxResults; } + @Override + public void setMaxConcurrentTasks(int maxConcurrentTasks) { + this.maxConcurrentTasks = maxConcurrentTasks; + } + @Override @Deprecated public void setMaxRowsToScan(long maxRowsToScan) { @@ -207,7 +238,8 @@ public final QueryLogicTransformer getEnrichedTransformer(Query settings) { .withConfig(getConfig()) .withMarkingFunctions(getMarkingFunctions()) .withResponseObjectFactory(responseObjectFactory) - .withPrincipal(getPrincipal()) + .withCurrentUser(getCurrentUser()) + .withServerUser(getServerUser()) .build(); //@formatter:on transformer.setResponseEnricher(enricher); @@ -295,14 +327,6 @@ public void setCollectQueryMetrics(boolean collectQueryMetrics) { this.collectQueryMetrics = collectQueryMetrics; } - public RoleManager getRoleManager() { - return roleManager; - } - - public 
void setRoleManager(RoleManager roleManager) { - this.roleManager = roleManager; - } - /** {@inheritDoc} */ @Override public String getConnPoolName() { @@ -315,13 +339,9 @@ public void setConnPoolName(final String connPoolName) { _connPoolName = connPoolName; } - public boolean canRunQuery() { - return this.canRunQuery(this.getPrincipal()); - } - /** {@inheritDoc} */ - public boolean canRunQuery(Principal principal) { - return this.roleManager == null || this.roleManager.canRunQuery(this, principal); + public boolean canRunQuery(Collection userRoles) { + return this.requiredRoles == null || userRoles.containsAll(requiredRoles); } @Override diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogicTransformer.java b/core/query/src/main/java/datawave/core/query/logic/BaseQueryLogicTransformer.java similarity index 92% rename from web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogicTransformer.java rename to core/query/src/main/java/datawave/core/query/logic/BaseQueryLogicTransformer.java index b552892f20a..d7928802e41 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/BaseQueryLogicTransformer.java +++ b/core/query/src/main/java/datawave/core/query/logic/BaseQueryLogicTransformer.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; import datawave.marking.MarkingFunctions; diff --git a/core/query/src/main/java/datawave/core/query/logic/CheckpointableQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/CheckpointableQueryLogic.java new file mode 100644 index 00000000000..3fefb160178 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/CheckpointableQueryLogic.java @@ -0,0 +1,60 @@ +package datawave.core.query.logic; + +import java.util.List; + +import org.apache.accumulo.core.client.AccumuloClient; + +import datawave.core.query.configuration.GenericQueryConfiguration; + +public interface 
CheckpointableQueryLogic { + + /** + * This will allow us to check if a query logic is actually checkpointable. Even if the query logic supports it, the caller may have to tell the query logic + * that it is going to be checkpointed. + * + * @return true if checkpointable + */ + boolean isCheckpointable(); + + /** + * This will tell the query logic that is is going to be checkpointed. + * + * @param checkpointable + * true if this query logic is to be trated as checkpointable + */ + void setCheckpointable(boolean checkpointable); + + /** + * This can be called at any point to get a checkpoint such that this query logic instance can be torn down to be rebuilt later. + * + * @param queryKey + * - the query key to include in the checkpoint + * @return The query checkpoints + */ + List checkpoint(QueryKey queryKey); + + /** + * This can be called at any point to update a checkpoint with its updated state. This will be called periodically while pulling results for a query task + * handling a previously returned checkpoint. + * + * @param checkpoint + * @return The updated checkpoint + */ + QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint); + + /** + * Implementations use the configuration to setup execution of a portion of their query. getTransformIterator should be used to get the partial results if + * any. 
+ * + * @param client + * - The accumulo connector + * @param config + * - The query configuration + * @param checkpoint + * - the checkpoint + * @throws Exception + * on failure + */ + void setupQuery(AccumuloClient client, GenericQueryConfiguration config, QueryCheckpoint checkpoint) throws Exception; + +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/DelegatingQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/DelegatingQueryLogic.java similarity index 88% rename from web-services/query/src/main/java/datawave/webservice/query/logic/DelegatingQueryLogic.java rename to core/query/src/main/java/datawave/core/query/logic/DelegatingQueryLogic.java index 14fb79bcc0f..4a32a3324c2 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/DelegatingQueryLogic.java +++ b/core/query/src/main/java/datawave/core/query/logic/DelegatingQueryLogic.java @@ -1,6 +1,5 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; -import java.security.Principal; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -12,13 +11,14 @@ import org.apache.commons.collections4.iterators.TransformIterator; import datawave.audit.SelectorExtractor; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.webservice.common.audit.Auditor; import datawave.webservice.common.connection.AccumuloClientConfiguration; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; import 
datawave.webservice.query.result.event.ResponseObjectFactory; @@ -228,16 +228,6 @@ public void setCollectQueryMetrics(boolean collectQueryMetrics) { delegate.setCollectQueryMetrics(collectQueryMetrics); } - @Override - public void setRoleManager(RoleManager roleManager) { - delegate.setRoleManager(roleManager); - } - - @Override - public RoleManager getRoleManager() { - return delegate.getRoleManager(); - } - @Override public Set getOptionalQueryParameters() { return delegate.getOptionalQueryParameters(); @@ -253,26 +243,6 @@ public String getConnPoolName() { return delegate.getConnPoolName(); } - @Override - public boolean canRunQuery(Principal principal) { - return delegate.canRunQuery(principal); - } - - @Override - public boolean canRunQuery() { - return delegate.canRunQuery(); - } - - @Override - public void setPrincipal(Principal principal) { - delegate.setPrincipal(principal); - } - - @Override - public Principal getPrincipal() { - return delegate.getPrincipal(); - } - @Override public MarkingFunctions getMarkingFunctions() { return delegate.getMarkingFunctions(); @@ -353,6 +323,51 @@ public void validate(Map> parameters) throws IllegalArgument delegate.validate(parameters); } + @Override + public int getMaxConcurrentTasks() { + return delegate.getMaxConcurrentTasks(); + } + + @Override + public void setMaxConcurrentTasks(int maxConcurrentTasks) { + delegate.setMaxConcurrentTasks(maxConcurrentTasks); + } + + @Override + public boolean canRunQuery(Collection userRoles) { + return delegate.canRunQuery(userRoles); + } + + @Override + public void setRequiredRoles(Set requiredRoles) { + delegate.setRequiredRoles(requiredRoles); + } + + @Override + public Set getRequiredRoles() { + return delegate.getRequiredRoles(); + } + + @Override + public ProxiedUserDetails getCurrentUser() { + return delegate.getCurrentUser(); + } + + @Override + public void setCurrentUser(ProxiedUserDetails currentUser) { + delegate.setCurrentUser(currentUser); + } + + @Override + public 
ProxiedUserDetails getServerUser() { + return delegate.getServerUser(); + } + + @Override + public void setServerUser(ProxiedUserDetails serverUser) { + delegate.setServerUser(serverUser); + } + @Override public UserOperations getUserOperations() { return delegate.getUserOperations(); diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/Flushable.java b/core/query/src/main/java/datawave/core/query/logic/Flushable.java similarity index 84% rename from web-services/query/src/main/java/datawave/webservice/query/logic/Flushable.java rename to core/query/src/main/java/datawave/core/query/logic/Flushable.java index 0f1ce5f216f..9b803ed0f29 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/Flushable.java +++ b/core/query/src/main/java/datawave/core/query/logic/Flushable.java @@ -1,6 +1,6 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; -import datawave.webservice.query.exception.EmptyObjectException; +import datawave.core.query.exception.EmptyObjectException; public interface Flushable { diff --git a/core/query/src/main/java/datawave/core/query/logic/QueryCheckpoint.java b/core/query/src/main/java/datawave/core/query/logic/QueryCheckpoint.java new file mode 100644 index 00000000000..7322fbb1fd9 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/QueryCheckpoint.java @@ -0,0 +1,80 @@ +package datawave.core.query.logic; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; + +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import datawave.core.query.configuration.QueryData; + +/** + * A query checkpoint will be very different depending on the query logic. It is expected that whatever the query state is can be encoded in a map of + * properties. 
+ */ +public class QueryCheckpoint implements Serializable { + private static final long serialVersionUID = -9201879510622137934L; + + private final QueryKey queryKey; + private final Collection queries; + + public QueryCheckpoint(String queryPool, String queryId, String queryLogic, Collection queries) { + this(new QueryKey(queryPool, queryId, queryLogic), queries); + } + + public QueryCheckpoint(QueryKey queryKey) { + this.queryKey = queryKey; + this.queries = null; + } + + public QueryCheckpoint(QueryKey queryKey, Collection queries) { + this.queryKey = queryKey; + this.queries = queries; + } + + public QueryCheckpoint(QueryCheckpoint checkpoint) { + this.queryKey = new QueryKey(checkpoint.queryKey.toString()); + this.queries = new ArrayList<>(checkpoint.queries.size()); + for (QueryData query : checkpoint.queries) { + this.queries.add(new QueryData(query)); + } + } + + /** + * Get the query key + * + * @return the query key + */ + public QueryKey getQueryKey() { + return queryKey; + } + + /** + * Get the QueryData objects representing the state of the query. 
+ * + * @return The QueryData objects representing the query checkpoint + */ + public Collection getQueries() { + return queries; + } + + @Override + public String toString() { + return getQueryKey() + ": " + getQueries(); + } + + @Override + public boolean equals(Object o) { + if (o instanceof QueryCheckpoint) { + QueryCheckpoint other = (QueryCheckpoint) o; + return new EqualsBuilder().append(getQueryKey(), other.getQueryKey()).append(getQueries(), other.getQueries()).isEquals(); + } + return false; + } + + @Override + public int hashCode() { + return new HashCodeBuilder().append(getQueryKey()).append(getQueries()).toHashCode(); + } +} diff --git a/core/query/src/main/java/datawave/core/query/logic/QueryKey.java b/core/query/src/main/java/datawave/core/query/logic/QueryKey.java new file mode 100644 index 00000000000..c2e5e199f98 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/QueryKey.java @@ -0,0 +1,104 @@ +package datawave.core.query.logic; + +import java.io.Serializable; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonProperty; + +public class QueryKey implements Serializable { + private static final long serialVersionUID = -2589618312956104322L; + + public static final String QUERY_ID_PREFIX = "Q-"; + public static final String POOL_PREFIX = "P-"; + public static final String LOGIC_PREFIX = "L-"; + + @JsonProperty + private String queryPool; + @JsonProperty + private String queryId; + @JsonProperty + private String queryLogic; + + /** + * Default constructor for deserialization + */ + public QueryKey() {} + + /** + * This method id to allow deserialization of the toKey() or toString() value used when this is in a map + * + * @param value + * The toString() from a task key + */ + public QueryKey(String value) { + String[] 
parts = StringUtils.split(value, '.'); + for (String part : parts) { + setPart(part); + } + } + + protected void setPart(String part) { + if (part.startsWith(QUERY_ID_PREFIX)) { + queryId = part.substring(QUERY_ID_PREFIX.length()); + } else if (part.startsWith(POOL_PREFIX)) { + queryPool = part.substring(POOL_PREFIX.length()); + } else if (part.startsWith(LOGIC_PREFIX)) { + queryLogic = part.substring(LOGIC_PREFIX.length()); + } + } + + @JsonCreator + public QueryKey(@JsonProperty("queryPool") String queryPool, @JsonProperty("queryId") String queryId, @JsonProperty("queryLogic") String queryLogic) { + this.queryPool = queryPool; + this.queryId = queryId; + this.queryLogic = queryLogic; + } + + public String getQueryPool() { + return queryPool; + } + + public String getQueryId() { + return queryId; + } + + public String getQueryLogic() { + return queryLogic; + } + + public static String toUUIDKey(String queryId) { + return QUERY_ID_PREFIX + queryId; + } + + public String toUUIDKey() { + return toUUIDKey(queryId); + } + + public String toKey() { + return toUUIDKey() + '.' + POOL_PREFIX + queryPool + '.' 
+ LOGIC_PREFIX + queryLogic; + } + + @Override + public String toString() { + return toKey(); + } + + @Override + public boolean equals(Object o) { + if (o instanceof QueryKey) { + QueryKey other = (QueryKey) o; + return new EqualsBuilder().append(getQueryPool(), other.getQueryPool()).append(getQueryId(), other.getQueryId()) + .append(getQueryLogic(), other.getQueryLogic()).isEquals(); + } + return false; + } + + @Override + public int hashCode() { + return new HashCodeBuilder().append(getQueryPool()).append(getQueryId()).append(getQueryLogic()).toHashCode(); + } +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/QueryLogic.java similarity index 91% rename from web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogic.java rename to core/query/src/main/java/datawave/core/query/logic/QueryLogic.java index a6b2e353366..a85ddfb3f29 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogic.java +++ b/core/query/src/main/java/datawave/core/query/logic/QueryLogic.java @@ -1,6 +1,5 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; -import java.security.Principal; import java.util.Collection; import java.util.List; import java.util.Map; @@ -11,16 +10,16 @@ import org.apache.commons.collections4.iterators.TransformIterator; import datawave.audit.SelectorExtractor; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.validation.ParameterValidator; import datawave.webservice.common.audit.Auditor.AuditType; import 
datawave.webservice.common.connection.AccumuloClientConfiguration; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.result.event.ResponseObjectFactory; @@ -30,7 +29,7 @@ public interface QueryLogic extends Iterable, Cloneable, ParameterValidato /** * A mechanism to get the normalized query without actually setting up the query. This can be called with having to call initialize. - * + *

* The default implementation is to return the query string as the normalized query * * @param client @@ -66,7 +65,6 @@ String getPlan(AccumuloClient client, Query settings, Set runtim GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set runtimeQueryAuthorizations) throws Exception; /** - * * @param settings * - query settings (query, begin date, end date, etc.) * @return list of selectors used in the Query @@ -106,6 +104,10 @@ String getPlan(AccumuloClient client, Query settings, Set runtim QueryLogicTransformer getEnrichedTransformer(Query settings); + default ResultPostprocessor getResultPostprocessor(GenericQueryConfiguration config) { + return new ResultPostprocessor.IdentityResultPostprocessor(); + } + default String getResponseClass(Query query) throws QueryException { try { QueryLogicTransformer t = this.getEnrichedTransformer(query); @@ -137,7 +139,9 @@ default String getResponseClass(Query query) throws QueryException { */ void close(); - /** @return the tableName */ + /** + * @return the tableName + */ String getTableName(); /** @@ -145,6 +149,11 @@ default String getResponseClass(Query query) throws QueryException { */ long getMaxResults(); + /** + * @return max number of concurrent tasks to run for this query + */ + int getMaxConcurrentTasks(); + /** * @return the results of getMaxWork */ @@ -185,6 +194,12 @@ default String getResponseClass(Query query) throws QueryException { */ void setMaxResults(long maxResults); + /** + * @param maxConcurrentTasks + * max number of concurrent tasks to run for this query + */ + void setMaxConcurrentTasks(int maxConcurrentTasks); + /** * @param maxRowsToScan * This is now deprecated and setMaxWork should be used instead. This is equivalent to setMaxWork. 
@@ -269,10 +284,6 @@ default String getResponseClass(Query query) throws QueryException { */ void setCollectQueryMetrics(boolean collectQueryMetrics); - void setRoleManager(RoleManager roleManager); - - RoleManager getRoleManager(); - /** * List of parameters that can be used in the 'params' parameter to Query/create * @@ -286,23 +297,23 @@ default String getResponseClass(Query query) throws QueryException { */ void setConnPoolName(String connPoolName); - /** @return the connPoolName */ + /** + * @return the connPoolName + */ String getConnPoolName(); /** - * Check that the user has one of the required roles principal my be null when there is no intent to control access to QueryLogic + * Check that the user has one of the required roles. userRoles may be null when there is no intent to control access to QueryLogic * - * @param principal - * the principal + * @param userRoles + * The user's roles * @return true/false */ - boolean canRunQuery(Principal principal); - - boolean canRunQuery(); // uses member Principal + boolean canRunQuery(Collection userRoles); - void setPrincipal(Principal principal); + void setRequiredRoles(Set requiredRoles); - Principal getPrincipal(); + Set getRequiredRoles(); MarkingFunctions getMarkingFunctions(); @@ -461,4 +472,13 @@ default void preInitialize(Query settings, Set userAuthorization * @return client configuration */ AccumuloClientConfiguration getClientConfig(); + + ProxiedUserDetails getCurrentUser(); + + void setCurrentUser(ProxiedUserDetails currentUser); + + ProxiedUserDetails getServerUser(); + + void setServerUser(ProxiedUserDetails serverUser); + } diff --git a/core/query/src/main/java/datawave/core/query/logic/QueryLogicFactory.java b/core/query/src/main/java/datawave/core/query/logic/QueryLogicFactory.java new file mode 100644 index 00000000000..d2b0fa3031e --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/QueryLogicFactory.java @@ -0,0 +1,29 @@ +package datawave.core.query.logic; + +import 
java.util.List; + +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.query.exception.QueryException; + +public interface QueryLogicFactory { + + /** + * + * @param name + * name of query logic + * @param currentUser + * the current user + * @return new instance of QueryLogic class + * @throws IllegalArgumentException + * if query logic name does not exist + * @throws QueryException + * if query not available for user's roles + * @throws CloneNotSupportedException + * if the query logic object failed to clone + */ + QueryLogic getQueryLogic(String name, ProxiedUserDetails currentUser) throws QueryException, IllegalArgumentException, CloneNotSupportedException; + + QueryLogic getQueryLogic(String name) throws QueryException, IllegalArgumentException, CloneNotSupportedException; + + List> getQueryLogicList(); +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicTransformer.java b/core/query/src/main/java/datawave/core/query/logic/QueryLogicTransformer.java similarity index 86% rename from web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicTransformer.java rename to core/query/src/main/java/datawave/core/query/logic/QueryLogicTransformer.java index 7e5c36f2c94..2ffed854293 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicTransformer.java +++ b/core/query/src/main/java/datawave/core/query/logic/QueryLogicTransformer.java @@ -1,9 +1,9 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; import org.apache.commons.collections4.Transformer; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.exception.EmptyObjectException; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.exception.EmptyObjectException; import datawave.webservice.result.BaseQueryResponse; public interface QueryLogicTransformer extends Transformer { diff --git 
a/web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricher.java b/core/query/src/main/java/datawave/core/query/logic/ResponseEnricher.java similarity index 79% rename from web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricher.java rename to core/query/src/main/java/datawave/core/query/logic/ResponseEnricher.java index f79abab728b..5d39b014929 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricher.java +++ b/core/query/src/main/java/datawave/core/query/logic/ResponseEnricher.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; import datawave.webservice.result.BaseQueryResponse; diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricherBuilder.java b/core/query/src/main/java/datawave/core/query/logic/ResponseEnricherBuilder.java similarity index 59% rename from web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricherBuilder.java rename to core/query/src/main/java/datawave/core/query/logic/ResponseEnricherBuilder.java index bb9a0407346..8983a9b0d48 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/ResponseEnricherBuilder.java +++ b/core/query/src/main/java/datawave/core/query/logic/ResponseEnricherBuilder.java @@ -1,9 +1,8 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; +package datawave.core.query.logic; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.marking.MarkingFunctions; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import datawave.security.authorization.ProxiedUserDetails; import datawave.webservice.query.result.event.ResponseObjectFactory; public interface ResponseEnricherBuilder { @@ -13,7 +12,9 @@ public interface ResponseEnricherBuilder { public ResponseEnricherBuilder withResponseObjectFactory(ResponseObjectFactory factory); - public 
ResponseEnricherBuilder withPrincipal(Principal principal); + public ResponseEnricherBuilder withCurrentUser(ProxiedUserDetails user); + + public ResponseEnricherBuilder withServerUser(ProxiedUserDetails user); public ResponseEnricher build(); } diff --git a/core/query/src/main/java/datawave/core/query/logic/ResultPostprocessor.java b/core/query/src/main/java/datawave/core/query/logic/ResultPostprocessor.java new file mode 100644 index 00000000000..a68620aa619 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/ResultPostprocessor.java @@ -0,0 +1,24 @@ +package datawave.core.query.logic; + +import java.util.List; + +/** + * Result Postprocessors are needed by the query microservices for certain query logics which need their results manipulated in some way. An example would be + * the CountingShardQueryLogic, which needs its events combined into a single event representing the final count for the query. Other query logics may have + * other uses for postprocessing aside from reducing/combining results. + */ +public interface ResultPostprocessor { + /** + * The apply method is called each time a result is added to the list. 
+ * + * @param results + * The results to be returned to the user + */ + void apply(List results); + + class IdentityResultPostprocessor implements ResultPostprocessor { + public void apply(List results) { + // do nothing + } + } +} diff --git a/core/query/src/main/java/datawave/core/query/logic/WritesQueryMetrics.java b/core/query/src/main/java/datawave/core/query/logic/WritesQueryMetrics.java new file mode 100644 index 00000000000..d8ecf4a2313 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/WritesQueryMetrics.java @@ -0,0 +1,24 @@ +package datawave.core.query.logic; + +import datawave.microservice.querymetric.BaseQueryMetric; + +public interface WritesQueryMetrics { + + void writeQueryMetrics(BaseQueryMetric metric); + + public boolean hasMetrics(); + + public long getSourceCount(); + + public long getNextCount(); + + public long getSeekCount(); + + public long getYieldCount(); + + public long getDocRanges(); + + public long getFiRanges(); + + public void resetMetrics(); +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/WritesResultCardinalities.java b/core/query/src/main/java/datawave/core/query/logic/WritesResultCardinalities.java similarity index 67% rename from web-services/query/src/main/java/datawave/webservice/query/logic/WritesResultCardinalities.java rename to core/query/src/main/java/datawave/core/query/logic/WritesResultCardinalities.java index 6d990992a1c..57bb4388ad6 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/WritesResultCardinalities.java +++ b/core/query/src/main/java/datawave/core/query/logic/WritesResultCardinalities.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic; +package datawave.core.query.logic; public interface WritesResultCardinalities { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeLogicException.java 
b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeLogicException.java similarity index 85% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeLogicException.java rename to core/query/src/main/java/datawave/core/query/logic/composite/CompositeLogicException.java index c57abd66a55..9d8b153ac8b 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeLogicException.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeLogicException.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.util.Collection; import java.util.Collections; @@ -14,7 +14,7 @@ public CompositeLogicException(String message, String logicName, Exception excep public CompositeLogicException(String message, Map exceptions) { super(getMessage(message, exceptions), getQueryException(exceptions.values())); if (exceptions.size() > 1) { - exceptions.values().stream().forEach(e -> addSuppressed(e)); + exceptions.values().forEach(this::addSuppressed); } } @@ -44,7 +44,7 @@ private static boolean isQueryException(Exception e) { private static String getMessage(String message, Map exceptions) { StringBuilder builder = new StringBuilder(); builder.append(message).append(":"); - exceptions.entrySet().stream().forEach(e -> builder.append('\n').append(e.getKey()).append(": ").append(e.getValue().getMessage())); + exceptions.forEach((key, value) -> builder.append('\n').append(key).append(": ").append(value.getMessage())); return builder.toString(); } } diff --git a/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryCheckpoint.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryCheckpoint.java new file mode 100644 index 00000000000..623248635cd --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryCheckpoint.java 
@@ -0,0 +1,22 @@ +package datawave.core.query.logic.composite; + +import java.io.Serializable; + +import datawave.core.query.logic.QueryCheckpoint; + +public class CompositeQueryCheckpoint extends QueryCheckpoint implements Serializable { + protected String delegateQueryLogic; + + public CompositeQueryCheckpoint(String delegateQueryLogic, QueryCheckpoint checkpoint) { + super(checkpoint); + this.delegateQueryLogic = delegateQueryLogic; + } + + public String getDelegateQueryLogic() { + return delegateQueryLogic; + } + + public void setDelegateQueryLogic(String delegateQueryLogic) { + this.delegateQueryLogic = delegateQueryLogic; + } +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryConfiguration.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryConfiguration.java similarity index 81% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryConfiguration.java rename to core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryConfiguration.java index e7bf6cc3a98..3931adc67a5 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryConfiguration.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryConfiguration.java @@ -1,14 +1,16 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.io.Serializable; +import java.util.HashMap; +import java.util.Map; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; public class CompositeQueryConfiguration extends GenericQueryConfiguration implements Serializable { - private Query query = 
null; + private Map configs = new HashMap<>(); // Specifies whether all queries must succeed initialization private boolean allMustInitialize = false; @@ -18,7 +20,7 @@ public class CompositeQueryConfiguration extends GenericQueryConfiguration imple public CompositeQueryConfiguration() { super(); - query = new QueryImpl(); + setQuery(new QueryImpl()); } /** @@ -82,12 +84,16 @@ public static CompositeQueryConfiguration create(CompositeQueryLogic compositeQu return config; } - public Query getQuery() { - return query; + public GenericQueryConfiguration getConfig(String logicName) { + return configs != null ? configs.get(logicName) : null; } - public void setQuery(Query query) { - this.query = query; + public Map getConfigs() { + return configs; + } + + public void setConfigs(Map configs) { + this.configs = configs; } public boolean isAllMustInitialize() { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogic.java similarity index 77% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogic.java rename to core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogic.java index 4d69f57785a..ad2d5dc0430 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogic.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogic.java @@ -1,6 +1,5 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; -import java.security.Principal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -22,22 +21,25 @@ import org.apache.log4j.Logger; import com.google.common.base.Joiner; +import com.google.common.collect.Iterables; import datawave.audit.SelectorExtractor; +import 
datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.filtered.FilteredQueryLogic; +import datawave.microservice.authorization.util.AuthorizationsUtil; +import datawave.microservice.query.Query; import datawave.security.authorization.AuthorizationException; -import datawave.security.authorization.DatawavePrincipal; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; -import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.filtered.FilteredQueryLogic; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.BaseResponse; @@ -47,10 +49,9 @@ * object. If configured to run sequentially, then the execution will terminate after the first query that returns results. Query logics will be sorted by their * configured name. 
*/ -public class CompositeQueryLogic extends BaseQueryLogic { +public class CompositeQueryLogic extends BaseQueryLogic implements CheckpointableQueryLogic { private class QueryLogicHolder extends Thread { - private GenericQueryConfiguration config; private String logicName; private QueryLogic logic; private TransformIterator transformIterator; @@ -86,14 +87,6 @@ public void setLogic(QueryLogic logic) { this.logic = logic; } - public GenericQueryConfiguration getConfig() { - return config; - } - - public void setConfig(GenericQueryConfiguration config) { - this.config = config; - } - public void setTransformIterator(TransformIterator transformIterator) { this.transformIterator = transformIterator; } @@ -213,26 +206,26 @@ public CompositeQueryLogic(CompositeQueryLogic other) { throw new RuntimeException(e); } } - setPrincipal(other.getPrincipal()); + setCurrentUser(other.getCurrentUser()); } public Set updateRuntimeAuthorizationsAndQueryAuths(QueryLogic logic, Query settings) throws AuthorizationException { - Set requestedAuths = new HashSet<>(WSAuthorizationsUtil.splitAuths(settings.getQueryAuthorizations())); + Set requestedAuths = new HashSet<>(AuthorizationsUtil.splitAuths(settings.getQueryAuthorizations())); // determine the valid authorizations for this call to be the user's auths for this logic - DatawavePrincipal principal = (DatawavePrincipal) logic.getPrincipal(); - DatawavePrincipal queryPrincipal = principal; + ProxiedUserDetails currentUser = logic.getCurrentUser(); + ProxiedUserDetails queryUser = currentUser; UserOperations userOperations = getUserOperations(); if (userOperations != null) { - principal = userOperations.getRemoteUser(principal); + currentUser = userOperations.getRemoteUser(currentUser); } - logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(requestedAuths))); + logic.preInitialize(settings, AuthorizationsUtil.buildAuthorizations(Collections.singleton(requestedAuths))); if 
(logic.getUserOperations() != null) { - queryPrincipal = logic.getUserOperations().getRemoteUser(queryPrincipal); + queryUser = logic.getUserOperations().getRemoteUser(queryUser); } // get the valid auths from the query user - Collection validAuths = queryPrincipal.getPrimaryUser().getAuths(); + Collection validAuths = queryUser.getPrimaryUser().getAuths(); Set validRequestedAuths = new HashSet<>(requestedAuths); validRequestedAuths.retainAll(validAuths); String validQueryAuthorizations = Joiner.on(',').join(validRequestedAuths); @@ -241,9 +234,9 @@ public Set updateRuntimeAuthorizationsAndQueryAuths(QueryLogic downgradedAuths = WSAuthorizationsUtil.getDowngradedAuthorizations(validQueryAuthorizations, principal, queryPrincipal); + Set downgradedAuths = AuthorizationsUtil.getDowngradedAuthorizations(validQueryAuthorizations, currentUser, queryUser); if (log.isTraceEnabled()) { - log.trace("Principal auths for user " + principal.getPrimaryUser().getCommonName() + " are " + principal.getPrimaryUser().getAuths()); + log.trace("Principal auths for user " + currentUser.getPrimaryUser().getCommonName() + " are " + currentUser.getPrimaryUser().getAuths()); log.trace("Query principal auths for " + logic.getLogicName() + " are " + validAuths); log.trace("Requested auths were " + requestedAuths + " of which the valid query auths are " + validQueryAuthorizations); log.trace("Downgraded auths are " + downgradedAuths); @@ -263,6 +256,7 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting Map exceptions = new HashMap<>(); if (!getUninitializedLogics().isEmpty()) { + Map configs = new HashMap<>(); for (Map.Entry> next : getUninitializedLogics().entrySet()) { String logicName = next.getKey(); QueryLogic logic = next.getValue(); @@ -289,11 +283,10 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting logicQueryStringBuilder.append("( filtered = true )"); } else { logicQueryStringBuilder.append(config.getQueryString()); - 
QueryLogicHolder holder = new QueryLogicHolder(logicName, logic); - holder.setConfig(config); holder.setSettings(settingsCopy); holder.setMaxResults(logic.getMaxResults()); + configs.put(logicName, config); logicState.put(logicName, holder); // if doing sequential execution, then stop since we have one initialized @@ -347,6 +340,7 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting final String compositeQueryString = logicQueryStringBuilder.toString(); CompositeQueryConfiguration config = getConfig(); + config.setConfigs(configs); config.setQueryString(compositeQueryString); config.setClient(client); config.setQuery(settings); @@ -393,9 +387,12 @@ public String getPlan(AccumuloClient client, Query settings, Set public void setupQuery(GenericQueryConfiguration configuration) throws Exception { int count = 0; + CompositeQueryConfiguration compositeConfig = (CompositeQueryConfiguration) configuration; + for (QueryLogicHolder holder : logicState.values()) { if (!holder.wasStarted()) { - holder.getLogic().setupQuery(holder.getConfig()); + GenericQueryConfiguration config = compositeConfig != null ? compositeConfig.getConfig(holder.getLogicName()) : null; + holder.getLogic().setupQuery(config); TransformIterator transformIterator = holder.getLogic().getTransformIterator(holder.getSettings()); holder.setTransformIterator(transformIterator); count++; @@ -456,9 +453,13 @@ public Iterator iterator() { @Override public TransformIterator getTransformIterator(Query settings) { - // The objects put into the pageQueue have already been transformed. - // We will iterate over the pagequeue with the No-Op transformer - return new TransformIterator(results.iterator(), NOPTransformer.nopTransformer()); + if (isCheckpointable()) { + return Iterables.getOnlyElement(queryLogics.values()).getTransformIterator(settings); + } else { + // The objects put into the pageQueue have already been transformed. 
+ // We will iterate over the pagequeue with the No-Op transformer + return new TransformIterator(results.iterator(), NOPTransformer.nopTransformer()); + } } @Override @@ -557,10 +558,10 @@ public UserOperations getUserOperations() { } @Override - public boolean canRunQuery(Principal principal) { + public boolean canRunQuery(Collection userRoles) { // user can run this composite query if they can run at least one of the configured query logics for (Map.Entry> entry : getUninitializedLogics().entrySet()) { - if (!entry.getValue().canRunQuery(principal)) { + if (!entry.getValue().canRunQuery(userRoles)) { queryLogics.remove(entry.getKey()); } } @@ -598,21 +599,83 @@ public Set getExampleQueries() { return params.isEmpty() ? null : params; } - /** - * We can run the query if we can and at least of one of the children can. - * - * @return canRunQuery - */ @Override - public boolean canRunQuery() { - if (super.canRunQuery()) { - for (QueryLogic logic : getQueryLogics().values()) { - if (logic.canRunQuery()) { - return true; - } + public boolean isCheckpointable() { + boolean checkpointable = true; + for (QueryLogicHolder logicHolder : logicState.values()) { + QueryLogic logic = logicHolder.getLogic(); + if (!(logic instanceof CheckpointableQueryLogic && ((CheckpointableQueryLogic) logic).isCheckpointable())) { + checkpointable = false; + break; } } - return false; + return checkpointable; + } + + public void setCheckpointable(boolean checkpointable) { + for (QueryLogicHolder queryLogicHolder : logicState.values()) { + QueryLogic queryLogic = queryLogicHolder.getLogic(); + if (queryLogic instanceof CheckpointableQueryLogic) { + ((CheckpointableQueryLogic) queryLogic).setCheckpointable(checkpointable); + } else { + throw new UnsupportedOperationException("Cannot set checkpointable for a query logic that is not checkpointable."); + } + } + } + + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new 
UnsupportedOperationException("Cannot checkpoint a query that is not checkpointable. Try calling setCheckpointable(true) first."); + } + + List checkpoints = new ArrayList<>(); + for (Map.Entry entry : logicState.entrySet()) { + for (QueryCheckpoint checkpoint : ((CheckpointableQueryLogic) entry.getValue().getLogic()).checkpoint(queryKey)) { + checkpoints.add(new CompositeQueryCheckpoint(entry.getKey(), checkpoint)); + } + } + return checkpoints; + } + + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + if (!isCheckpointable() || !(checkpoint instanceof CompositeQueryCheckpoint)) { + throw new UnsupportedOperationException("Cannot update a non-composite query checkpoint with the composite query logic."); + } + + CompositeQueryCheckpoint compositeCheckpoint = (CompositeQueryCheckpoint) checkpoint; + + CheckpointableQueryLogic logic = (CheckpointableQueryLogic) queryLogics.get(compositeCheckpoint.getDelegateQueryLogic()); + if (logic == null) { + throw new UnsupportedOperationException( + "Cannot update query checkpoint because delegate query logic [" + compositeCheckpoint.getDelegateQueryLogic() + "] does not exist"); + } + + return logic.updateCheckpoint(checkpoint); + } + + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration config, QueryCheckpoint checkpoint) throws Exception { + if (!isCheckpointable() || !(checkpoint instanceof CompositeQueryCheckpoint) || !(config instanceof CompositeQueryConfiguration)) { + throw new UnsupportedOperationException("Cannot setup a non-composite query checkpoint with the composite query logic."); + } + + CompositeQueryConfiguration compositeConfig = (CompositeQueryConfiguration) config; + + CompositeQueryCheckpoint compositeCheckpoint = (CompositeQueryCheckpoint) checkpoint; + + CheckpointableQueryLogic logic = (CheckpointableQueryLogic) queryLogics.get(compositeCheckpoint.getDelegateQueryLogic()); + if (logic == null) { + throw new UnsupportedOperationException( 
+ "Cannot update query checkpoint because delegate query logic [" + compositeCheckpoint.getDelegateQueryLogic() + "] does not exist"); + } + + // we are setting up a checkpoint, with a single query data, against a single query logic, so just keep the one we need + queryLogics.clear(); + queryLogics.put(compositeCheckpoint.getDelegateQueryLogic(), (BaseQueryLogic) logic); + + logic.setupQuery(client, compositeConfig.getConfig(compositeCheckpoint.getDelegateQueryLogic()), checkpoint); } /** @@ -632,16 +695,28 @@ public SelectorExtractor getSelectorExtractor() { } /** - * Setting the principal is called after the logic is created. Pass this on to the children. + * Setting the current user is called after the logic is created. Pass this on to the children. + * + * @param user + */ + @Override + public void setCurrentUser(ProxiedUserDetails user) { + super.setCurrentUser(user); + for (QueryLogic logic : getQueryLogics().values()) { + logic.setCurrentUser(user); + } + } + + /** + * /** Setting the server user is called after the logic is created. Pass this on to the children. 
* - * @param principal - * the principal + * @param user */ @Override - public void setPrincipal(Principal principal) { - super.setPrincipal(principal); + public void setServerUser(ProxiedUserDetails user) { + super.setServerUser(user); for (QueryLogic logic : getQueryLogics().values()) { - logic.setPrincipal(principal); + logic.setServerUser(user); } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResults.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResults.java similarity index 95% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResults.java rename to core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResults.java index 8858b85e2e0..8ad475aa052 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResults.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResults.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.util.ArrayList; import java.util.Iterator; @@ -7,7 +7,7 @@ import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.CountDownLatch; -import org.apache.commons.collections.keyvalue.UnmodifiableMapEntry; +import org.apache.commons.collections4.keyvalue.UnmodifiableMapEntry; public class CompositeQueryLogicResults implements Iterable, Thread.UncaughtExceptionHandler { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResultsIterator.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResultsIterator.java similarity index 95% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResultsIterator.java rename to 
core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResultsIterator.java index 63c74579664..99728bcf46c 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicResultsIterator.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicResultsIterator.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.util.Iterator; import java.util.concurrent.ArrayBlockingQueue; @@ -8,8 +8,8 @@ import com.google.common.base.Throwables; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.EmptyObjectException; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.exception.EmptyObjectException; public class CompositeQueryLogicResultsIterator implements Iterator, Thread.UncaughtExceptionHandler { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTransformer.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicTransformer.java similarity index 81% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTransformer.java rename to core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicTransformer.java index 8219789b5a9..893388b4d02 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTransformer.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeQueryLogicTransformer.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.util.List; @@ -6,12 +6,11 @@ import com.google.common.base.Throwables; -import datawave.webservice.query.cache.ResultsPage; -import 
datawave.webservice.query.cachedresults.CacheableLogic; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.AbstractQueryLogicTransformer; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.AbstractQueryLogicTransformer; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.result.BaseQueryResponse; public class CompositeQueryLogicTransformer extends AbstractQueryLogicTransformer implements CacheableLogic { @@ -31,8 +30,8 @@ public O transform(I input) { } @Override - public List writeToCache(Object o) throws QueryException { - List result = null; + public CacheableQueryRow writeToCache(Object o) throws QueryException { + CacheableQueryRow result = null; for (QueryLogicTransformer t : delegates) { if (t instanceof CacheableLogic) { CacheableLogic c = (CacheableLogic) t; @@ -47,13 +46,13 @@ public List writeToCache(Object o) throws QueryException { } @Override - public List readFromCache(List row) { - List result = null; + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + Object result = null; for (QueryLogicTransformer t : delegates) { if (t instanceof CacheableLogic) { CacheableLogic c = (CacheableLogic) t; try { - result = c.readFromCache(row); + result = c.readFromCache(cacheableQueryRow); } catch (Exception e) { log.warn("Error calling writeToCache on delegate, continuing...", e); } diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeUserOperations.java b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeUserOperations.java similarity index 89% rename from web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeUserOperations.java rename to 
core/query/src/main/java/datawave/core/query/logic/composite/CompositeUserOperations.java index 21ec0077aa0..620da4a1de9 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/composite/CompositeUserOperations.java +++ b/core/query/src/main/java/datawave/core/query/logic/composite/CompositeUserOperations.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.logic.composite; +package datawave.core.query.logic.composite; import java.util.ArrayList; import java.util.Collections; @@ -10,12 +10,13 @@ import com.google.common.collect.Sets; +import datawave.microservice.authorization.util.AuthorizationsUtil; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.authorization.UserOperations; -import datawave.security.util.WSAuthorizationsUtil; import datawave.user.AuthorizationsListBase; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.exception.QueryExceptionType; @@ -42,7 +43,7 @@ public CompositeUserOperations(List remoteOperations, boolean in } @Override - public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException { + public AuthorizationsListBase listEffectiveAuthorizations(ProxiedUserDetails callerObject) throws AuthorizationException { AuthorizationsListBase auths = responseObjectFactory.getAuthorizationsList(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); Map> authMap = new HashMap<>(); @@ -87,7 +88,7 @@ public static AuthorizationsListBase.SubjectIssuerDNPair dn(SubjectIssuerDNPair } @Override - public GenericResponse flushCachedCredentials(Object callerObject) throws AuthorizationException { + public GenericResponse flushCachedCredentials(ProxiedUserDetails 
callerObject) throws AuthorizationException { GenericResponse response = new GenericResponse<>(); response.setResult(""); String separator = ""; @@ -114,14 +115,14 @@ public GenericResponse flushCachedCredentials(Object callerObject) throw } @Override - public DatawavePrincipal getRemoteUser(DatawavePrincipal principal) throws AuthorizationException { - List principals = new ArrayList<>(); + public ProxiedUserDetails getRemoteUser(ProxiedUserDetails currentUser) throws AuthorizationException { + List userDetails = new ArrayList<>(); if (includeLocal) { - principals.add(principal); + userDetails.add(currentUser); } for (UserOperations ops : userOperations) { try { - principals.add(ops.getRemoteUser(principal)); + userDetails.add(ops.getRemoteUser(currentUser)); } catch (Exception e) { // ignore the exception if shortCircuitExecution is specified as we may never even call that remote logic if (!shortCircuitExecution) { @@ -130,7 +131,7 @@ public DatawavePrincipal getRemoteUser(DatawavePrincipal principal) throws Autho } } - return WSAuthorizationsUtil.mergePrincipals(principals.toArray(new DatawavePrincipal[0])); + return AuthorizationsUtil.mergeProxiedUserDetails(userDetails.toArray(new ProxiedUserDetails[0])); } public static Exception getException(QueryExceptionType qet) { diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/FilteredQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/filtered/FilteredQueryLogic.java similarity index 91% rename from web-services/query/src/main/java/datawave/webservice/query/logic/filtered/FilteredQueryLogic.java rename to core/query/src/main/java/datawave/core/query/logic/filtered/FilteredQueryLogic.java index a4aa53282b3..5b81cbfef86 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/FilteredQueryLogic.java +++ b/core/query/src/main/java/datawave/core/query/logic/filtered/FilteredQueryLogic.java @@ -1,4 +1,4 @@ -package 
datawave.webservice.query.logic.filtered; +package datawave.core.query.logic.filtered; import java.util.Collections; import java.util.Iterator; @@ -9,13 +9,12 @@ import org.apache.commons.collections4.iterators.TransformIterator; import org.apache.log4j.Logger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; +import datawave.core.query.logic.DelegatingQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; import datawave.security.authorization.UserOperations; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.iterator.DatawaveTransformIterator; -import datawave.webservice.query.logic.DelegatingQueryLogic; -import datawave.webservice.query.logic.QueryLogic; /** * A filtered query logic will only actually execute the delegate query logic if the filter passes. Otherwise this will do nothing and return no results. 
diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuth.java b/core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByAuth.java similarity index 91% rename from web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuth.java rename to core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByAuth.java index 25af6c71e19..955f93b60cf 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuth.java +++ b/core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByAuth.java @@ -1,12 +1,12 @@ -package datawave.webservice.query.logic.filtered; +package datawave.core.query.logic.filtered; import java.util.Set; import java.util.stream.Collectors; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.query.Query; -import datawave.webservice.query.predicate.ProxiedAuthorizationsPredicate; +import datawave.core.query.predicate.ProxiedAuthorizationsPredicate; +import datawave.microservice.query.Query; /** * This is a filter for the FilteredQueryLogic that will run the delegate query logic if the auths requested match a specified visibility (as defined by diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameter.java b/core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByParameter.java similarity index 89% rename from web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameter.java rename to core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByParameter.java index cec60195708..85b0801c0e6 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameter.java +++ 
b/core/query/src/main/java/datawave/core/query/logic/filtered/QueryLogicFilterByParameter.java @@ -1,12 +1,11 @@ -package datawave.webservice.query.logic.filtered; +package datawave.core.query.logic.filtered; import java.util.Set; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.predicate.QueryParameterPredicate; +import datawave.core.query.predicate.QueryParameterPredicate; +import datawave.microservice.query.Query; /** * This is a filter for the FilteredQueryLogic that will run the delegate query logic if a specified query parameter matches a specified value. If no value is diff --git a/core/query/src/main/java/datawave/core/query/logic/lookup/LookupQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/lookup/LookupQueryLogic.java new file mode 100644 index 00000000000..4216e668551 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/lookup/LookupQueryLogic.java @@ -0,0 +1,394 @@ +package datawave.core.query.logic.lookup; + +import java.util.Collection; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.Spliterator; +import java.util.function.Consumer; + +import org.apache.accumulo.core.client.AccumuloClient; +import org.apache.accumulo.core.security.Authorizations; +import org.apache.commons.collections4.iterators.TransformIterator; +import org.springframework.beans.factory.annotation.Required; +import org.springframework.util.MultiValueMap; + +import datawave.audit.SelectorExtractor; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import 
datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.common.audit.Auditor; +import datawave.webservice.query.exception.QueryException; + +public abstract class LookupQueryLogic extends BaseQueryLogic implements CheckpointableQueryLogic { + public static final String LOOKUP_KEY_VALUE_DELIMITER = ":"; + + // The underlying query logic to use for the lookup + private final BaseQueryLogic delegateQueryLogic; + + public LookupQueryLogic(BaseQueryLogic delegateQueryLogic) { + this.delegateQueryLogic = delegateQueryLogic; + } + + @SuppressWarnings("unchecked") + public LookupQueryLogic(LookupQueryLogic other) throws CloneNotSupportedException { + this((BaseQueryLogic) other.delegateQueryLogic.clone()); + } + + public abstract boolean isEventLookupRequired(MultiValueMap lookupTerms); + + public abstract Set getContentLookupTerms(MultiValueMap lookupTerms); + + @Override + public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set runtimeQueryAuthorizations) throws Exception { + return delegateQueryLogic.initialize(client, settings, runtimeQueryAuthorizations); + } + + @Override + public void setupQuery(GenericQueryConfiguration configuration) throws Exception { + delegateQueryLogic.setupQuery(configuration); + } + + @Override + public GenericQueryConfiguration getConfig() { + if (delegateQueryLogic != null) { + return delegateQueryLogic.getConfig(); + } else { + return super.getConfig(); + } + } + + @Override + public String getPlan(AccumuloClient client, Query settings, Set runtimeQueryAuthorizations, boolean expandFields, boolean expandValues) + throws Exception { + return delegateQueryLogic.getPlan(client, settings, runtimeQueryAuthorizations, expandFields, expandValues); + } + + @Override + public Set getRequiredRoles() { + return delegateQueryLogic.getRequiredRoles(); + } + + @Override + public void 
setRequiredRoles(Set requiredRoles) { + delegateQueryLogic.setRequiredRoles(requiredRoles); + } + + @Override + public String getTableName() { + return delegateQueryLogic.getTableName(); + } + + @Override + public long getMaxResults() { + return delegateQueryLogic.getMaxResults(); + } + + @Override + public int getMaxConcurrentTasks() { + return delegateQueryLogic.getMaxConcurrentTasks(); + } + + @Override + @Deprecated + public long getMaxRowsToScan() { + return delegateQueryLogic.getMaxRowsToScan(); + } + + @Override + public long getMaxWork() { + return delegateQueryLogic.getMaxWork(); + } + + @Override + public void setTableName(String tableName) { + delegateQueryLogic.setTableName(tableName); + } + + @Override + public void setMaxResults(long maxResults) { + delegateQueryLogic.setMaxResults(maxResults); + } + + @Override + public void setMaxConcurrentTasks(int maxConcurrentTasks) { + delegateQueryLogic.setMaxConcurrentTasks(maxConcurrentTasks); + } + + @Override + @Deprecated + public void setMaxRowsToScan(long maxRowsToScan) { + delegateQueryLogic.setMaxRowsToScan(maxRowsToScan); + } + + @Override + public void setMaxWork(long maxWork) { + delegateQueryLogic.setMaxWork(maxWork); + } + + @Override + public int getMaxPageSize() { + return delegateQueryLogic.getMaxPageSize(); + } + + @Override + public void setMaxPageSize(int maxPageSize) { + delegateQueryLogic.setMaxPageSize(maxPageSize); + } + + @Override + public long getPageByteTrigger() { + return delegateQueryLogic.getPageByteTrigger(); + } + + @Override + public void setPageByteTrigger(long pageByteTrigger) { + delegateQueryLogic.setPageByteTrigger(pageByteTrigger); + } + + @Override + public int getBaseIteratorPriority() { + return delegateQueryLogic.getBaseIteratorPriority(); + } + + @Override + public void setBaseIteratorPriority(int baseIteratorPriority) { + delegateQueryLogic.setBaseIteratorPriority(baseIteratorPriority); + } + + @Override + public Iterator iterator() { + return 
delegateQueryLogic.iterator(); + } + + @Override + public TransformIterator getTransformIterator(Query settings) { + return delegateQueryLogic.getTransformIterator(settings); + } + + @Override + public boolean getBypassAccumulo() { + return delegateQueryLogic.getBypassAccumulo(); + } + + @Override + public void setBypassAccumulo(boolean bypassAccumulo) { + delegateQueryLogic.setBypassAccumulo(bypassAccumulo); + } + + @Override + public void close() { + delegateQueryLogic.close(); + } + + @Override + public Auditor.AuditType getAuditType(Query query) { + return delegateQueryLogic.getAuditType(query); + } + + @Override + public Auditor.AuditType getAuditType() { + return delegateQueryLogic.getAuditType(); + } + + @Override + @Required + public void setAuditType(Auditor.AuditType auditType) { + delegateQueryLogic.setAuditType(auditType); + } + + @Override + public boolean getCollectQueryMetrics() { + return delegateQueryLogic.getCollectQueryMetrics(); + } + + @Override + public void setCollectQueryMetrics(boolean collectQueryMetrics) { + delegateQueryLogic.setCollectQueryMetrics(collectQueryMetrics); + } + + @Override + public String getConnPoolName() { + return delegateQueryLogic.getConnPoolName(); + } + + @Override + public void setConnPoolName(String connPoolName) { + delegateQueryLogic.setConnPoolName(connPoolName); + } + + @Override + public boolean canRunQuery(Collection userRoles) { + return delegateQueryLogic.canRunQuery(userRoles); + } + + @Override + public List getSelectors(Query settings) throws IllegalArgumentException { + return delegateQueryLogic.getSelectors(settings); + } + + @Override + public void setSelectorExtractor(SelectorExtractor selectorExtractor) { + delegateQueryLogic.setSelectorExtractor(selectorExtractor); + } + + @Override + public SelectorExtractor getSelectorExtractor() { + return delegateQueryLogic.getSelectorExtractor(); + } + + @Override + public Set getAuthorizedDNs() { + return delegateQueryLogic.getAuthorizedDNs(); + } + + 
@Override + public void setAuthorizedDNs(Set authorizedDNs) { + delegateQueryLogic.setAuthorizedDNs(authorizedDNs); + } + + @Override + public void setDnResultLimits(Map dnResultLimits) { + delegateQueryLogic.setDnResultLimits(dnResultLimits); + } + + @Override + public Map getDnResultLimits() { + return delegateQueryLogic.getDnResultLimits(); + } + + @Override + public AccumuloConnectionFactory.Priority getConnectionPriority() { + return delegateQueryLogic.getConnectionPriority(); + } + + @Override + public QueryLogicTransformer getTransformer(Query settings) { + return delegateQueryLogic.getTransformer(settings); + } + + @Override + public String getResponseClass(Query query) throws QueryException { + return delegateQueryLogic.getResponseClass(query); + } + + @Override + public Set getOptionalQueryParameters() { + return delegateQueryLogic.getOptionalQueryParameters(); + } + + @Override + public Set getRequiredQueryParameters() { + return delegateQueryLogic.getRequiredQueryParameters(); + } + + @Override + public Set getExampleQueries() { + return delegateQueryLogic.getExampleQueries(); + } + + @Override + public boolean containsDNWithAccess(Collection dns) { + return delegateQueryLogic.containsDNWithAccess(dns); + } + + @Override + public long getResultLimit(Query settings) { + return delegateQueryLogic.getResultLimit(settings); + } + + @Override + public void forEach(Consumer action) { + delegateQueryLogic.forEach(action); + } + + @Override + public Spliterator spliterator() { + return delegateQueryLogic.spliterator(); + } + + @Override + public String getLogicName() { + return delegateQueryLogic.getLogicName(); + } + + @Override + public void setLogicName(String logicName) { + delegateQueryLogic.setLogicName(logicName); + } + + @Override + public void setLogicDescription(String logicDescription) { + delegateQueryLogic.setLogicDescription(logicDescription); + } + + @Override + public String getLogicDescription() { + return 
delegateQueryLogic.getLogicDescription(); + } + + @Override + public void setCurrentUser(ProxiedUserDetails currentUser) { + super.setCurrentUser(currentUser); + delegateQueryLogic.setCurrentUser(currentUser); + } + + @Override + public void setServerUser(ProxiedUserDetails serverUser) { + super.setServerUser(serverUser); + delegateQueryLogic.setServerUser(serverUser); + } + + public BaseQueryLogic getDelegateQueryLogic() { + return delegateQueryLogic; + } + + @Override + public boolean isCheckpointable() { + if (delegateQueryLogic instanceof CheckpointableQueryLogic) { + return ((CheckpointableQueryLogic) delegateQueryLogic).isCheckpointable(); + } + return false; + } + + @Override + public void setCheckpointable(boolean checkpointable) { + if (delegateQueryLogic instanceof CheckpointableQueryLogic) { + ((CheckpointableQueryLogic) delegateQueryLogic).setCheckpointable(checkpointable); + } + } + + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot create checkpoints because the query logic is not checkpointable."); + } + + return ((CheckpointableQueryLogic) delegateQueryLogic).checkpoint(queryKey); + } + + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot update the query checkpoint because the query logic is not checkpointable."); + } + + return ((CheckpointableQueryLogic) delegateQueryLogic).updateCheckpoint(checkpoint); + } + + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration config, QueryCheckpoint checkpoint) throws Exception { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot setup a query checkpoint because the query logic is not checkpointable."); + } + + ((CheckpointableQueryLogic) delegateQueryLogic).setupQuery(client, config, checkpoint); + } +} diff --git 
a/core/query/src/main/java/datawave/core/query/logic/lookup/uid/LookupUIDQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/lookup/uid/LookupUIDQueryLogic.java new file mode 100644 index 00000000000..c785ded98fd --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/lookup/uid/LookupUIDQueryLogic.java @@ -0,0 +1,38 @@ +package datawave.core.query.logic.lookup.uid; + +import java.util.Collection; +import java.util.Set; +import java.util.stream.Collectors; + +import org.springframework.util.MultiValueMap; + +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.lookup.LookupQueryLogic; + +public class LookupUIDQueryLogic extends LookupQueryLogic { + public static final String UID_TERM_SEPARATOR = " "; + private static final String EVENT_FIELD = "event"; + + public LookupUIDQueryLogic(BaseQueryLogic delegateQueryLogic) { + super(delegateQueryLogic); + } + + public LookupUIDQueryLogic(LookupQueryLogic other) throws CloneNotSupportedException { + super(other); + } + + @Override + public boolean isEventLookupRequired(MultiValueMap lookupTerms) { + return !(lookupTerms.keySet().size() == 1 && lookupTerms.containsKey(EVENT_FIELD)); + } + + @Override + public Set getContentLookupTerms(MultiValueMap lookupTerms) { + return lookupTerms.values().stream().flatMap(Collection::stream).collect(Collectors.toSet()); + } + + @Override + public Object clone() throws CloneNotSupportedException { + return new LookupUIDQueryLogic<>(this); + } +} diff --git a/core/query/src/main/java/datawave/core/query/logic/lookup/uuid/LookupUUIDQueryLogic.java b/core/query/src/main/java/datawave/core/query/logic/lookup/uuid/LookupUUIDQueryLogic.java new file mode 100644 index 00000000000..d7d04a132eb --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/logic/lookup/uuid/LookupUUIDQueryLogic.java @@ -0,0 +1,38 @@ +package datawave.core.query.logic.lookup.uuid; + +import java.util.Set; +import java.util.stream.Collectors; + 
+import org.springframework.util.MultiValueMap; + +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.lookup.LookupQueryLogic; + +public class LookupUUIDQueryLogic extends LookupQueryLogic { + private static final String UUID_TERM_SEPARATOR = " OR "; + + public LookupUUIDQueryLogic(BaseQueryLogic delegateQueryLogic) { + super(delegateQueryLogic); + } + + public LookupUUIDQueryLogic(LookupQueryLogic other) throws CloneNotSupportedException { + super(other); + } + + @Override + public boolean isEventLookupRequired(MultiValueMap lookupTerms) { + // always, regardless of the terms + return true; + } + + @Override + public Set getContentLookupTerms(MultiValueMap lookupTerms) throws UnsupportedOperationException { + throw new UnsupportedOperationException("Cannot convert lookup terms to event lookups for LookupUUIDQueryLogic"); + } + + @Override + public Object clone() throws CloneNotSupportedException { + return new LookupUUIDQueryLogic<>(this); + } + +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/map/QueryGeometryHandler.java b/core/query/src/main/java/datawave/core/query/map/QueryGeometryHandler.java similarity index 71% rename from web-services/query/src/main/java/datawave/webservice/query/map/QueryGeometryHandler.java rename to core/query/src/main/java/datawave/core/query/map/QueryGeometryHandler.java index c35c1b015ae..9044224ced1 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/map/QueryGeometryHandler.java +++ b/core/query/src/main/java/datawave/core/query/map/QueryGeometryHandler.java @@ -1,8 +1,9 @@ -package datawave.webservice.query.map; +package datawave.core.query.map; import java.util.List; import datawave.microservice.querymetric.BaseQueryMetric; +import datawave.webservice.query.map.QueryGeometryResponse; public interface QueryGeometryHandler { diff --git a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricHandler.java 
b/core/query/src/main/java/datawave/core/query/metric/QueryMetricHandler.java similarity index 96% rename from web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricHandler.java rename to core/query/src/main/java/datawave/core/query/metric/QueryMetricHandler.java index f245d5382d2..5132c244909 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricHandler.java +++ b/core/query/src/main/java/datawave/core/query/metric/QueryMetricHandler.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.metric; +package datawave.core.query.metric; import java.util.Date; import java.util.Map; diff --git a/web-services/query/src/main/java/datawave/webservice/query/predicate/ProxiedAuthorizationsPredicate.java b/core/query/src/main/java/datawave/core/query/predicate/ProxiedAuthorizationsPredicate.java similarity index 95% rename from web-services/query/src/main/java/datawave/webservice/query/predicate/ProxiedAuthorizationsPredicate.java rename to core/query/src/main/java/datawave/core/query/predicate/ProxiedAuthorizationsPredicate.java index f392a116fb6..6da9184d0f6 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/predicate/ProxiedAuthorizationsPredicate.java +++ b/core/query/src/main/java/datawave/core/query/predicate/ProxiedAuthorizationsPredicate.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.predicate; +package datawave.core.query.predicate; import java.util.List; import java.util.function.Predicate; @@ -6,6 +6,8 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.security.ColumnVisibility; +import datawave.security.authorization.predicate.AuthorizationsPredicate; + /** * This is a predicate that will test the auths against a specified visibility (as defined by accumulo's ColumnVisibility). In addition to the visibility, one * can specify that only the first of the authorizations is matched (presumably the user). 
diff --git a/web-services/query/src/main/java/datawave/webservice/query/predicate/QueryParameterPredicate.java b/core/query/src/main/java/datawave/core/query/predicate/QueryParameterPredicate.java similarity index 92% rename from web-services/query/src/main/java/datawave/webservice/query/predicate/QueryParameterPredicate.java rename to core/query/src/main/java/datawave/core/query/predicate/QueryParameterPredicate.java index f5825e1903b..76eef670607 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/predicate/QueryParameterPredicate.java +++ b/core/query/src/main/java/datawave/core/query/predicate/QueryParameterPredicate.java @@ -1,9 +1,9 @@ -package datawave.webservice.query.predicate; +package datawave.core.query.predicate; import java.util.function.Predicate; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; /** * This is a predicate that will test a specified query parameter matches a specified value. 
If no value is specified then the parameter is treated as a boolean diff --git a/core/query/src/main/java/datawave/core/query/predict/NoOpQueryPredictor.java b/core/query/src/main/java/datawave/core/query/predict/NoOpQueryPredictor.java new file mode 100644 index 00000000000..16cc497f369 --- /dev/null +++ b/core/query/src/main/java/datawave/core/query/predict/NoOpQueryPredictor.java @@ -0,0 +1,13 @@ +package datawave.core.query.predict; + +import java.util.Set; + +import datawave.microservice.querymetric.BaseQueryMetric; + +public class NoOpQueryPredictor implements QueryPredictor { + + @Override + public Set predict(BaseQueryMetric query) throws PredictionException { + return null; + } +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryPredictor.java b/core/query/src/main/java/datawave/core/query/predict/QueryPredictor.java similarity index 94% rename from web-services/query/src/main/java/datawave/webservice/query/runner/QueryPredictor.java rename to core/query/src/main/java/datawave/core/query/predict/QueryPredictor.java index 5c0c59e59a3..6bdc525e300 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryPredictor.java +++ b/core/query/src/main/java/datawave/core/query/predict/QueryPredictor.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.runner; +package datawave.core.query.predict; import java.io.Serializable; import java.util.Set; diff --git a/web-services/common/src/main/java/datawave/webservice/common/remote/RemoteQueryService.java b/core/query/src/main/java/datawave/core/query/remote/RemoteQueryService.java similarity index 67% rename from web-services/common/src/main/java/datawave/webservice/common/remote/RemoteQueryService.java rename to core/query/src/main/java/datawave/core/query/remote/RemoteQueryService.java index 681a07f4f03..aff8427ac1d 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/remote/RemoteQueryService.java +++ 
b/core/query/src/main/java/datawave/core/query/remote/RemoteQueryService.java @@ -1,9 +1,11 @@ -package datawave.webservice.common.remote; +package datawave.core.query.remote; import java.net.URI; import java.util.List; import java.util.Map; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.query.exception.QueryException; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.GenericResponse; import datawave.webservice.result.VoidResponse; @@ -24,7 +26,7 @@ public interface RemoteQueryService { * the caller * @return the generic response */ - public GenericResponse createQuery(String queryLogicName, Map> queryParameters, Object callerObject); + GenericResponse createQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) throws QueryException; /** * Call next on a remote query service @@ -35,7 +37,7 @@ public interface RemoteQueryService { * the caller * @return the base query response */ - public BaseQueryResponse next(String id, Object callerObject); + BaseQueryResponse next(String id, ProxiedUserDetails callerObject) throws QueryException; /** * Call close on a remote query service @@ -46,7 +48,7 @@ public interface RemoteQueryService { * the caller * @return the void response */ - public VoidResponse close(String id, Object callerObject); + VoidResponse close(String id, ProxiedUserDetails callerObject) throws QueryException; /** * Plan a query using a remote query service @@ -59,7 +61,7 @@ public interface RemoteQueryService { * the caller * @return the generic response */ - public GenericResponse planQuery(String queryLogicName, Map> queryParameters, Object callerObject); + GenericResponse planQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) throws QueryException; /** * Get the plan from a remote query service @@ -70,7 +72,7 @@ public interface RemoteQueryService { * the caller * @return a generic response */ - public 
GenericResponse planQuery(String id, Object callerObject); + GenericResponse planQuery(String id, ProxiedUserDetails callerObject) throws QueryException; /** * Get the URI for the query metrics @@ -79,5 +81,5 @@ public interface RemoteQueryService { * the id * @return the query metrics uri */ - public URI getQueryMetricsURI(String id); + URI getQueryMetricsURI(String id); } diff --git a/web-services/query/src/main/java/datawave/webservice/query/result/event/DefaultResponseObjectFactory.java b/core/query/src/main/java/datawave/core/query/result/event/DefaultResponseObjectFactory.java similarity index 79% rename from web-services/query/src/main/java/datawave/webservice/query/result/event/DefaultResponseObjectFactory.java rename to core/query/src/main/java/datawave/core/query/result/event/DefaultResponseObjectFactory.java index 46b90202793..8ea65cd6eed 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/result/event/DefaultResponseObjectFactory.java +++ b/core/query/src/main/java/datawave/core/query/result/event/DefaultResponseObjectFactory.java @@ -1,5 +1,8 @@ -package datawave.webservice.query.result.event; +package datawave.core.query.result.event; +import datawave.core.query.cachedresults.CacheableQueryRowImpl; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.user.AuthorizationsListBase; import datawave.user.DefaultAuthorizationsList; import datawave.webservice.dictionary.data.DataDictionaryBase; @@ -10,13 +13,19 @@ import datawave.webservice.dictionary.data.FieldsBase; import datawave.webservice.metadata.DefaultMetadataField; import datawave.webservice.metadata.MetadataFieldBase; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; import datawave.webservice.query.cachedresults.CacheableQueryRow; -import datawave.webservice.query.cachedresults.CacheableQueryRowImpl; import datawave.webservice.query.result.EdgeQueryResponseBase; import 
datawave.webservice.query.result.edge.DefaultEdge; import datawave.webservice.query.result.edge.EdgeBase; +import datawave.webservice.query.result.event.DefaultEvent; +import datawave.webservice.query.result.event.DefaultFacets; +import datawave.webservice.query.result.event.DefaultField; +import datawave.webservice.query.result.event.DefaultFieldCardinality; +import datawave.webservice.query.result.event.EventBase; +import datawave.webservice.query.result.event.FacetsBase; +import datawave.webservice.query.result.event.FieldBase; +import datawave.webservice.query.result.event.FieldCardinalityBase; +import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.response.objects.DefaultKey; import datawave.webservice.response.objects.KeyBase; import datawave.webservice.result.DefaultEdgeQueryResponse; diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/QueryUtil.java b/core/query/src/main/java/datawave/core/query/util/QueryUtil.java similarity index 96% rename from web-services/query/src/main/java/datawave/webservice/query/util/QueryUtil.java rename to core/query/src/main/java/datawave/core/query/util/QueryUtil.java index b0cfec5fbea..bca140d2097 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/QueryUtil.java +++ b/core/query/src/main/java/datawave/core/query/util/QueryUtil.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.util; +package datawave.core.query.util; import java.util.HashSet; import java.util.Set; @@ -11,8 +11,8 @@ import com.google.protobuf.InvalidProtocolBufferException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import io.protostuff.LinkedBuffer; import io.protostuff.ProtobufIOUtil; import io.protostuff.Schema; diff --git a/core/query/src/main/resources/META-INF/beans.xml 
b/core/query/src/main/resources/META-INF/beans.xml new file mode 100644 index 00000000000..4ca201f8ff2 --- /dev/null +++ b/core/query/src/main/resources/META-INF/beans.xml @@ -0,0 +1,9 @@ + + + + \ No newline at end of file diff --git a/core/query/src/main/resources/META-INF/jboss-ejb3.xml b/core/query/src/main/resources/META-INF/jboss-ejb3.xml new file mode 100644 index 00000000000..8cf49db8c87 --- /dev/null +++ b/core/query/src/main/resources/META-INF/jboss-ejb3.xml @@ -0,0 +1,16 @@ + + + + + + + * + datawave + + + + \ No newline at end of file diff --git a/core/utils/accumulo-utils b/core/utils/accumulo-utils index bfb8e26a2c9..087a6590bb8 160000 --- a/core/utils/accumulo-utils +++ b/core/utils/accumulo-utils @@ -1 +1 @@ -Subproject commit bfb8e26a2c9f7343530689ce530fe5e452b5f771 +Subproject commit 087a6590bb8d0e0b6c783ce5f22db242a42480ee diff --git a/core/utils/common-utils b/core/utils/common-utils index 77b05026d1d..69dee4a5391 160000 --- a/core/utils/common-utils +++ b/core/utils/common-utils @@ -1 +1 @@ -Subproject commit 77b05026d1dc5bfffc5d425a7303e23ab9ab2b4a +Subproject commit 69dee4a5391895297247e261b00f98cee7833339 diff --git a/core/utils/metadata-utils b/core/utils/metadata-utils index 04f5e725a87..1c90817bb03 160000 --- a/core/utils/metadata-utils +++ b/core/utils/metadata-utils @@ -1 +1 @@ -Subproject commit 04f5e725a877808a081f8951fac3fce5fef1eeaa +Subproject commit 1c90817bb03f491b3dd24c7ade2cbb3c6bf605cc diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 133ae6d9fb1..53a9aa8dc21 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT gov.nsa.datawave.core datawave-utils-parent diff --git a/core/utils/type-utils b/core/utils/type-utils index 4d23ef749bc..f821e78309b 160000 --- a/core/utils/type-utils +++ b/core/utils/type-utils @@ -1 +1 @@ -Subproject commit 4d23ef749bcaac5cb87d5f562db9bfd301baa555 +Subproject commit 
f821e78309b6331024fda04afcc09a84f34c1e3f diff --git a/docker/.gitignore b/docker/.gitignore index f9c2da7c13c..f73687e9947 100644 --- a/docker/.gitignore +++ b/docker/.gitignore @@ -1,14 +1,29 @@ hadoop **/scripts/query_* **/scripts/errorQuery_* +**/scripts/edge_* +**/scripts/plan_* +**/scripts/prediction_* **/scripts/lookup_* **/scripts/batchLookup_* **/scripts/lookupContent_* **/scripts/batchLookupContent_* +**/scripts/streamingQuery_* **/scripts/discovery_* **/scripts/errorDiscovery_* **/scripts/count_* **/scripts/errorCount_* **/scripts/fieldIndexCount_* **/scripts/errorFieldIndexCount_* +**/scripts/poundit_* +**/scripts/executor_* +**/scripts/termFrequency_* **/scripts/hitHighlights_* +**/scripts/edgeEvent_* +**/scripts/metrics_* +**/scripts/shutdown_* +**/scripts/health_* +**/scripts/modification_* +**/scripts/mapReduceQuery_* +**/scripts/oozieQuery_* +**/scripts/cachedResultsQuery_* diff --git a/docker/README.md b/docker/README.md index abfbef14a44..ece924ddc3b 100644 --- a/docker/README.md +++ b/docker/README.md @@ -63,26 +63,78 @@ You will need to build the docker image for this service on your local machine f You will need to build the docker image for this service on your local machine following the instructions in the audit service README. -### Dictionary - -[Datawave Dictionary Service](https://github.com/NationalSecurityAgency/datawave-dictionary-service/tree/main) provides access to the data dictionary and edge dictionary for Datawave. - -You will need to build the docker image for this service on your local machine following the instructions in the dictionary service README. - ### Metrics [Datawave Query Metric Service](https://github.com/NationalSecurityAgency/datawave-query-metric-service/tree/main) provides metrics caching, storage, and retrieval capabilities for Datawave. You will need to build the docker image for this service on your local machine following the instructions in the query metrics service README. 
+### Zookeeper + +Zookeeper is a prepacked docker image used for distributed synchronization. + +### Kafka + +Kafka is a prepacked docker image used for messaging between the various services. + +### Query + +Datawave Query Service v1.0-SNAPSHOT is a user-facing interface for Datawave query. + +This microservice is in development, and can be found in this repo. + +You will need to build the docker image for this service on your local machine following the instructions in the config service README. + +### Executor Pool 1 + +Datawave Executor Service v1.0-SNAPSHOT is the back-end worker for Datawave queries. + +This microservice is in development, and can be found in this repo. + +You will need to build the docker image for this service on your local machine following the instructions in the config service README. + +### Executor Pool 2 + +Enabled via the 'pool2', or 'full' profile. + +Datawave Executor Service v1.0-SNAPSHOT is the back-end worker for Datawave queries. + +This microservice is in development, and can be found in this repo. + +You will need to build the docker image for this service on your local machine following the instructions in the config service README. + +### Query Storage + +Enabled via the 'storage', or 'full' profile. + +Datawave Query Storage Service v1.0-SNAPSHOT is a utility service used to inspect the storage cache. + +This microservice is in development, and can be found in this repo. + +You will need to build the docker image for this service on your local machine following the instructions in the config service README. + ## Optional Components +### Kafdrop + +Enabled via the 'management', or 'full' profile. + +Kafdrop is a prepacked docker image used for kafka cluster management. + ### Hazelcast Management Center Enabled via the 'management', or 'full' profile. Hazelcast Management Center v4.2021.06 is a prepacked docker image used for hazelcast cluster management. +### Dictionary + +Enabled via the 'dictionary', or 'full' profile. 
+ +[Datawave Dictionary Service](https://github.com/NationalSecurityAgency/datawave-dictionary-service/tree/main) provides access to the data dictionary and edge dictionary for Datawave. + +You will need to build the docker image for this service on your local machine following the instructions in the dictionary service README. + ## Usage Please read through these instructions in their entirety before attempting to build or deploy Datawave. @@ -197,7 +249,7 @@ This command can be combined with default Datawave Quickstart build command to b ### Bootstrap -The audit, dictionary, and query metric services all need to connect to Zookeeper, Hadoop and/or Accumulo. In order to make that work, there are some environment variables which need to be configured. +The audit, dictionary, query executor, and query metric services all need to connect to Zookeeper, Hadoop and/or Accumulo. In order to make that work, there are some environment variables which need to be configured. #### Default Bootstrap @@ -255,14 +307,26 @@ DW_HADOOP_HOST="" ### Start services -Start the default services: +Start the default services (with the Kafka as the backend): ```docker compose up -d``` +Start the default services (with RabbitMQ as the backend): + +```BACKEND=rabbitmq docker compose up -d``` + +Start the default services (with Hazelcast as the backend): + +```BACKEND=hazelcast docker compose up -d``` + Start the default services, and the dictionary service: ```docker compose --profile quickstart --profile dictionary up -d``` +Start the default services, the kafka services, and the dictionary service: + +```docker compose --profile quickstart --profile dictionary --profile kafka up -d``` + Start all services: ```docker compose --profile quickstart --profile full up -d``` diff --git a/docker/cleanup.sh b/docker/cleanup.sh new file mode 100755 index 00000000000..a909b27201c --- /dev/null +++ b/docker/cleanup.sh @@ -0,0 +1,9 @@ +#!/bin/sh +if [[ "${@/keepdata}" == "$@" ]]; then + docker volume 
rm docker_quickstart_data +fi +docker image prune -f +docker system prune -f +if [[ "${@/keeplog}" == "$@" ]]; then + sudo find logs -type f -name '*log*' -delete +fi diff --git a/docker/config/application-cachedresults.yml b/docker/config/application-cachedresults.yml new file mode 100755 index 00000000000..ce58aa98ad2 --- /dev/null +++ b/docker/config/application-cachedresults.yml @@ -0,0 +1,79 @@ +spring: + datasource: + cachedResults: + url: 'jdbc:mysql://${datawave.mysql.host}:3306/${datawave.mysql.dbname}?zeroDateTimeBehavior=convertToNull' + username: '${datawave.mysql.username}' + password: '${datawave.mysql.password}' + driver-class-name: 'com.mysql.cj.jdbc.Driver' + hikari: + # default: 30000 + connection-timeout: 5000 + # default: 600000 + idle-timeout: 900000 + # default: maximum-pool-size + minimum-idle: ${datawave.mysql.pool.min-size} + # default: 10 + maximum-pool-size: ${datawave.mysql.pool.max-size} + +datawave: + mysql: + host: 'mysql' + dbname: 'cachedresults' + pool: + min-size: '5' + max-size: '20' + username: 'datawave' + password: 'secret' + query: + cachedResults: + enabled: ${CACHED_RESULTS:false} + remoteQuery: + queryServiceUri: "https://query:8443/query/v1/query" + # unlimited + maxBytesToBuffer: -1 + numFields: 900 + statementTemplates: + createTableTemplate: | + CREATE TABLE IF NOT EXISTS template ( + _user_ VARCHAR(200) NOT NULL, + _queryId_ VARCHAR(200) NOT NULL, + _logicName_ VARCHAR(200) NOT NULL, + _datatype_ VARCHAR(35) NOT NULL, + _eventId_ VARCHAR(50) NOT NULL, + _row_ LONGTEXT NOT NULL, + _colf_ LONGTEXT NOT NULL, + _markings_ VARCHAR(400) NOT NULL, + _column_markings_ LONGTEXT NOT NULL, + _column_timestamps_ LONGTEXT NOT NULL, + %FIELD_DEFINITIONS% + ) ENGINE = MyISAM + createTable: "CREATE TABLE %TABLE% LIKE template" + dropTable: "DROP TABLE %TABLE%" + dropView: "DROP VIEW %TABLE%" + insert: | + INSERT INTO %TABLE% ( + _user_, + _queryId_, + _logicName_, + _datatype_, + _eventId_, + _row_, + _colf_, + _markings_, + 
_column_markings_, + _column_timestamps_, + %PREPARED_FIELDS% + ) VALUES ( + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + ?, + %PREPARED_VALUES% + ) + createView: "CREATE VIEW %VIEW%(%VIEW_COLS%) AS SELECT %TABLE_COLS% FROM %TABLE%" diff --git a/docker/config/application-compose.yml b/docker/config/application-compose.yml index a1603c75e45..7b3835bbc2e 100755 --- a/docker/config/application-compose.yml +++ b/docker/config/application-compose.yml @@ -34,3 +34,9 @@ accumulo: instanceName: 'my-instance-01' username: 'root' password: 'secret' + +# Configuration placeholders which 1) determines what backend will be used for transmitting query results +# and 2) determines the message size limit before claim checks are used with RabbitMQ messaging +messaging: + backend: ${BACKEND:kafka} + maxMessageSizeBytes: 536870912 diff --git a/docker/config/application-federation.yml b/docker/config/application-federation.yml new file mode 100644 index 00000000000..d88f8c5ec64 --- /dev/null +++ b/docker/config/application-federation.yml @@ -0,0 +1,16 @@ +# This serves as a set of sensible defaults for authorization and query federation. 
+ +datawave: + authorization: + federation: + # Each entry in the following map will be registered as a FederatedAuthorizationService bean, named after the key + services: + FederatedAuthorizationService: + federatedAuthorizationUri: "https://authorization:8443/authorization/v2" + query: + federation: + # Each entry in the following map will be registered as a FederatedQueryService (RemoteQueryService) bean, named after the key + services: + FederatedQueryService: + queryServiceUri: 'https://query:8443/query/v1' + queryMetricServiceUri: 'https://querymetric:8443/querymetric/v1/id' diff --git a/docker/config/application-metricssource.yml b/docker/config/application-metricssource.yml new file mode 100755 index 00000000000..b1bdb1196c7 --- /dev/null +++ b/docker/config/application-metricssource.yml @@ -0,0 +1,23 @@ +# This profile should be added to your service if you depend on the +# query metric starter to send metrics to the query metric service. +spring: + cloud: + stream: + bindings: + queryMetricSource-out-0: + destination: queryMetricChannel + producer: + requiredGroups: queryMetricService + errorChannelEnabled: true + # NOTE: When defining your functions, be sure to include busConsumer, or else spring cloud bus will not work + function: + definition: queryMetricSource;busConsumer + +datawave: + query: + metric: + client: + confirmAckTimeoutMillis: 30000 +# To send metrics via REST, uncomment the following +# host: metrics +# transport: HTTPS diff --git a/docker/config/application-mrquery.yml b/docker/config/application-mrquery.yml new file mode 100755 index 00000000000..6b2c940e5fc --- /dev/null +++ b/docker/config/application-mrquery.yml @@ -0,0 +1,54 @@ +datawave: + query: + mapreduce: + fsConfigResources: + - ${HADOOP_CONF_DIR:/etc/hadoop/conf}/core-site.xml + - ${HADOOP_CONF_DIR:/etc/hadoop/conf}/hdfs-site.xml + - ${HADOOP_CONF_DIR:/etc/hadoop/conf}/mapred-site.xml + - ${HADOOP_CONF_DIR:/etc/hadoop/conf}/yarn-site.xml + callbackServletURL: 
"http://query:8080/query/v1/mapreduce/updateState" + mapReduceBaseDirectory: "/datawave/MapReduceService" + restrictInputFormats: true + validInputFormats: + - "org.apache.accumulo.core.client.mapreduce.AccumuloInputFormat" + - "datawave.mr.bulk.BulkInputFormat" + jobs: + 'BulkResultsJob': + startingClass: datawave.microservice.query.mapreduce.MapReduce + jobJarName: "MapReduceQueryCoreJob.jar" + description: "MapReduce job that runs a query and either puts the results into a table or files in HDFS" + hdfsUri: "hdfs://${HADOOP_HOST}:9000/" + jobTracker: "${HADOOP_HOST}:8021" + requiredRuntimeParameters: + queryId: java.lang.String + format: datawave.microservice.mapreduce.bulkresults.map.SerializationFormat + optionalRuntimeParameters: + outputTableName: java.lang.String + outputFormat: java.lang.String + jobConfigurationProperties: + "mapreduce.map.speculative": "false" + "mapreduce.map.output.compress": "false" + "mapreduce.output.fileoutputformat.compress": "false" + "mapreduce.job.user.classpath.first": "true" + # NOTE: Disable spring components which should not be run in a map-reduce context + jobSystemProperties: + "datawave.table.cache.enabled": "false" + "spring.profiles.active": "query,mrquery" + "spring.cloud.bus.enabled": "false" + "spring.cloud.discovery.enabled": "false" + "spring.cloud.consul.enabled": "false" + "spring.rabbitmq.discovery.enabled": "false" + "datawave.query.messaging.backend": "none" + "datawave.query.messaging.claimCheck.enabled": "false" + "datawave.query.storage.cache.enabled": "false" + "hazelcast.client.enabled": "false" + "spring.cloud.config.enabled": "false" + "datawave.query.metric.client.enabled": "false" + accumulo: + zookeepers: '${accumulo.zookeepers}' + instanceName: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + 'OozieJob': + hdfsUri: "hdfs://${HADOOP_HOST}:9000/" + jobTracker: "${HADOOP_HOST}:8021" \ No newline at end of file diff --git 
a/docker/config/application-query.yml b/docker/config/application-query.yml new file mode 100755 index 00000000000..875e592df0b --- /dev/null +++ b/docker/config/application-query.yml @@ -0,0 +1,568 @@ +# This profile should be included by any service which depends on the query starter. This +# file contains all of the configuration required to use the QueryLogicFactory. +warehouse: + accumulo: + zookeepers: '${accumulo.zookeepers}' + instanceName: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + statsd: + host: localhost + port: 8125 + tables: + shard: + name: 'datawave.shard' + index: + name: 'datawave.shardIndex' + reverseIndex: + name: 'datawave.shardReverseIndex' + dateIndex: + name: 'datawave.dateIndex' + metadata: + name: 'datawave.metadata' + model: + name: 'datawave.metadata' + edge: + name: 'datawave.edge' + errorTables: + shard: + name: "datawave.error_s" + index: + name: "datawave.error_i" + reverseIndex: + name: "datawave.error_r" + dateIndex: + name: "" + metadata: + name: "datawave.error_m" + model: + name: "datawave.error_m" + metricTables: + shard: + name: "datawave.queryMetrics_s" + index: + name: "datawave.queryMetrics_i" + reverseIndex: + name: "datawave.queryMetrics_r" + dateIndex: + name: "" + metadata: + name: "datawave.queryMetrics_m" + model: + name: "datawave.queryMetrics_m" + defaults: + checkpointable: true + queryThreads: 100 + indexLookupThreads: 100 + dateIndexThreads: 20 + fullTableScanEnabled: false + baseIteratorPriority: 100 + maxIndexScanTimeMillis: 31536000000 + eventPerDayThreshold: 40000 + shardsPerDayThreshold: 20 + initialMaxTermThreshold: 2000 + finalMaxTermThreshold: 2000 + maxDepthThreshold: 2000 + maxUnfieldedExpansionThreshold: 50 + maxValueExpansionThreshold: 50 + maxOrExpansionThreshold: 500 + maxOrRangeThreshold: 10 + maxRangesPerRangeIvarator: 5 + maxOrRangeIvarators: 10 + maxOrExpansionFstThreshold: 750 + maxFieldIndexRangeSplit: 16 + maxIvaratorSources: 20 + 
maxEvaluationPipelines: 16 + maxPipelineCachedResults: 16 + hdfsSiteConfigURLs: 'file://${HADOOP_CONF_DIR:/etc/hadoop/conf}/core-site.xml,file://${HADOOP_CONF_DIR:/etc/hadoop/conf}/hdfs-site.xml' + ivaratorFstHdfsBaseURIs: "hdfs://${HADOOP_HOST:localhost}:9000/IvaratorCache" + ivaratorCacheBufferSize: 10000 + ivaratorMaxOpenFiles: 100 + ivaratorCacheScanPersistThreshold: 100000 + ivaratorCacheScanTimeoutMinutes: 60 + modelName: 'DATAWAVE' + edgeModelName: 'DATAWAVE_EDGE' + +datawave: + metadata: + all-metadata-auths: + - PRIVATE,PUBLIC + type-substitutions: + "[datawave.data.type.DateType]": "datawave.data.type.RawDateType" + + query: + parser: + skipTokenizeUnfieldedFields: + - "DOMETA" + tokenizedFields: + - "CONTENT" + logic: + factory: + enabled: true + # Uncomment the following line to override the query logic beans to load + # xmlBeansPath: "classpath:MyTestQueryLogicFactory.xml" + + # If desired, you may populate this map to redefine the name for each query logic. + # This can also be used to limit the available query logics. + queryLogicsByName: + "EventQuery": "EventQuery" + "ErrorEventQuery": "ErrorEventQuery" + "DiscoveryQuery": "DiscoveryQuery" + "ErrorDiscoveryQuery": "ErrorDiscoveryQuery" + "LuceneUUIDEventQuery": "LuceneUUIDEventQuery" + "ContentQuery": "ContentQuery" + "EdgeQuery": "EdgeQuery" + "CountQuery": "CountQuery" + "ErrorCountQuery": "ErrorCountQuery" + "FieldIndexCountQuery": "FieldIndexCountQuery" + "ErrorFieldIndexCountQuery": "ErrorFieldIndexCountQuery" + "TermFrequencyQuery": "TermFrequencyQuery" + "IndexStatsQuery": "IndexStatsQuery" + "QueryMetricsQuery": "QueryMetricsQuery" + "InternalQueryMetricsQuery": "InternalQueryMetricsQuery" + "FacetedQuery": "FacetedQuery" + "HitHighlights": "HitHighlights" + "EdgeEventQuery": "EdgeEventQuery" + "FederatedEventQuery": "FederatedEventQuery" + + # The max page size that a user can request. 
0 turns off this feature + maxPageSize: 10000 + + # The number of bytes at which a page will be returned, event if the pagesize has not been reached. 0 turns off this feature + pageByteTrigger: 0 + logics: + BaseEventQuery: + checkpointable: ${warehouse.defaults.checkpointable} + accumuloPassword: ${warehouse.accumulo.password} + tableName: ${warehouse.tables.shard.name} + dateIndexTableName: ${warehouse.tables.dateIndex.name} + defaultDateTypeName: "EVENT" + metadataTableName: ${warehouse.tables.metadata.name} + indexTableName: ${warehouse.tables.index.name} + reverseIndexTableName: ${warehouse.tables.reverseIndex.name} + maxResults: -1 + queryThreads: ${warehouse.defaults.queryThreads} + indexLookupThreads: ${warehouse.defaults.indexLookupThreads} + dateIndexThreads: ${warehouse.defaults.dateIndexThreads} + fullTableScanEnabled: ${warehouse.defaults.fullTableScanEnabled} + includeDataTypeAsField: false + disableIndexOnlyDocuments: false + indexOnlyFilterFunctionsEnabled: false + includeHierarchyFields: false + hierarchyFieldOptions: + "FOO": "BAR" + baseIteratorPriority: ${warehouse.defaults.baseIteratorPriority} + maxIndexScanTimeMillis: ${warehouse.defaults.maxIndexScanTimeMillis} + collapseUids: false + collapseUidsThreshold: -1 + useEnrichers: true + contentFieldNames: + - 'CONTENT' + realmSuffixExclusionPatterns: + - '<.*>$' + minimumSelectivity: .2 + enricherClassNames: + - 'datawave.query.enrich.DatawaveTermFrequencyEnricher' + useFilters: false + filterClassNames: + - 'foo.bar' + filterOptions: + 'bar': "foo" + auditType: "ACTIVE" + logicDescription: "Retrieve sharded events/documents, leveraging the global index tables as needed" + eventPerDayThreshold: ${warehouse.defaults.eventPerDayThreshold} + shardsPerDayThreshold: ${warehouse.defaults.shardsPerDayThreshold} + initialMaxTermThreshold: ${warehouse.defaults.initialMaxTermThreshold} + finalMaxTermThreshold: ${warehouse.defaults.finalMaxTermThreshold} + maxDepthThreshold: 
${warehouse.defaults.maxDepthThreshold} + maxUnfieldedExpansionThreshold: ${warehouse.defaults.maxUnfieldedExpansionThreshold} + maxValueExpansionThreshold: ${warehouse.defaults.maxValueExpansionThreshold} + maxOrExpansionThreshold: ${warehouse.defaults.maxOrExpansionThreshold} + maxOrRangeThreshold: ${warehouse.defaults.maxOrRangeThreshold} + maxOrExpansionFstThreshold: ${warehouse.defaults.maxOrExpansionFstThreshold} + maxFieldIndexRangeSplit: ${warehouse.defaults.maxFieldIndexRangeSplit} + maxIvaratorSources: ${warehouse.defaults.maxIvaratorSources} + maxEvaluationPipelines: ${warehouse.defaults.maxEvaluationPipelines} + maxPipelineCachedResults: ${warehouse.defaults.maxPipelineCachedResults} + hdfsSiteConfigURLs: ${warehouse.defaults.hdfsSiteConfigURLs} + zookeeperConfig: ${warehouse.accumulo.zookeepers} + ivaratorCacheDirConfigs: + - basePathURI: "hdfs://${HADOOP_HOST:localhost}:9000/IvaratorCache" + ivaratorFstHdfsBaseURIs: ${warehouse.defaults.ivaratorFstHdfsBaseURIs} + ivaratorCacheBufferSize: ${warehouse.defaults.ivaratorCacheBufferSize} + ivaratorMaxOpenFiles: ${warehouse.defaults.ivaratorMaxOpenFiles} + ivaratorCacheScanPersistThreshold: ${warehouse.defaults.ivaratorCacheScanPersistThreshold} + ivaratorCacheScanTimeoutMinutes: ${warehouse.defaults.ivaratorCacheScanTimeoutMinutes} + eventQueryDataDecoratorTransformer: + requestedDecorators: + - "CSV" + - "WIKIPEDIA" + dataDecorators: + "CSV": + "EVENT_ID": "https://localhost:8443/DataWave/Query/lookupUUID/EVENT_ID?uuid=@field_value@&parameters=data.decorators:CSV" + "UUID": "https://localhost:8443/DataWave/Query/lookupUUID/UUID?uuid=@field_value@&parameters=data.decorators:CSV" + "PARENT_UUID": "https://localhost:8443/DataWave/Query/lookupUUID/PARENT_UUID?uuid=@field_value@&parameters=data.decorators:CSV" + "WIKIPEDIA": + "PAGE_ID": "https://localhost:8443/DataWave/Query/lookupUUID/PAGE_ID?uuid=@field_value@&parameters=data.decorators:WIKIPEDIA" + "PAGE_TITLE": 
"https://localhost:8443/DataWave/Query/lookupUUID/PAGE_TITLE?uuid=@field_value@&parameters=data.decorators:WIKIPEDIA" + modelTableName: ${warehouse.tables.model.name} + modelName: ${warehouse.defaults.modelName} + querySyntaxParsers: + JEXL: "" + LUCENE: "LuceneToJexlQueryParser" + LUCENE-UUID: "LuceneToJexlUUIDQueryParser" + TOKENIZED-LUCENE: "TokenizedLuceneToJexlQueryParser" + sendTimingToStatsd: false + collectQueryMetrics: true + logTimingDetails: true + statsdHost: ${warehouse.statsd.host} + statsdPort: ${warehouse.statsd.port} + evaluationOnlyFields: "" + maxConcurrentTasks: 10 + requiredRoles: + - "AuthorizedUser" + + EventQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Query the sharded event/document schema, leveraging the global index tables as needed" + + ErrorEventQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Retrieve events/documents that encountered one or more errors during ingest" + tableName: ${warehouse.errorTables.shard.name} + metadataTableName: ${warehouse.errorTables.metadata.name} + dateIndexTableName: "" + indexTableName: ${warehouse.errorTables.index.name} + reverseIndexTableName: ${warehouse.errorTables.reverseIndex.name} + includeHierarchyFields: false + + DiscoveryQuery: + checkpointable: ${warehouse.defaults.checkpointable} + tableName: ${warehouse.tables.shard.name} + indexTableName: ${warehouse.tables.index.name} + reverseIndexTableName: ${warehouse.tables.reverseIndex.name} + metadataTableName: ${warehouse.tables.metadata.name} + modelTableName: ${warehouse.tables.model.name} + modelName: ${warehouse.defaults.modelName} + fullTableScanEnabled: ${warehouse.defaults.fullTableScanEnabled} + allowLeadingWildcard: true + auditType: "NONE" + maxResults: -1 + maxWork: -1 + logicDescription: "Discovery query that returns information from the index about the supplied term(s)" + + ErrorDiscoveryQuery: + checkpointable: ${warehouse.defaults.checkpointable} + tableName: 
${warehouse.errorTables.shard.name} + indexTableName: ${warehouse.errorTables.index.name} + reverseIndexTableName: ${warehouse.errorTables.reverseIndex.name} + maxResults: -1 + maxWork: -1 + metadataTableName: ${warehouse.errorTables.metadata.name} + modelTableName: ${warehouse.errorTables.model.name} + modelName: ${warehouse.defaults.modelName} + fullTableScanEnabled: ${warehouse.defaults.fullTableScanEnabled} + allowLeadingWildcard: true + auditType: "NONE" + logicDescription: "Discovery query that returns information from the ingest errors index about the supplied term(s)" + + LuceneUUIDEventQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Composite query logic that retrieves records from the event and error tables, based on known UUID fields, ie, those configured via UUIDTypeList in QueryLogicFactory.xml" + auditType: "NONE" + eventQuery: + auditType: "NONE" + logicDescription: "Lucene query for event/document UUIDs" + mandatoryQuerySyntax: + - "LUCENE-UUID" + connPoolName: "UUID" + errorEventQuery: + auditType: "NONE" + logicDescription: "Lucene query for event/document UUIDs for events that encountered errors at ingest time" + mandatoryQuerySyntax: + - "LUCENE-UUID" + connPoolName: "UUID" + tableName: ${warehouse.errorTables.shard.name} + dateIndexTableName: ${warehouse.errorTables.dateIndex.name} + metadataTableName: ${warehouse.errorTables.metadata.name} + indexTableName: ${warehouse.errorTables.index.name} + reverseIndexTableName: ${warehouse.errorTables.reverseIndex.name} + + ContentQuery: + checkpointable: ${warehouse.defaults.checkpointable} + tableName: ${warehouse.tables.shard.name} + maxResults: -1 + maxWork: -1 + auditType: "NONE" + logicDescription: "Query that returns a document given the document identifier" + + EdgeQuery: + checkpointable: ${warehouse.defaults.checkpointable} + tableName: ${warehouse.tables.edge.name} + metadataTableName: ${warehouse.tables.metadata.name} + modelTableName: 
${warehouse.tables.model.name} + modelName: ${warehouse.defaults.edgeModelName} + maxResults: 25000 + maxWork: -1 + queryThreads: 16 + auditType: "NONE" + logicDescription: "Retrieve graph edges matching the search term(s)" + querySyntaxParsers: + "JEXL": "" + "LIST": "" + "LIMITED_JEXL": "" + "LUCENE": "" + + CountQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Retrieve event/document counts based on your search criteria" + + ErrorCountQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Retrieve counts of errored events based on your search criteria" + tableName: ${warehouse.errorTables.shard.name} + metadataTableName: ${warehouse.errorTables.metadata.name} + indexTableName: ${warehouse.errorTables.index.name} + reverseIndexTableName: ${warehouse.errorTables.reverseIndex.name} + + FieldIndexCountQuery: + checkpointable: false + tableName: ${warehouse.tables.shard.name} + indexTableName: ${warehouse.tables.index.name} + reverseIndexTableName: ${warehouse.tables.reverseIndex.name} + metadataTableName: ${warehouse.tables.metadata.name} + maxResults: -1 + maxWork: -1 + queryThreads: ${warehouse.defaults.queryThreads} + modelTableName: ${warehouse.tables.model.name} + modelName: "DATAWAVE" + maxUniqueValues: 20000 + auditType: "NONE" + logicDescription: "Indexed Fields Only: Given FIELDNAME returns counts for each unique value. Given FIELDNAME:FIELDVALUE returns count for only that value." 
+ + ErrorFieldIndexCountQuery: + checkpointable: false + tableName: ${warehouse.errorTables.shard.name} + indexTableName: ${warehouse.errorTables.index.name} + reverseIndexTableName: ${warehouse.errorTables.reverseIndex.name} + metadataTableName: ${warehouse.errorTables.metadata.name} + maxResults: -1 + maxWork: -1 + queryThreads: ${warehouse.defaults.queryThreads} + modelTableName: ${warehouse.errorTables.model.name} + modelName: "DATAWAVE" + maxUniqueValues: 20000 + auditType: "NONE" + logicDescription: "FieldIndex count query (experimental)" + + TermFrequencyQuery: + tableName: ${warehouse.tables.shard.name} + maxResults: -1 + maxWork: -14 + auditType: "NONE" + logicDescription: "Query that returns data from the term frequency query table" + + IndexStatsQuery: + auditType: "NONE" + + QueryMetricsQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Retrieve query metrics based on the given search term(s)" + includeHierarchyFields: false + modelTableName: ${warehouse.metricTables.model.name} + modelName: "NONE" + tableName: ${warehouse.metricTables.shard.name} + dateIndexTableName: ${warehouse.metricTables.dateIndex.name} + metadataTableName: ${warehouse.metricTables.metadata.name} + indexTableName: ${warehouse.metricTables.index.name} + reverseIndexTableName: ${warehouse.metricTables.reverseIndex.name} + auditType: "NONE" + collectQueryMetrics: true + + InternalQueryMetricsQuery: + collectQueryMetrics: false + requiredRoles: + - "AuthorizedServer" + + FacetedQuery: + checkpointable: ${warehouse.defaults.checkpointable} + auditType: "NONE" + logicDescription: "Faceted search over indexed fields, returning aggregate counts for field values" + facetedSearchType: "FIELD_VALUE_FACETS" + facetTableName: "datawave.facets" + facetMetadataTableName: "datawave.facetMetadata" + facetHashTableName: "datawave.facetHashes" + maximumFacetGrouping: 200 + minimumFacet: 1 + streaming: true + querySyntaxParsers: + JEXL: "" + LUCENE: 
"LuceneToJexlQueryParser" + LUCENE-UUID: "LuceneToJexlUUIDQueryParser" + + HitHighlights: + checkpointable: ${warehouse.defaults.checkpointable} + accumuloPassword: ${warehouse.accumulo.password} + tableName: ${warehouse.tables.shard.name} + dateIndexTableName: ${warehouse.tables.dateIndex.name} + defaultDateTypeName: "EVENT" + metadataTableName: ${warehouse.tables.metadata.name} + indexTableName: ${warehouse.tables.index.name} + reverseIndexTableName: ${warehouse.tables.reverseIndex.name} + queryThreads: ${warehouse.defaults.indexLookupThreads} + fullTableScanEnabled: ${warehouse.defaults.fullTableScanEnabled} + minimumSelectivity: .2 + includeDataTypeAsField: false + includeGroupingContext: false + useEnrichers: false + auditType: "NONE" + logicDescription: "Fast boolean query over indexed fields, only returning fields queried on" + eventPerDayThreshold: 40000 + shardsPerDayThreshold: ${warehouse.defaults.shardsPerDayThreshold} + initialMaxTermThreshold: ${warehouse.defaults.initialMaxTermThreshold} + finalMaxTermThreshold: ${warehouse.defaults.finalMaxTermThreshold} + maxDepthThreshold: ${warehouse.defaults.maxDepthThreshold} + maxUnfieldedExpansionThreshold: ${warehouse.defaults.maxUnfieldedExpansionThreshold} + maxValueExpansionThreshold: ${warehouse.defaults.maxValueExpansionThreshold} + maxOrExpansionThreshold: ${warehouse.defaults.maxOrExpansionThreshold} + maxOrRangeThreshold: ${warehouse.defaults.maxOrRangeThreshold} + maxRangesPerRangeIvarator: ${warehouse.defaults.maxRangesPerRangeIvarator} + maxOrRangeIvarators: ${warehouse.defaults.maxOrRangeIvarators} + maxOrExpansionFstThreshold: ${warehouse.defaults.maxOrExpansionFstThreshold} + maxFieldIndexRangeSplit: ${warehouse.defaults.maxFieldIndexRangeSplit} + maxEvaluationPipelines: ${warehouse.defaults.maxEvaluationPipelines} + maxPipelineCachedResults: ${warehouse.defaults.maxPipelineCachedResults} + hdfsSiteConfigURLs: ${warehouse.defaults.hdfsSiteConfigURLs} + zookeeperConfig: 
${warehouse.accumulo.zookeepers} + ivaratorCacheDirConfigs: + - basePathURI: "hdfs://${HADOOP_HOST:localhost}:9000/IvaratorCache" + ivaratorFstHdfsBaseURIs: ${warehouse.defaults.ivaratorFstHdfsBaseURIs} + ivaratorCacheBufferSize: 10000 + ivaratorMaxOpenFiles: ${warehouse.defaults.ivaratorMaxOpenFiles} + ivaratorCacheScanPersistThreshold: 100000 + ivaratorCacheScanTimeoutMinutes: ${warehouse.defaults.ivaratorCacheScanTimeoutMinutes} + querySyntaxParsers: + JEXL: "" + LUCENE: "LuceneToJexlQueryParser" + LUCENE-UUID: "LuceneToJexlUUIDQueryParser" + + EdgeEventQuery: + checkpointable: ${warehouse.defaults.checkpointable} + logicDescription: "Use results of an EdgeQuery to obtain events/documents that created the given edge" + edgeModelName: "DATAWAVE_EDGE" + modelTableName: ${warehouse.tables.model.name} + + uuidTypes: &defaultUuidTypes + 'EVENT_ID': + fieldName: 'EVENT_ID' + queryLogics: + 'default': 'LuceneUUIDEventQuery' + allowedWildcardAfter: 28 + 'UUID': + fieldName: 'UUID' + queryLogics: + 'default': 'LuceneUUIDEventQuery' + 'PARENT_UUID': + fieldName: 'PARENT_UUID' + queryLogics: + 'default': 'LuceneUUIDEventQuery' + 'PAGE_ID': + fieldName: 'PAGE_ID' + queryLogics: + 'default': 'LuceneUUIDEventQuery' + 'PAGE_TITLE': + fieldName: 'PAGE_TITLE' + queryLogics: + 'default': 'LuceneUUIDEventQuery' + + lookup: + columnVisibility: "" + beginDate: "20100101 000000.000" + types: *defaultUuidTypes + + translateid: + columnVisibility: "" + beginDate: "20100101 000000.000" + types: *defaultUuidTypes + + edge: + # Uncomment the following line to override the edge beans to load + # xmlBeansPath: "classpath:EdgeBeans.xml" + model: + baseFieldMap: + EDGE_SOURCE: 'SOURCE' + EDGE_SINK: 'SINK' + EDGE_TYPE: 'TYPE' + EDGE_RELATIONSHIP: 'RELATION' + EDGE_ATTRIBUTE1: 'ATTRIBUTE1' + EDGE_ATTRIBUTE2: 'ATTRIBUTE2' + EDGE_ATTRIBUTE3: 'ATTRIBUTE3' + DATE: 'DATE' + STATS_EDGE: 'STATS_TYPE' + keyUtilFieldMap: + ENRICHMENT_TYPE: 'ENRICHMENT_TYPE' + FACT_TYPE: 'FACT_TYPE' + GROUPED_FIELDS: 
'GROUPED_FIELDS' + transformFieldMap: + COUNT: 'COUNT' + COUNTS: 'COUNTS' + LOAD_DATE: 'LOADDATE' + ACTIVITY_DATE: 'ACTIVITY_DATE' + fieldMappings: + - fieldName: "SOURCE" + modelFieldName: "VERTEXA" + direction: "REVERSE" + indexOnly: false + - fieldName: "SOURCE" + modelFieldName: "VERTEXA" + direction: "FORWARD" + indexOnly: false + - fieldName: "SINK" + modelFieldName: "VERTEXB" + direction: "REVERSE" + indexOnly: false + - fieldName: "SINK" + modelFieldName: "VERTEXB" + direction: "FORWARD" + indexOnly: false + - fieldName: "RELATION" + modelFieldName: "RELATION" + direction: "REVERSE" + indexOnly: false + - fieldName: "RELATION" + modelFieldName: "RELATION" + direction: "FORWARD" + indexOnly: false + - fieldName: "TYPE" + modelFieldName: "TYPE" + direction: "REVERSE" + indexOnly: false + - fieldName: "TYPE" + modelFieldName: "TYPE" + direction: "FORWARD" + indexOnly: false + - fieldName: "ATTRIBUTE1" + modelFieldName: "ATTR1" + direction: "REVERSE" + indexOnly: false + - fieldName: "ATTRIBUTE1" + modelFieldName: "ATTR1" + direction: "FORWARD" + indexOnly: false + - fieldName: "ATTRIBUTE2" + modelFieldName: "ATTR2" + direction: "REVERSE" + indexOnly: false + - fieldName: "ATTRIBUTE2" + modelFieldName: "ATTR2" + direction: "FORWARD" + indexOnly: false + - fieldName: "ATTRIBUTE3" + modelFieldName: "ATTR3" + direction: "REVERSE" + indexOnly: false + - fieldName: "ATTRIBUTE3" + modelFieldName: "ATTR3" + direction: "FORWARD" + indexOnly: false + diff --git a/docker/config/application-querymessaging.yml b/docker/config/application-querymessaging.yml new file mode 100755 index 00000000000..fd5b0c97e70 --- /dev/null +++ b/docker/config/application-querymessaging.yml @@ -0,0 +1,11 @@ +# This profile should be included by any service which depends on the query starter and +# wants to read/write query stats from/to the query storage cache, or read/write query +# results from/to the query results messaging backend. 
+datawave: + query: + messaging: + backend: ${messaging.backend} + rabbitmq: + maxMessageSizeBytes: ${messaging.maxMessageSizeBytes} + kafka: + partitions: 2 diff --git a/docker/config/executor-pool1.yml b/docker/config/executor-pool1.yml new file mode 100755 index 00000000000..8850c4ae636 --- /dev/null +++ b/docker/config/executor-pool1.yml @@ -0,0 +1,20 @@ +# This is where you set properties which are specific to pool1 +executor: + poolName: 'pool1' + +datawave: + swagger: + title: "Query Executor Service (Pool 1)" + description: "REST API provided by the Query Executor Service" + connection: + factory: + pools: + 'pool1': + zookeepers: '${accumulo.zookeepers}' + instance: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + lowPriorityPoolSize: 40 + normalPriorityPoolSize: 40 + highPriorityPoolSize: 40 + adminPriorityPoolSize: 40 diff --git a/docker/config/executor-pool2.yml b/docker/config/executor-pool2.yml new file mode 100755 index 00000000000..a4757d93235 --- /dev/null +++ b/docker/config/executor-pool2.yml @@ -0,0 +1,20 @@ +# This is where you set properties which are specific to pool2 +executor: + poolName: 'pool2' + +datawave: + swagger: + title: "Query Executor Service (Pool 2)" + description: "REST API provided by the Query Executor Service" + connection: + factory: + pools: + 'pool2': + zookeepers: '${accumulo.zookeepers}' + instance: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + lowPriorityPoolSize: 40 + normalPriorityPoolSize: 40 + highPriorityPoolSize: 40 + adminPriorityPoolSize: 40 diff --git a/docker/config/executor.yml b/docker/config/executor.yml new file mode 100755 index 00000000000..da54229b34d --- /dev/null +++ b/docker/config/executor.yml @@ -0,0 +1,67 @@ +# This is where you set properties which are common to all executors +server: + # since the application names include the pools, lets override the context path to simply be executor + 
servlet.context-path: /executor + cdn-uri: /executor/ + +logging: + level: + root: INFO + datawave.query: DEBUG + datawave.microservice.query: DEBUG + datawave.microservice.query.executor: DEBUG + org.apache.kafka: ERROR + datawave.microservice.query.storage: WARN + +warehouse: + accumulo: + zookeepers: '${accumulo.zookeepers}' + instanceName: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + +datawave: + table: + cache: + zookeepers: '${accumulo.zookeepers}' + tableNames: + - '${warehouse.tables.metadata.name}' + poolName: 'default' + reloadInterval: 360000 + evictionReaperIntervalInSeconds: 360 + numLocks: 3 + maxRetries: 3 + reload-crontab: '* * * * * ?' + connection: + factory: + defaultPool: "default" + pools: + "default": + zookeepers: '${accumulo.zookeepers}' + instance: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + lowPriorityPoolSize: 40 + normalPriorityPoolSize: 40 + highPriorityPoolSize: 40 + adminPriorityPoolSize: 40 + query: + executor: + pool: "${executor.poolName}" + availableResultsPageMultiplier: 2.5 + maxQueueSize: 400 + coreThreads: 10 + maxThreads: 40 + keepAliveMs: 600000 + queryStatusExpirationMs: 60000 + checkpointFlushMs: 1000 + checkpointFlushResults: 2 + orphanThresholdMs: 60000 + monitorTaskLease: 30 + monitorTaskLeaseUnit: SECONDS + monitor: + enabled: true + scheduler-crontab: '* * * * * ?' 
+ logStatusPeriodMs: 600000 + logStatusWhenChangedMs: 300000 + queryMetricsUrlPrefix: https://localhost:8543/querymetric/v1/id/ diff --git a/docker/config/modification.yml b/docker/config/modification.yml new file mode 100755 index 00000000000..bfcd5b895f8 --- /dev/null +++ b/docker/config/modification.yml @@ -0,0 +1,70 @@ +# This file contains all of the configuration required to use the modification service +logging: + level: + root: INFO + datawave.modification: DEBUG + datawave.microservice.modification: DEBUG + datawave.microservice.modification.query: DEBUG + +datawave: + swagger: + title: "Modification (Mutable Metadata) Service" + description: "REST API provided by the Modification (Mutable Metadata) Service" + + modification: + query: + queryURI: https://query:8443/query/v1 + queryPool: pool1 + remoteQueryTimeout: 1 + remoteQueryTimeoutUnit: MINUTES + data: + tableName: ${warehouse.tables.metadata.name} + poolName: "default" + handlers: + authorizedRoles: + - "AuthorizedUser" + eventTableName: ${warehouse.tables.shard.name} + metadataTableName: ${warehouse.tables.metadata.name} + indexTableName: ${warehouse.tables.index.name} + reverseIndexTableName: ${warehouse.tables.reverseIndex.name} + securityMarkingExemptFields: + - "ExampleExemptField" + requiresAudit: false + indexOnlyMap: + "SomeEventField": "SomeIndexField1,SomeIndexField2" + indexOnlySuffixes: + - "ExampleSuffix" + contentFields: + - "ExampleContentField" + + table: + cache: + zookeepers: '${accumulo.zookeepers}' + tableNames: + - '${warehouse.tables.metadata.name}' + poolName: 'default' + reloadInterval: 360000 + evictionReaperIntervalInSeconds: 360 + numLocks: 3 + maxRetries: 3 + reload-crontab: '* * * * * ?' 
+ connection: + factory: + defaultPool: "default" + pools: + "default": + zookeepers: '${accumulo.zookeepers}' + instance: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + lowPriorityPoolSize: 40 + normalPriorityPoolSize: 40 + highPriorityPoolSize: 40 + adminPriorityPoolSize: 40 + + security: + util: + subjectDnPattern: "(?:^|,)\\s*OU\\s*=\\s*My Department\\s*(?:,|$)" + npeOuList: "EXAMPLE_SERVER_OU1,EXAMPLE_SERVER_OU2" + + diff --git a/docker/config/mrquery.yml b/docker/config/mrquery.yml new file mode 100755 index 00000000000..2e3a5736b29 --- /dev/null +++ b/docker/config/mrquery.yml @@ -0,0 +1,31 @@ +logging: + level: + root: INFO + org.springframework.cloud.bootstrap.config: DEBUG + datawave.microservice.query: DEBUG + +datawave: + table: + cache: + zookeepers: '${accumulo.zookeepers}' + tableNames: + - '${warehouse.tables.metadata.name}' + poolName: 'default' + reloadInterval: 360000 + evictionReaperIntervalInSeconds: 360 + numLocks: 3 + maxRetries: 3 + reload-crontab: '* * * * * ?' 
+ connection: + factory: + defaultPool: "default" + pools: + "default": + zookeepers: '${accumulo.zookeepers}' + instance: '${accumulo.instanceName}' + username: '${accumulo.username}' + password: '${accumulo.password}' + lowPriorityPoolSize: 40 + normalPriorityPoolSize: 40 + highPriorityPoolSize: 40 + adminPriorityPoolSize: 40 \ No newline at end of file diff --git a/docker/config/query.yml b/docker/config/query.yml new file mode 100755 index 00000000000..f6490619910 --- /dev/null +++ b/docker/config/query.yml @@ -0,0 +1,74 @@ +spring: + lifecycle: + # Set the shutdown grace period + timeout-per-shutdown-phase: 5s + +management: + endpoint: + # Enable the shutdown endpoint for the query service + shutdown: + enabled: true + +# Query requires a client certificate +server: + ssl: + client-auth: NEED + # Enable graceful shutdown + shutdown: "graceful" + +logging: + level: + root: INFO + org.springframework.cloud.bootstrap.config: DEBUG + datawave.microservice.query: DEBUG + org.apache.kafka: ERROR + datawave.microservice.query.storage: WARN + +datawave: + swagger: + title: "Query Service" + description: "REST API provided by the Query Service" + query: + nextCall: + resultPollInterval: 500 + statusUpdateInterval: 500 + expiration: + callTimeout: 1 + callTimeoutUnit: HOURS + shortCircuitCheckTime: 60 + shortCircuitCheckTimeUnit: MINUTES + shortCircuitTimeout: 55 + shortCircuitTimeoutUnit: MINUTES + longRunningQueryTimeout: 24 + longRunningQueryTimeoutUnit: HOURS + monitor: + monitorInterval: 30 + monitorIntervalUnit: SECONDS + doc: + menu: + extras: '
  • Accumulo
  • ' + table: + cache: + enabled: false + web: + accumulo: + uri: 'https://localhost:9143/accumulo' + cache: + uri: 'https://localhost:8843/cache' + config: + uri: 'https://localhost:8888/configserver' + authorization: + uri: 'https://localhost:8343/authorization' + audit: + uri: 'https://localhost:9043/audit' + metrics: + uri: 'https://localhost:8543/querymetric' + dictionary: + uri: 'https://localhost:8643/dictionary' + executor: + uri: 'https://localhost:8743/executor' + +audit-client: + discovery: + enabled: false + uri: '${AUDIT_SERVER_URL:http://localhost:11111/audit}' diff --git a/docker/debug.yml.example b/docker/debug.yml.example new file mode 100644 index 00000000000..0ee12296de3 --- /dev/null +++ b/docker/debug.yml.example @@ -0,0 +1,5 @@ + +executor-pool1: + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=5005","-jar","app.jar"] + ports: + - "5005:5005" diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index c427896c51e..51cc9e24d78 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -7,8 +7,9 @@ services: quickstart: profiles: - quickstart - command: ["datawave-bootstrap.sh", "--web"] - image: datawave/quickstart-compose:6.13.0-SNAPSHOT + # To run the wildfly webservice, change `--accumulo` to `--web` + command: ["datawave-bootstrap.sh", "--accumulo"] + image: datawave/quickstart-compose:7.0.0-SNAPSHOT environment: - DW_CONTAINER_HOST=quickstart - DW_DATAWAVE_WEB_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:8787 -Duser.timezone=GMT -Dfile.encoding=UTF-8 -Djava.net.preferIPv4Stack=true @@ -30,9 +31,9 @@ services: # accumulo monitor - "9995:9995" # web server - - "8443:8443" + - "9443:8443" # web server debug port - - "8787:8787" + - "5011:8787" extra_hosts: - "${DW_HOSTNAME}:${DW_HOST_IP}" - "${DW_HOST_FQDN}:${DW_HOST_IP}" @@ -79,9 +80,49 @@ services: consul: condition: service_started + # When auto.create.topics.enable is true, this causes deleted 
topics to be recreated at random. So, leave it disabled. + kafka: + image: bitnami/kafka:3.2.3 + ports: + - "9094:9094" + networks: + - demo + environment: + - KAFKA_CFG_NODE_ID=1 + - KAFKA_CFG_PROCESS_ROLES=controller,broker + - ALLOW_PLAINTEXT_LISTENER=yes + - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,CONTROLLER:PLAINTEXT,EXTERNAL:PLAINTEXT + - KAFKA_CFG_LISTENERS=CLIENT://:9092,CONTROLLER://:9093,EXTERNAL://:9094 + - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9092,EXTERNAL://${DW_HOSTNAME}:9094 + - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=1@kafka:9093 + - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER + - KAFKA_INTER_BROKER_LISTENER_NAME=CLIENT + - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=false + - KAFKA_CFG_DELETE_TOPICS_ENABLE=true + + kafdrop: + profiles: + - management + - full + image: obsidiandynamics/kafdrop + ports: + - "8999:9000" + networks: + - demo + environment: + - "KAFKA_BROKERCONNECT=${DW_HOSTNAME}:9094" + # This mapping is required to enable kafdrop to communicate with + # the external, host-bound port for kafka + extra_hosts: + - "${DW_HOSTNAME}:${DW_HOST_IP}" + - "${DW_HOST_FQDN}:${DW_HOST_IP}" + depends_on: + kafka: + condition: service_started + configuration: - entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/config-service:3.0.1-SNAPSHOT + entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5009","-jar","app.jar" ] + image: datawave/config-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,native,open_actuator @@ -93,7 +134,7 @@ services: - KEY_ALIAS=certificate ports: - "8888:8888" - - "5005:5005" + - "5009:5009" volumes: - ${CONFIG_DIR:-./config}:/microservice-config:ro - ${PKI_DIR:-./pki}:/etc/pki:ro @@ -105,8 +146,7 @@ services: condition: service_started cache: - entrypoint: [ 
"java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/hazelcast-service:3.0.1-SNAPSHOT + image: datawave/hazelcast-service:4.0.0-SNAPSHOT scale: 1 command: - --spring.profiles.active=consul,compose,remoteauth @@ -117,7 +157,6 @@ services: - "5701-5703" - "8080" - "8843:8443" - - "5006:5005" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs @@ -134,17 +173,17 @@ services: condition: service_started authorization: - entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/authorization-service:3.0.1-SNAPSHOT + entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5008","-jar","app.jar" ] + image: datawave/authorization-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - - --spring.profiles.active=consul,mock,compose + - --spring.profiles.active=consul,mock,compose,federation - --spring.cloud.consul.host=consul - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} ports: - "8080" - "8343:8443" - - "5007:5005" + - "5008:5008" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs @@ -164,8 +203,7 @@ services: profiles: - accumulo - full - entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/accumulo-service:3.0.1-SNAPSHOT + image: datawave/accumulo-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -182,7 +220,6 @@ services: - "${DW_HOST_FQDN}:${DW_HOST_IP}" ports: - "9143:8443" - - "5008:5005" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs @@ -199,8 +236,7 @@ services: condition: service_healthy audit: - entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/audit-service:3.0.2-SNAPSHOT + image: 
datawave/audit-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -217,7 +253,6 @@ services: ports: - "8080" - "9043:8443" - - "5009:5005" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs @@ -234,8 +269,8 @@ services: condition: service_healthy metrics: - entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar"] - image: datawave/query-metric-service:3.0.10-SNAPSHOT + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007","-jar","app.jar"] + image: datawave/query-metric-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -252,7 +287,7 @@ services: ports: - "8180:8080" - "8543:8443" - - "5010:5005" + - "5007:5007" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs @@ -269,8 +304,10 @@ services: condition: service_healthy dictionary: - entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar" ] - image: datawave/dictionary-service:3.0.1-SNAPSHOT + profiles: + - dictionary + - full + image: datawave/dictionary-service:4.0.0-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -287,7 +324,233 @@ services: ports: - "8280:8080" - "8643:8443" - - "5011:5005" + volumes: + - ${PKI_DIR:-./pki}:/etc/pki:ro + - ./logs:/logs + networks: + - demo + healthcheck: + test: curl -f http://localhost:8080/dictionary/mgmt/health + depends_on: + authorization: + condition: service_healthy + + # If you want to test cached results, enable the cachedresults profile + mysql: + profiles: + - cachedresults + image: mysql:8.0.32 + environment: + - MYSQL_RANDOM_ROOT_PASSWORD=true + - MYSQL_DATABASE=cachedresults + - MYSQL_USER=datawave + - MYSQL_PASSWORD=secret + networks: + - demo + healthcheck: + test: ["CMD", "mysqladmin", "ping", "-h", 
"localhost"] + timeout: 20s + retries: 10 + + # If you want to test cached results, set the CACHED_RESULTS environment variable to 'true' + query: + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar"] + image: datawave/query-service:1.0.0-SNAPSHOT + command: + - --spring.output.ansi.enabled=ALWAYS + - --spring.profiles.active=consul,compose,remoteauth,querymessaging,metricssource,query,mrquery,cachedresults,federation + - --spring.cloud.consul.host=consul + - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} + environment: + - AUDIT_SERVER_URL=http://audit:8080/audit + - HADOOP_HOST=${DW_HADOOP_HOST} + - HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf} + - "BACKEND=${BACKEND:-kafka}" + - CACHED_RESULTS=${CACHED_RESULTS:-false} + ports: + - "8080:8080" + - "8443:8443" + - "5005:5005" + volumes: + - ${PKI_DIR:-./pki}:/etc/pki:ro + - ./logs:/logs + - ${HADOOP_CONF_DIR:-hadoop_conf}:${HADOOP_CONF_DIR:-/etc/hadoop/conf}:ro + networks: + - demo + healthcheck: + test: curl -f http://localhost:8080/query/mgmt/health + interval: 10s + timeout: 1s + start_period: 30s + retries: 3 + depends_on: + audit: + condition: service_healthy + authorization: + condition: service_healthy + metrics: + condition: service_healthy + kafka: + condition: service_started + executor-pool1: + condition: service_started + + mapreduce-query: + profiles: + - full + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar"] + image: datawave/mapreduce-query-service:1.0.0-SNAPSHOT + command: + - --spring.output.ansi.enabled=ALWAYS + - --spring.profiles.active=consul,compose,remoteauth,query,mrquery,federation + - --spring.cloud.consul.host=consul + - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} + environment: + - ZOOKEEPER_HOST=${DW_ZOOKEEPER_HOST} + - HADOOP_HOST=${DW_HADOOP_HOST} + - 
HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf} + ports: + - "50005:5005" + volumes: + - ${PKI_DIR:-./pki}:/etc/pki:ro + - ./logs:/logs + - ${HADOOP_CONF_DIR:-hadoop_conf}:${HADOOP_CONF_DIR:-/etc/hadoop/conf}:ro + networks: + - demo + healthcheck: + test: curl -f http://localhost:8080/mrquery/mgmt/health + interval: 10s + timeout: 1s + start_period: 30s + retries: 3 + depends_on: + audit: + condition: service_healthy + authorization: + condition: service_healthy + metrics: + condition: service_healthy + kafka: + condition: service_started + executor-pool1: + condition: service_started + + executor-pool1: + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5006","-jar","app.jar"] + image: datawave/query-executor-service:1.0.0-SNAPSHOT + command: + - --spring.application.name=executor-pool1 + - --spring.cloud.config.name=executor + - --spring.output.ansi.enabled=ALWAYS + - --spring.profiles.active=consul,compose,remoteauth,querymessaging,metricssource,query,pool1,federation + - --spring.cloud.consul.host=consul + - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} + environment: + - ZOOKEEPER_HOST=${DW_ZOOKEEPER_HOST} + - HADOOP_HOST=${DW_HADOOP_HOST} + - BACKEND=${BACKEND:-kafka} + - HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf} + # This mapping is required to enable the metrics service to communicate + # with host-deployed services like hadoop, zookeeper, and accumulo. 
+ # These values are set locally in .env via bootstrap.sh + extra_hosts: + - "${DW_HOSTNAME}:${DW_HOST_IP}" + - "${DW_HOST_FQDN}:${DW_HOST_IP}" + ports: + - "8380:8080" + - "8743:8443" + - "5006:5006" + volumes: + - ${PKI_DIR:-./pki}:/etc/pki:ro + - ./logs/pool1:/logs + - ${HADOOP_CONF_DIR:-hadoop_conf}:${HADOOP_CONF_DIR:-/etc/hadoop/conf}:ro + networks: + - demo + healthcheck: + test: curl -f http://localhost:8080/executor/mgmt/health + interval: 10s + timeout: 1s + start_period: 30s + retries: 3 + depends_on: + rabbitmq: + condition: service_started + authorization: + condition: service_healthy + metrics: + condition: service_healthy + kafka: + condition: service_started + + executor-pool2: + profiles: + - pool2 + - full + image: datawave/query-executor-service:1.0.0-SNAPSHOT + command: + - --spring.application.name=executor-pool2 + - --spring.cloud.config.name=executor + - --spring.output.ansi.enabled=ALWAYS + - --spring.profiles.active=consul,compose,remoteauth,querymessaging,metricssource,query,pool2,federation + - --spring.cloud.consul.host=consul + - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} + environment: + - ZOOKEEPER_HOST=${DW_ZOOKEEPER_HOST} + - HADOOP_HOST=${DW_HADOOP_HOST} + - BACKEND=${BACKEND:-kafka} + - HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-/etc/hadoop/conf} + # This mapping is required to enable the metrics service to communicate + # with host-deployed services like hadoop, zookeeper, and accumulo. 
+ # These values are set locally in .env via bootstrap.sh + extra_hosts: + - "${DW_HOSTNAME}:${DW_HOST_IP}" + - "${DW_HOST_FQDN}:${DW_HOST_IP}" + ports: + - "8480:8080" + - "8243:8443" + volumes: + - ${PKI_DIR:-./pki}:/etc/pki:ro + - ./logs/pool2:/logs + - ${HADOOP_CONF_DIR:-hadoop_conf}:${HADOOP_CONF_DIR:-/etc/hadoop/conf}:ro + networks: + - demo + healthcheck: + test: curl -f http://localhost:8080/executor/mgmt/health + interval: 10s + timeout: 1s + start_period: 30s + retries: 3 + depends_on: + rabbitmq: + condition: service_started + authorization: + condition: service_healthy + metrics: + condition: service_healthy + + modification: + entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5010","-jar","app.jar"] + profiles: + - modification + - full + image: datawave/modification-service:1.0.0-SNAPSHOT + command: + - --spring.output.ansi.enabled=ALWAYS + - --spring.profiles.active=consul,compose,remoteauth,query + - --spring.cloud.consul.host=consul + - --spring.cloud.consul.discovery.instance-id=$${spring.application.name}:$${random.value} + environment: + - ZOOKEEPER_HOST=${DW_ZOOKEEPER_HOST} + # This mapping is required to enable the metrics service to communicate + # with host-deployed services like hadoop, zookeeper, and accumulo. 
+ # These values are set locally in .env via bootstrap.sh + extra_hosts: + - "${DW_HOSTNAME}:${DW_HOST_IP}" + - "${DW_HOST_FQDN}:${DW_HOST_IP}" + ports: + - "8680:8080" + - "9343:8443" + - "5010:5010" volumes: - ${PKI_DIR:-./pki}:/etc/pki:ro - ./logs:/logs diff --git a/docker/restart.sh b/docker/restart.sh new file mode 100755 index 00000000000..c50175aeb91 --- /dev/null +++ b/docker/restart.sh @@ -0,0 +1,7 @@ +#!/bin/sh +services=$@ +for service in $services; do + docker compose stop $service +done +docker compose rm -f +docker compose up -d diff --git a/docker/scripts/cachedResultsQuery.sh b/docker/scripts/cachedResultsQuery.sh new file mode 100755 index 00000000000..dc9daf538b2 --- /dev/null +++ b/docker/scripts/cachedResultsQuery.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/cachedResultsQuery.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +runCachedResultsQuery \ No newline at end of file diff --git a/docker/scripts/cancel.sh b/docker/scripts/cancel.sh new file mode 100755 index 00000000000..0f917b53120 --- /dev/null +++ b/docker/scripts/cancel.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +echo "$(date): Canceling query" +curl -X POST -s -k -E ${TMP_PEM} ${DATAWAVE_ENDPOINT}/$1/cancel -w '%{http_code}\n' diff --git a/docker/scripts/cleanup.sh b/docker/scripts/cleanup.sh index f8590782c1b..6d9777c2b9b 100755 --- a/docker/scripts/cleanup.sh +++ b/docker/scripts/cleanup.sh @@ -1,14 +1,26 @@ #!/bin/sh rm -r -f query_* rm -r -f errorQuery_* +rm -r -f edge_* +rm -r -f plan_* +rm -r -f prediction_* rm -r -f 
lookup_* rm -r -f batchLookup_* rm -r -f lookupContent_* rm -r -f batchLookupContent_* +rm -r -f streamingQuery_* rm -r -f discovery_* rm -r -f errorDiscovery_* rm -r -f count_* rm -r -f errorCount_* rm -r -f fieldIndexCount_* rm -r -f errorFieldIndexCount_* +rm -r -f poundit_* +rm -r -f executor_* +rm -r -f termFrequency_* +rm -r -f edgeEvent_* rm -r -f hitHighlights_* +rm -r -f metrics_* +rm -r -f modification_* +rm -r -f mapReduceQuery_* +rm -r -f oozieQuery_* diff --git a/docker/scripts/close.sh b/docker/scripts/close.sh new file mode 100755 index 00000000000..be6ca3d2af7 --- /dev/null +++ b/docker/scripts/close.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +echo "$(date): Closing query" +curl -X POST -s -k -E ${TMP_PEM} ${DATAWAVE_ENDPOINT}/$1/close -w '%{http_code}\n' diff --git a/docker/scripts/common/batchLookup.sh b/docker/scripts/common/batchLookup.sh index 95af7df7d63..e07fa65b6d1 100755 --- a/docker/scripts/common/batchLookup.sh +++ b/docker/scripts/common/batchLookup.sh @@ -3,6 +3,7 @@ source ${SCRIPT_DIR}/common/common.sh PAUSE='false' +POOL="${POOL:-pool1}" MAX_PAGES=100 QUERY_TYPE='batchLookup' @@ -39,6 +40,7 @@ runBatchLookup() { curl -s -D headers_0.txt -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ --data-urlencode "begin=${BEGIN}" \ --data-urlencode "end=${END}" \ --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ diff --git a/docker/scripts/common/batchLookupContent.sh b/docker/scripts/common/batchLookupContent.sh index 19284315dc8..c8866fd9fb1 100755 --- a/docker/scripts/common/batchLookupContent.sh +++ b/docker/scripts/common/batchLookupContent.sh @@ -3,6 +3,7 @@ source ${SCRIPT_DIR}/common/common.sh PAUSE='false' +POOL="${POOL:-pool1}" MAX_PAGES=100 QUERY_TYPE='batchLookupContent' @@ -38,6 +39,7 @@ runBatchLookupContent() { echo "$(date): Running LookupContentUUID query" > 
querySummary.txt curl -s -D headers_0.txt -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ --data-urlencode "begin=${BEGIN}" \ --data-urlencode "end=${END}" \ --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ diff --git a/docker/scripts/common/cachedResultsQuery.sh b/docker/scripts/common/cachedResultsQuery.sh new file mode 100755 index 00000000000..cd540df8207 --- /dev/null +++ b/docker/scripts/common/cachedResultsQuery.sh @@ -0,0 +1,148 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +PAUSE='false' +POOL="${POOL:-pool1}" +MAX_PAGES=100 +QUERY_TYPE='cachedResultsQuery' + +# QUERY PARAMETERS +#QUERY_LOGIC='EventQuery' +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='GENRES:[Action to Western]' +#QUERY_SYNTAX='LUCENE' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Query' +#PAGE_SIZE='10' + +# Override common get_query_id +get_query_id () { + while read_dom; do + if [[ $ENTITY = 'QueryId' ]]; then + echo $CONTENT + break + fi + done +} + +get_result () { + while read_dom; do + if [[ $ENTITY =~ 'Result' ]] && [[ ! 
$ENTITY =~ 'HasResults' ]]; then + echo $CONTENT + break + fi + done +} + +get_total_num_events () { + while read_dom; do + if [[ $ENTITY = 'TotalEvents' ]]; then + echo $CONTENT + break + fi + done +} + +runCachedResultsQuery() { + createTempPem + + FOLDER="${QUERY_TYPE}_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Defining query" + echo "$(date): Defining query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "query.syntax=${QUERY_SYNTAX}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode "systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "pagesize=${PAGE_SIZE}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/define -o defineResponse.xml -w '%{http_code}\n' >> querySummary.txt + + QUERY_ID=$(get_result < defineResponse.xml) + + echo "$(date): Loading cached results" + echo "$(date): Loading cached results" > querySummary.txt + curl -s -D headers_1.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + ${CACHEDRESULTS_ENDPOINT}/$QUERY_ID/load?alias=alias-${QUERY_ID} -o loadResponse.xml -w '%{http_code}\n' >> querySummary.txt + + VIEW_NAME=$(get_result < loadResponse.xml) + + echo "$(date): Creating the SQL query" + echo "$(date): Creating the SQL query" > querySummary.txt + curl -s -D headers_2.txt -k -X POST -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "fields=" \ + --data-urlencode "conditions=" \ + --data-urlencode "grouping=" \ + --data-urlencode "order=" \ + --data-urlencode "fixedFields=" \ + --data-urlencode "pagesize=10" \ + ${CACHEDRESULTS_ENDPOINT}/$VIEW_NAME/create -o createResponse.xml -w '%{http_code}\n' >> querySummary.txt 
+ + METRICS_QUERY_ID=$(get_query_id < createResponse.xml) + + i=1 + TOTAL_NUM_EVENTS=0 + TOTAL_EVENTS=0 + TOTAL_PAGES=0 + + while [ $i -gt 0 ] && [ $i -lt $MAX_PAGES ]; do + if [ "$PAUSE" == "true" ]; then + echo "press any key to continue" + read -n 1 + fi + + echo "$(date): Requesting page $i for $VIEW_NAME" + echo "$(date): Requesting page $i for $VIEW_NAME" >> querySummary.txt + curl -s -D headers_$((i + 3)).txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + "${CACHEDRESULTS_ENDPOINT}/$VIEW_NAME/getRows?rowBegin=$((TOTAL_PAGES * PAGE_SIZE + 1))&rowEnd=$(((TOTAL_PAGES + 1) * PAGE_SIZE))" -o getRowsResponse_$i.xml -w '%{http_code}\n' >> querySummary.txt + + CONTINUE=`grep 'HTTP/2 200' headers_$((i + 3)).txt` + + if [ -z "$CONTINUE" ]; then + i=-1 + else + NUM_EVENTS=$(get_num_events < getRowsResponse_$i.xml) + TOTAL_NUM_EVENTS=$(get_total_num_events < getRowsResponse_$i.xml) + TOTAL_EVENTS=$((TOTAL_EVENTS + NUM_EVENTS)) + TOTAL_PAGES=$((TOTAL_PAGES + 1)) + echo "$(date): Page $i contained $NUM_EVENTS events" + echo "$(date): Page $i contained $NUM_EVENTS events" >> querySummary.txt + + if [ $TOTAL_EVENTS -ge $TOTAL_NUM_EVENTS ]; then + i=-1 + else + ((i++)) + fi + fi + done + + echo "$(date): Returned $TOTAL_PAGES pages" + echo "$(date): Returned $TOTAL_PAGES pages" >> querySummary.txt + + echo "$(date): Returned $TOTAL_EVENTS events" + echo "$(date): Returned $TOTAL_EVENTS events" >> querySummary.txt + + cd ../ + + logMetrics +} diff --git a/docker/scripts/common/common.sh b/docker/scripts/common/common.sh index 64ce2cd35c9..52bfd47a997 100755 --- a/docker/scripts/common/common.sh +++ b/docker/scripts/common/common.sh @@ -1,6 +1,17 @@ #!/bin/bash -DATAWAVE_ENDPOINT=https://localhost:8443/DataWave/Query +WEBSERVICE="${WEBSERVICE:-false}" + +if [ "$WEBSERVICE" = true ]; then + DATAWAVE_ENDPOINT=https://localhost:9443/DataWave/Query + CACHEDRESULTS_ENDPOINT=https://localhost:9443/DataWave/CachedResults + 
MAPREDUCE_ENDPOINT=https://localhost:9443/DataWave/MapReduce +else + DATAWAVE_ENDPOINT=https://localhost:8443/query/v1/query + CACHEDRESULTS_ENDPOINT=https://localhost:8443/query/v1/cachedresults + MAPREDUCE_ENDPOINT=https://localhost:8443/query/v1/mapreduce +fi + METRICS_ENDPOINT=https://localhost:8543/querymetric/v1 createTempPem() { diff --git a/docker/scripts/common/edge.sh b/docker/scripts/common/edge.sh new file mode 100755 index 00000000000..5500dfe675a --- /dev/null +++ b/docker/scripts/common/edge.sh @@ -0,0 +1,107 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +PAUSE='false' +POOL="${POOL:-pool1}" +MAX_PAGES=100 +QUERY_TYPE='edge' + +# QUERY PARAMETERS +#QUERY_LOGIC='EdgeQuery' +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='SOURCE == 'Jerry Seinfeld'' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Edge Query' +#PAGE_SIZE='100' + +get_num_edges () { + declare -i count=0 + while read_dom; do + if [[ $ENTITY = '/Edge' ]]; then + count=$((count + 1)) + fi + done + echo $count +} + +runEdgeQuery() { + createTempPem + + FOLDER="${QUERY_TYPE}_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Creating query" + echo "$(date): Creating query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode "systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "pagesize=${PAGE_SIZE}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/create -o createResponse.xml -w '%{http_code}\n' >> querySummary.txt + + i=1 + + QUERY_ID=$(get_query_id < createResponse.xml) + + TOTAL_EVENTS=0 + TOTAL_PAGES=0 + + while [ $i -gt 0 ] && [ $i -lt 
$MAX_PAGES ]; do + echo "$(date): Requesting page $i for $QUERY_ID" + echo "$(date): Requesting page $i for $QUERY_ID" >> querySummary.txt + curl -s -D headers_$i.txt -q -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + ${DATAWAVE_ENDPOINT}/$QUERY_ID/next -o nextResponse_$i.xml -w '%{http_code}\n' >> querySummary.txt + + CONTINUE=`grep 'HTTP/2 200' headers_$i.txt` + + if [ -z "$CONTINUE" ]; then + i=-1 + else + NUM_EVENTS=$(get_num_edges < nextResponse_$i.xml) + TOTAL_EVENTS=$((TOTAL_EVENTS + NUM_EVENTS)) + TOTAL_PAGES=$((TOTAL_PAGES + 1)) + echo "$(date): Page $i contained $NUM_EVENTS edges" + echo "$(date): Page $i contained $NUM_EVENTS edges" >> querySummary.txt + + ((i++)) + fi + + if [ "$PAUSE" == "true" ]; then + echo "press any key to continue" + read -n 1 + fi + done + + echo "$(date): Returned $TOTAL_PAGES pages" + echo "$(date): Returned $TOTAL_PAGES pages" >> querySummary.txt + + echo "$(date): Returned $TOTAL_EVENTS events" + echo "$(date): Returned $TOTAL_EVENTS events" >> querySummary.txt + + echo "$(date): Closing $QUERY_ID" + echo "$(date): Closing $QUERY_ID" >> querySummary.txt + # close the query + curl -s -q -k -X POST -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + ${DATAWAVE_ENDPOINT}/$QUERY_ID/close -o closeResponse.xml -w '%{http_code}\n' >> querySummary.txt + + cd ../ + + logMetrics +} diff --git a/docker/scripts/common/lookup.sh b/docker/scripts/common/lookup.sh index 1dd25071faa..e23c92ff373 100755 --- a/docker/scripts/common/lookup.sh +++ b/docker/scripts/common/lookup.sh @@ -3,6 +3,7 @@ source ${SCRIPT_DIR}/common/common.sh PAUSE='false' +POOL="${POOL:-pool1}" MAX_PAGES=100 QUERY_TYPE='lookup' @@ -41,6 +42,7 @@ runLookup() { echo "$(date): Running LookupUUID query" > querySummary.txt curl -s -D headers_0.txt -X GET -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ --data-urlencode "begin=${BEGIN}" \ --data-urlencode "end=${END}" \ --data-urlencode 
"columnVisibility=${COLUMN_VISIBILITY}" \ diff --git a/docker/scripts/common/lookupContent.sh b/docker/scripts/common/lookupContent.sh index 2887f4b787a..2c8cefa55f1 100755 --- a/docker/scripts/common/lookupContent.sh +++ b/docker/scripts/common/lookupContent.sh @@ -3,6 +3,7 @@ source ${SCRIPT_DIR}/common/common.sh PAUSE='false' +POOL="${POOL:-pool1}" MAX_PAGES=100 QUERY_TYPE='lookupContent' @@ -41,6 +42,7 @@ runLookupContent() { echo "$(date): Running LookupUUID query" > querySummary.txt curl -s -D headers_0.txt -X GET -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ --data-urlencode "begin=${BEGIN}" \ --data-urlencode "end=${END}" \ --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ diff --git a/docker/scripts/common/mapReduceQuery.sh b/docker/scripts/common/mapReduceQuery.sh new file mode 100755 index 00000000000..efae9cf3314 --- /dev/null +++ b/docker/scripts/common/mapReduceQuery.sh @@ -0,0 +1,184 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +PAUSE='false' +POOL="${POOL:-pool1}" +MAX_PAGES=100 +QUERY_TYPE='mapReduceQuery' + +# QUERY PARAMETERS +#QUERY_LOGIC='EventQuery' +#JOB_NAME='BulkResultsJob' +#FORMAT=XML +#OUTPUT_FORMAT=TEXT +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='GENRES:[Action to Western]' +#QUERY_SYNTAX='LUCENE' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Query' +#PAGE_SIZE='10' + +get_job_status () { + while read_dom; do + if [[ $ENTITY =~ 'JobExecution' ]]; then + if [[ $ENTITY =~ 'state="DEFINED"' ]]; then + echo "DEFINED" + break + elif [[ $ENTITY =~ 'state="SUBMITTED"' ]]; then + echo "SUBMITTED" + break + elif [[ $ENTITY =~ 'state="RUNNING"' ]]; then + echo "RUNNING" + break + elif [[ $ENTITY =~ 'state="SUCCEEDED"' ]]; then + echo "SUCCEEDED" + break + elif [[ $ENTITY =~ 'state="CLOSED"' ]]; then + echo "CLOSED" + break + elif [[ $ENTITY =~ 'state="CANCELED"' ]]; then + echo "CANCELED" + break + elif [[ $ENTITY =~ 
'state="FAILED"' ]]; then + echo "FAILED" + break + fi + fi + done +} + +# Override common get_num_events +get_num_events () { + local EVENTS=0 + while read_dom; do + if [[ $ENTITY = 'ReturnedEvents' ]] || [[ $ENTITY = 'returnedEvents' ]]; then + EVENTS=$((EVENTS + CONTENT)) + fi + done + echo $EVENTS +} + +# Override common logMetrics +logMetrics () { + if [ ! -z "$JOB_ID" ]; then + mv $FOLDER ${QUERY_TYPE}_${JOB_ID} + + echo "$(date): Job status available at: ${MAPREDUCE_ENDPOINT}/${JOB_ID}/list" + echo "$(date): Job status available at: ${MAPREDUCE_ENDPOINT}/${JOB_ID}/list" >> ${QUERY_TYPE}_${JOB_ID}/querySummary.txt + fi +} + +runMapReduceQuery() { + createTempPem + + FOLDER="${QUERY_TYPE}_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Defining query" + echo "$(date): Defining query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "query.syntax=${QUERY_SYNTAX}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode "systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "pagesize=${PAGE_SIZE}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/define -o defineResponse.xml -w '%{http_code}\n' >> querySummary.txt + + + QUERY_ID=$(get_query_id < defineResponse.xml) + + echo "$(date): Submitting map reduce query" + echo "$(date): Submitting map reduce query" >> querySummary.txt + + # To write the output to a table, add the following parameter + # --data-urlencode "outputTableName=ResultsTable" \ + + curl -s -D headers_1.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "jobName=${JOB_NAME}" \ + --data-urlencode "queryId=${QUERY_ID}" \ + --data-urlencode "format=${FORMAT}" \ 
+ --data-urlencode "outputFormat=${OUTPUT_FORMAT}" \ + ${MAPREDUCE_ENDPOINT}/submit -o submitResponse.xml -w '%{http_code}\n' >> querySummary.txt + + JOB_ID=$(get_query_id < submitResponse.xml) + + ATTEMPTS=6 + ATTEMPT=1 + TIMEOUT=20 + + JOB_STATUS="DEFINED" + + while [ $ATTEMPT -le $ATTEMPTS ]; do + echo "$(date): Checking map reduce query status (Attempt ${ATTEMPT}/${ATTEMPTS})" + echo "$(date): Checking map reduce query status (Attempt ${ATTEMPT}/${ATTEMPTS})" >> querySummary.txt + + curl -s -k -E ${TMP_PEM} \ + ${MAPREDUCE_ENDPOINT}/${JOB_ID}/list -o listResponse.xml -w '%{http_code}\n' >> querySummary.txt + + JOB_STATUS=$(get_job_status < listResponse.xml) + + echo "$(date): Job Status: $JOB_STATUS" + echo "$(date): Job Status: $JOB_STATUS" >> querySummary.txt + + if [ "$JOB_STATUS" != "DEFINED" ] && [ "$JOB_STATUS" != "SUBMITTED" ] && [ "$JOB_STATUS" != "RUNNING" ]; then + break; + fi + + if [ $ATTEMPT -le $ATTEMPTS ]; then + sleep ${TIMEOUT} + fi + + ((ATTEMPT++)) + done + + TOTAL_EVENTS=0 + TOTAL_FILES=0 + if [ "$JOB_STATUS" == "SUCCEEDED" ]; then + echo "$(date): Downloading results.tar" + echo "$(date): Downloading results.tar" >> querySummary.txt + + curl -s -k -E ${TMP_PEM} \ + ${MAPREDUCE_ENDPOINT}/${JOB_ID}/getAllFiles -o results.tar -w '%{http_code}\n' >> querySummary.txt + + tar -xf results.tar + + cd ${JOB_ID} + + for f in $(ls) + do + NUM_EVENTS=$(get_num_events < $f) + TOTAL_EVENTS=$((TOTAL_EVENTS + NUM_EVENTS)) + TOTAL_FILES=$((TOTAL_FILES + 1)) + + echo "$(date): $f contained $NUM_EVENTS events" + echo "$(date): $f contained $NUM_EVENTS events" >> querySummary.txt + done + + cd .. 
+ fi + + echo "$(date): Returned $TOTAL_FILES files" + echo "$(date): Returned $TOTAL_FILES files" >> querySummary.txt + + echo "$(date): Returned $TOTAL_EVENTS events" + echo "$(date): Returned $TOTAL_EVENTS events" >> querySummary.txt + + cd ../ + + logMetrics +} \ No newline at end of file diff --git a/docker/scripts/common/oozieQuery.sh b/docker/scripts/common/oozieQuery.sh new file mode 100755 index 00000000000..989d70e6942 --- /dev/null +++ b/docker/scripts/common/oozieQuery.sh @@ -0,0 +1,152 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +PAUSE='false' +POOL="${POOL:-pool1}" +MAX_PAGES=100 +QUERY_TYPE='oozieQuery' + +# QUERY PARAMETERS +#WORKFLOW='OozieJob' +#COLUMN_VISIBILITY='PUBLIC' + +get_job_status () { + while read_dom; do + if [[ $ENTITY =~ 'JobExecution' ]]; then + if [[ $ENTITY =~ 'state="DEFINED"' ]]; then + echo "DEFINED" + break + elif [[ $ENTITY =~ 'state="SUBMITTED"' ]]; then + echo "SUBMITTED" + break + elif [[ $ENTITY =~ 'state="RUNNING"' ]]; then + echo "RUNNING" + break + elif [[ $ENTITY =~ 'state="SUCCEEDED"' ]]; then + echo "SUCCEEDED" + break + elif [[ $ENTITY =~ 'state="CLOSED"' ]]; then + echo "CLOSED" + break + elif [[ $ENTITY =~ 'state="CANCELED"' ]]; then + echo "CANCELED" + break + elif [[ $ENTITY =~ 'state="FAILED"' ]]; then + echo "FAILED" + break + fi + fi + done +} + +# Override common get_num_events +get_num_events () { + local EVENTS=0 + while read_dom; do + if [[ $ENTITY = 'ReturnedEvents' ]] || [[ $ENTITY = 'returnedEvents' ]]; then + EVENTS=$((EVENTS + CONTENT)) + fi + done + echo $EVENTS +} + +# Override common logMetrics +logMetrics () { + if [ ! 
-z "$JOB_ID" ]; then + mv $FOLDER ${QUERY_TYPE}_${JOB_ID} + + echo "$(date): Job status available at: ${DATAWAVE_ENDPOINT}/mapreduce/${JOB_ID}/list" + echo "$(date): Job status available at: ${DATAWAVE_ENDPOINT}/mapreduce/${JOB_ID}/list" >> ${QUERY_TYPE}_${JOB_ID}/querySummary.txt + fi +} + +runOozieQuery() { + createTempPem + + FOLDER="${QUERY_TYPE}_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Submitting oozie query" + echo "$(date): Submitting oozie query" >> querySummary.txt + + # To write the output to a table, add the following parameter + # --data-urlencode "outputTableName=ResultsTable" \ + + curl -s -D headers_1.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "workFlow=${WORKFLOW}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + ${MAPREDUCE_ENDPOINT}/oozieSubmit -o submitResponse.xml -w '%{http_code}\n' >> querySummary.txt + + JOB_ID=$(get_query_id < submitResponse.xml) + + ATTEMPTS=6 + ATTEMPT=1 + TIMEOUT=20 + + JOB_STATUS="DEFINED" + + while [ $ATTEMPT -le $ATTEMPTS ]; do + echo "$(date): Checking oozie query status (Attempt ${ATTEMPT}/${ATTEMPTS})" + echo "$(date): Checking oozie query status (Attempt ${ATTEMPT}/${ATTEMPTS})" >> querySummary.txt + + curl -s -k -E ${TMP_PEM} \ + ${MAPREDUCE_ENDPOINT}/${JOB_ID}/list -o listResponse.xml -w '%{http_code}\n' >> querySummary.txt + + JOB_STATUS=$(get_job_status < listResponse.xml) + + echo "$(date): Job Status: $JOB_STATUS" + echo "$(date): Job Status: $JOB_STATUS" >> querySummary.txt + + if [ "$JOB_STATUS" != "DEFINED" ] && [ "$JOB_STATUS" != "SUBMITTED" ] && [ "$JOB_STATUS" != "RUNNING" ]; then + break; + fi + + if [ $ATTEMPT -lt $ATTEMPTS ]; then + sleep ${TIMEOUT} + ((ATTEMPT++)) + fi + done + + TOTAL_EVENTS=0 + TOTAL_FILES=0 + if [ "$JOB_STATUS" == "SUCCEEDED" ]; then + echo "$(date): Downloading results.tar" + echo "$(date): Downloading results.tar" >> querySummary.txt + + curl -s 
-k -E ${TMP_PEM} \ + ${MAPREDUCE_ENDPOINT}/${JOB_ID}/getAllFiles -o results.tar -w '%{http_code}\n' >> querySummary.txt + + tar -xf results.tar + + cd ${JOB_ID} + + for f in $(ls) + do + NUM_EVENTS=$(get_num_events < $f) + TOTAL_EVENTS=$((TOTAL_EVENTS + NUM_EVENTS)) + TOTAL_FILES=$((TOTAL_FILES + 1)) + + echo "$(date): $f contained $NUM_EVENTS events" + echo "$(date): $f contained $NUM_EVENTS events" >> querySummary.txt + done + + cd .. + fi + + echo "$(date): Returned $TOTAL_FILES files" + echo "$(date): Returned $TOTAL_FILES files" >> querySummary.txt + + echo "$(date): Returned $TOTAL_EVENTS events" + echo "$(date): Returned $TOTAL_EVENTS events" >> querySummary.txt + + cd ../ + + logMetrics +} \ No newline at end of file diff --git a/docker/scripts/common/plan.sh b/docker/scripts/common/plan.sh new file mode 100755 index 00000000000..f677877720c --- /dev/null +++ b/docker/scripts/common/plan.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +POOL="${POOL:-pool1}" + +# QUERY PARAMETERS +#QUERY_LOGIC='EventQuery' +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='GENRES:[Action to Western]' +#QUERY_SYNTAX='LUCENE' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Query' +#EXPAND_VALUES='true' + +get_query_plan () { + while read_dom; do + if [[ $ENTITY =~ 'Result' ]] && [[ ! 
$ENTITY =~ 'HasResults' ]]; then + echo $CONTENT + break + fi + done +} + +runPlan() { + createTempPem + + FOLDER="plan_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Planning query" + echo "$(date): Planning query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "query.syntax=${QUERY_SYNTAX}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode "systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "expand.values=${EXPAND_VALUES}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/plan -o planResponse.txt -w '%{http_code}\n' >> querySummary.txt + + QUERY_PLAN=$(get_query_plan < planResponse.txt) + + echo "$(date): Received query plan" + echo "$(date): Received query plan" >> querySummary.txt + + echo "$QUERY_PLAN" + echo "$QUERY_PLAN" >> querySummary.txt + + cd ../ +} diff --git a/docker/scripts/common/predict.sh b/docker/scripts/common/predict.sh new file mode 100755 index 00000000000..0632b2dbc6f --- /dev/null +++ b/docker/scripts/common/predict.sh @@ -0,0 +1,64 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +POOL="${POOL:-pool1}" + +# QUERY PARAMETERS +#QUERY_LOGIC='EventQuery' +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='GENRES:[Action to Western]' +#QUERY_SYNTAX='LUCENE' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Query' +#EXPAND_VALUES='true' + +get_query_prediction () { + while read_dom; do + if [[ $ENTITY =~ 'Result' ]] && [[ ! 
$ENTITY =~ 'HasResults' ]]; then + echo $CONTENT + break + fi + done +} + +runPredict() { + createTempPem + + FOLDER="predict_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Predicting query" + echo "$(date): Predicting query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "query.syntax=${QUERY_SYNTAX}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode "systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "expand.values=${EXPAND_VALUES}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/predict -o predictResponse.txt -w '%{http_code}\n' >> querySummary.txt + + QUERY_PREDICTION=$(get_query_prediction < predictResponse.txt) + + echo "$(date): Received query prediction" + echo "$(date): Received query prediction" >> querySummary.txt + + echo "$QUERY_PREDICTION" + echo "$QUERY_PREDICTION" >> querySummary.txt + + cd ../ +} diff --git a/docker/scripts/common/query.sh b/docker/scripts/common/query.sh index 2d2b390fd6e..2f94a39420c 100755 --- a/docker/scripts/common/query.sh +++ b/docker/scripts/common/query.sh @@ -3,6 +3,7 @@ source ${SCRIPT_DIR}/common/common.sh PAUSE='false' +POOL="${POOL:-pool1}" MAX_PAGES=100 QUERY_TYPE='query' @@ -32,6 +33,7 @@ runQuery() { curl -s -D headers_0.txt -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ --data-urlencode "begin=${BEGIN}" \ --data-urlencode "end=${END}" \ --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ @@ -60,6 +62,7 @@ runQuery() { echo "$(date): Requesting page $i for $QUERY_ID" >> querySummary.txt curl -s -D headers_$i.txt -q -k -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ ${DATAWAVE_ENDPOINT}/$QUERY_ID/next -o 
nextResponse_$i.xml -w '%{http_code}\n' >> querySummary.txt CONTINUE=`grep 'HTTP/2 200' headers_$i.txt` @@ -88,6 +91,7 @@ runQuery() { # close the query curl -s -q -k -X POST -E ${TMP_PEM} \ -H "Accept: application/xml" \ + -H "Pool: $POOL" \ ${DATAWAVE_ENDPOINT}/$QUERY_ID/close -o closeResponse.xml -w '%{http_code}\n' >> querySummary.txt cd ../ diff --git a/docker/scripts/common/streamingQuery.sh b/docker/scripts/common/streamingQuery.sh new file mode 100755 index 00000000000..2a6dd8c1308 --- /dev/null +++ b/docker/scripts/common/streamingQuery.sh @@ -0,0 +1,77 @@ +#!/bin/bash + +source ${SCRIPT_DIR}/common/common.sh + +PAUSE='false' +POOL="${POOL:-pool1}" +MAX_PAGES=100 +QUERY_TYPE='streamingQuery' + +# QUERY PARAMETERS +#QUERY_LOGIC='EventQuery' +#BEGIN='19660908 000000.000' +#END='20161002 235959.999' +#COLUMN_VISIBILITY='PUBLIC' +#QUERY='GENRES:[Action to Western]' +#QUERY_SYNTAX='LUCENE' +#AUTHS='PUBLIC,PRIVATE,BAR,FOO' +#QUERY_NAME='Developer Test Streaming Query' +#PAGE_SIZE='10' + +# Override common get_query_id +get_query_id () { + while read_dom; do + if [[ $ENTITY =~ 'QueryId' ]]; then + echo $CONTENT + break + fi + done +} + +# Override common get_num_events +get_num_events () { + count=0 + while read_dom; do + if [[ $ENTITY = 'ReturnedEvents' ]]; then + count=$((count + CONTENT)) + fi + done + echo $count +} + +runStreamingQuery() { + createTempPem + + FOLDER="${QUERY_TYPE}_$(date +%Y%m%d_%I%M%S.%N)" + + mkdir $FOLDER + cd $FOLDER + + SYSTEM_FROM=$(hostname) + + echo "$(date): Running streaming query" + echo "$(date): Running streaming query" > querySummary.txt + curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + --data-urlencode "begin=${BEGIN}" \ + --data-urlencode "end=${END}" \ + --data-urlencode "columnVisibility=${COLUMN_VISIBILITY}" \ + --data-urlencode "query=${QUERY}" \ + --data-urlencode "query.syntax=${QUERY_SYNTAX}" \ + --data-urlencode "auths=${AUTHS}" \ + --data-urlencode 
"systemFrom=${SYSTEM_FROM}" \ + --data-urlencode "queryName=${QUERY_NAME}" \ + --data-urlencode "pagesize=${PAGE_SIZE}" \ + ${DATAWAVE_ENDPOINT}/${QUERY_LOGIC}/createAndExecute -o streamingResponse.xml -w '%{http_code}\n' >> querySummary.txt + + QUERY_ID=$(get_query_id < streamingResponse.xml) + NUM_EVENTS=$(get_num_events < streamingResponse.xml) + + echo "$(date): Streaming results contained $NUM_EVENTS events" + echo "$(date): Streaming results contained $NUM_EVENTS events" >> querySummary.txt + + cd ../ + + logMetrics +} diff --git a/docker/scripts/connectionFactory.sh b/docker/scripts/connectionFactory.sh new file mode 100755 index 00000000000..45941a20369 --- /dev/null +++ b/docker/scripts/connectionFactory.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +EXECUTOR_ENDPOINT1=https://localhost:8743/executor/v1 +EXECUTOR_ENDPOINT2=https://localhost:8843/executor/v1 + +FOLDER="executor_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): polling connection factory for pool1" +curl -s -D headers_1.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + ${EXECUTOR_ENDPOINT1}/Common/AccumuloConnectionFactory/stats -o connectionFactory1Response.txt -w '%{http_code}\n' +echo "$(date): polling connection factory for pool2" +curl -s -D headers_2.txt -k -E ${TMP_PEM} \ + -H "Accept: application/xml" \ + -H "Pool: $POOL" \ + ${EXECUTOR_ENDPOINT2}/Common/AccumuloConnectionFactory/stats -o connectionFactory2Response.txt -w '%{http_code}\n' + +cd ../ diff --git a/docker/scripts/edge.sh b/docker/scripts/edge.sh new file mode 100755 index 00000000000..425b21d4d9b --- /dev/null +++ b/docker/scripts/edge.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/edge.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EdgeQuery' 
+BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='SOURCE == 'Jerry Seinfeld'' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Edge Query' +PAGE_SIZE='100' + +runEdgeQuery diff --git a/docker/scripts/edgeEvent.sh b/docker/scripts/edgeEvent.sh new file mode 100755 index 00000000000..ff5891b1dee --- /dev/null +++ b/docker/scripts/edgeEvent.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/query.sh + +# QUERY PARAMETERS +QUERY_TYPE='edgeEvent' +QUERY_LOGIC='EdgeEventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='SOURCE:Jerry\\ Seinfeld SINK:Seinfeld TYPE:TV_SHOW_CAST RELATION:PERSON-SHOW' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +runQuery diff --git a/docker/scripts/executorHealth.sh b/docker/scripts/executorHealth.sh new file mode 100755 index 00000000000..4fa56b10d41 --- /dev/null +++ b/docker/scripts/executorHealth.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +HEALTH_ENDPOINT=https://localhost:8743/executor/mgmt/health + +FOLDER="executorHealth_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): Getting query executor service health" +echo "$(date): Getting query executor service health" > healthSummary.txt +curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/json" \ + ${HEALTH_ENDPOINT} -o healthResponse.json -w '%{http_code}\n' >> healthSummary.txt + +echo "$(date): Query Executor service health retrieved" +echo "$(date): Query Executor service health retrieved" > healthSummary.txt diff --git a/docker/scripts/executorShutdown.sh b/docker/scripts/executorShutdown.sh new file mode 100755 index 00000000000..68e733bbeb3 --- /dev/null 
+++ b/docker/scripts/executorShutdown.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +SHUTDOWN_ENDPOINT=https://localhost:8743/executor/mgmt/shutdown + +FOLDER="executorShutdown_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): Shutting down query executor service" +echo "$(date): Shutting down query executor service" > shutdownSummary.txt +curl -s -D headers_0.txt -k -E ${TMP_PEM} -X POST \ + -H "Accept: application/json" \ + ${SHUTDOWN_ENDPOINT} -o shutdownResponse.json -w '%{http_code}\n' >> shutdownSummary.txt + +echo "$(date): Query Executor service shutdown" +echo "$(date): Query Executor service shutdown" > shutdownSummary.txt diff --git a/docker/scripts/mapReduceCancel.sh b/docker/scripts/mapReduceCancel.sh new file mode 100755 index 00000000000..cec8a55c474 --- /dev/null +++ b/docker/scripts/mapReduceCancel.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +echo "$(date): Canceling map reduce query" +curl -X POST -s -k -E ${TMP_PEM} ${MAPREDUCE_ENDPOINT}/$1/cancel -w '%{http_code}\n' diff --git a/docker/scripts/mapReduceQuery.sh b/docker/scripts/mapReduceQuery.sh new file mode 100755 index 00000000000..dca3cc9da5e --- /dev/null +++ b/docker/scripts/mapReduceQuery.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/mapReduceQuery.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EventQuery' +JOB_NAME='BulkResultsJob' +FORMAT=XML +OUTPUT_FORMAT=TEXT +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +runMapReduceQuery diff --git 
a/docker/scripts/mapReduceRemove.sh b/docker/scripts/mapReduceRemove.sh new file mode 100755 index 00000000000..5c5d8562f9e --- /dev/null +++ b/docker/scripts/mapReduceRemove.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +echo "$(date): Removing map reduce query" +curl -X DELETE -s -k -E ${TMP_PEM} ${DATAWAVE_ENDPOINT}/$1/remove -w '%{http_code}\n' diff --git a/docker/scripts/metrics.sh b/docker/scripts/metrics.sh new file mode 100755 index 00000000000..a6c4ce56b3b --- /dev/null +++ b/docker/scripts/metrics.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/query.sh + +# QUERY PARAMETERS +QUERY_TYPE='metrics' +QUERY_LOGIC='QueryMetricsQuery' +BEGIN='20000101 000000.000' +END="$(date +%Y%m%d) 235959.999" +COLUMN_VISIBILITY='PUBLIC' +QUERY='QUERY_ID:[0 TO z]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +runQuery diff --git a/docker/scripts/modification.sh b/docker/scripts/modification.sh new file mode 100755 index 00000000000..26917515f69 --- /dev/null +++ b/docker/scripts/modification.sh @@ -0,0 +1,31 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +MODIFICATION_ENDPOINT=https://localhost:9343/modification/v1 + +FOLDER="modification_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): polling modification service for configurations" +curl -s -D headers_1.txt -k -E ${TMP_PEM} \ + -H "Accept: application/json" \ + ${MODIFICATION_ENDPOINT}/listConfigurations -o modificationConfigurationResponse.txt -w '%{http_code}\n' + +echo "$(date): reloading modification service fields" +curl -s -D headers_2.txt -k -E ${TMP_PEM} \ + -H "Accept: application/json" \ + 
${MODIFICATION_ENDPOINT}/reloadCache -o modificationReloadCacheResponse.txt -w '%{http_code}\n' + +echo "$(date): polling modification service fields" +curl -s -D headers_2.txt -k -E ${TMP_PEM} \ + -H "Accept: application/json" \ + ${MODIFICATION_ENDPOINT}/getMutableFieldList -o modificationFieldListResponse.txt -w '%{http_code}\n' + +cd ../ diff --git a/docker/scripts/oozieQuery.sh b/docker/scripts/oozieQuery.sh new file mode 100755 index 00000000000..8251471bf0b --- /dev/null +++ b/docker/scripts/oozieQuery.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/oozieQuery.sh + +# QUERY PARAMETERS +WORKFLOW='OozieJob' +COLUMN_VISIBILITY='PUBLIC' + +runOozieQuery diff --git a/docker/scripts/plan.sh b/docker/scripts/plan.sh new file mode 100755 index 00000000000..4b01d4e3b1c --- /dev/null +++ b/docker/scripts/plan.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/plan.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +EXPAND_VALUES='true' + +runPlan diff --git a/docker/scripts/poundit.sh b/docker/scripts/poundit.sh new file mode 100755 index 00000000000..bc3ce8bae47 --- /dev/null +++ b/docker/scripts/poundit.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +RUNS=${1:-10} +SCRIPTS=${@:2} +FOLDER="poundit_${RUNS}_$(date +%Y%m%d_%I%M%S.%N)" + +if [[ "$SCRIPTS" == "" || "$SCRIPTS" == "all" ]]; then + SCRIPTS="batchLookupContent.sh batchLookup.sh connectionFactory.sh count.sh discovery.sh edgeEvent.sh edge.sh errorCount.sh errorDiscovery.sh errorFieldIndexCount.sh errorQuery.sh fieldIndexCount.sh hitHighlights.sh lookupContent.sh lookup.sh metrics.sh plan.sh predict.sh query.sh 
streamingQuery.sh termFrequency.sh" +fi + +mkdir $FOLDER +cd $FOLDER + +for ((i=0; i < ${RUNS}; i++)); do + for script in $SCRIPTS; do + echo "Executing ../${script} >> ${script%%.sh}.log &" + ../${script} >> ${script%%.sh}.log & + done +done diff --git a/docker/scripts/predict.sh b/docker/scripts/predict.sh new file mode 100755 index 00000000000..4eeb34695c7 --- /dev/null +++ b/docker/scripts/predict.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/predict.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +EXPAND_VALUES='true' + +runPredict diff --git a/docker/scripts/queryHealth.sh b/docker/scripts/queryHealth.sh new file mode 100755 index 00000000000..25a639bd8ec --- /dev/null +++ b/docker/scripts/queryHealth.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +HEALTH_ENDPOINT=https://localhost:8443/query/mgmt/health + +FOLDER="queryHealth_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): Getting query service health" +echo "$(date): Getting query service health" > healthSummary.txt +curl -s -D headers_0.txt -k -E ${TMP_PEM} \ + -H "Accept: application/json" \ + ${HEALTH_ENDPOINT} -o healthResponse.json -w '%{http_code}\n' >> healthSummary.txt + +echo "$(date): Query service health retrieved" +echo "$(date): Query service health retrieved" > healthSummary.txt diff --git a/docker/scripts/queryShutdown.sh b/docker/scripts/queryShutdown.sh new file mode 100755 index 00000000000..c720eed8bd1 --- /dev/null +++ b/docker/scripts/queryShutdown.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- 
"${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/common.sh + +createTempPem + +SHUTDOWN_ENDPOINT=https://localhost:8443/query/mgmt/shutdown + +FOLDER="queryShutdown_$(date +%Y%m%d_%I%M%S.%N)" + +mkdir $FOLDER +cd $FOLDER + +echo "$(date): Shutting down query service" +echo "$(date): Shutting down query service" > shutdownSummary.txt +curl -s -D headers_0.txt -k -E ${TMP_PEM} -X POST \ + -H "Accept: application/json" \ + ${SHUTDOWN_ENDPOINT} -o shutdownResponse.json -w '%{http_code}\n' >> shutdownSummary.txt + +echo "$(date): Query service shutdown" +echo "$(date): Query service shutdown" > shutdownSummary.txt diff --git a/docker/scripts/streamingQuery.sh b/docker/scripts/streamingQuery.sh new file mode 100755 index 00000000000..8eb48db3e1d --- /dev/null +++ b/docker/scripts/streamingQuery.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/streamingQuery.sh + +# QUERY PARAMETERS +QUERY_LOGIC='EventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Streaming Query' +PAGE_SIZE='10' + +runStreamingQuery diff --git a/docker/scripts/termFrequency.sh b/docker/scripts/termFrequency.sh new file mode 100755 index 00000000000..2accd64ca48 --- /dev/null +++ b/docker/scripts/termFrequency.sh @@ -0,0 +1,19 @@ +#!/bin/bash + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/query.sh + +# QUERY PARAMETERS +QUERY_TYPE='termFrequency' +QUERY_LOGIC='TermFrequencyQuery' +BEGIN='19500101 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='jackie:19520920_0/tvmaze/-bb3qxp.e771of.e3f2gs' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +runQuery diff --git 
a/docker/scripts/testAll.sh b/docker/scripts/testAll.sh index 2647d5594da..90ac157638a 100755 --- a/docker/scripts/testAll.sh +++ b/docker/scripts/testAll.sh @@ -2,6 +2,7 @@ SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) +WEBSERVICE="${WEBSERVICE:-false}" MAX_ATTEMPTS=30 TIMEOUT=10 @@ -58,16 +59,31 @@ runTest () { done } -echo "Waiting for webservice to be ready..." +if [ "$WEBSERVICE" = true ]; then + echo "Waiting for webservice to be ready..." +else + echo "Waiting for services to be ready..." +fi attempt=0 while [ $attempt -lt $MAX_ATTEMPTS ]; do - echo "Checking webservice status (${attempt}/${MAX_ATTEMPTS})" + if [ "$WEBSERVICE" = true ]; then + echo "Checking webservice status (${attempt}/${MAX_ATTEMPTS})" - WEBSERVICE_STATUS=$(curl -s -m 5 -k https://localhost:8443/DataWave/Common/Health/health | grep Status) - if [[ "${WEBSERVICE_STATUS}" =~ \"Status\":\"ready\" ]] ; then - echo "Webservice ready" - break + WEBSERVICE_STATUS=$(curl -s -m 5 -k https://localhost:9443/DataWave/Common/Health/health | grep Status) + if [[ "${WEBSERVICE_STATUS}" =~ \"Status\":\"ready\" ]] ; then + echo "Webservice ready" + break + fi + else + echo "Checking query and executor status (${attempt}/${MAX_ATTEMPTS})" + + QUERY_STATUS=$(curl -s -m 5 http://localhost:8080/query/mgmt/health | grep UP) + EXEC_STATUS=$(curl -s -m 5 http://localhost:8380/executor/mgmt/health | grep UP) + if [ "${QUERY_STATUS}" == "{\"status\":\"UP\"}" ] && [ "${EXEC_STATUS}" == "{\"status\":\"UP\"}" ] ; then + echo "Query and Executor Services ready" + break + fi fi sleep ${TIMEOUT} @@ -76,7 +92,11 @@ while [ $attempt -lt $MAX_ATTEMPTS ]; do done if [ $attempt == $MAX_ATTEMPTS ]; then - echo "FAILURE! Webservice never became ready" + if [ "$WEBSERVICE" = true ]; then + echo "FAILURE! Webservice never became ready" + else + echo "FAILURE! 
Query and/or Executor Services never became ready" + fi exit 1 fi @@ -88,6 +108,8 @@ runTest batchLookup.sh 2 runTest batchLookupContent.sh 4 runTest count.sh 12 1 runTest discovery.sh 2 1 +# runTest edge.sh 0 0 +# runTest edgeEvent.sh 1 1 runTest errorCount.sh 1 1 runTest errorDiscovery.sh 1 1 runTest errorFieldIndexCount.sh 1 1 @@ -96,7 +118,10 @@ runTest fieldIndexCount.sh 12 2 runTest hitHighlights.sh 12 2 runTest lookup.sh 1 runTest lookupContent.sh 2 +# runTest metrics.sh 0 0 runTest query.sh 12 2 +#runTest mapReduceQuery.sh 12 2 +#runTest oozieQuery.sh 0 0 $SCRIPT_DIR/cleanup.sh diff --git a/docker/scripts/webQuery.sh b/docker/scripts/webQuery.sh new file mode 100755 index 00000000000..1f63ebb34d1 --- /dev/null +++ b/docker/scripts/webQuery.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +# For this to work, the webserver must be running in the quickstart docker image. +# To do that, change --accumulo to --web or --webdebug in the docker-compose.yml. + +SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) + +source ${SCRIPT_DIR}/common/query.sh + +# QUERY PARAMETERS +QUERY_TYPE='webQuery' +QUERY_LOGIC='RemoteEventQuery' +BEGIN='19660908 000000.000' +END='20161002 235959.999' +COLUMN_VISIBILITY='PUBLIC' +QUERY='GENRES:[Action to Western]' +QUERY_SYNTAX='LUCENE' +AUTHS='PUBLIC,PRIVATE,BAR,FOO' +QUERY_NAME='Developer Test Query' +PAGE_SIZE='10' + +# run query against the webservice +WEBSERVICE=true + +runQuery diff --git a/docs/enunciate.xml b/docs/enunciate.xml index ce09bda329c..8a3857682f1 100644 --- a/docs/enunciate.xml +++ b/docs/enunciate.xml @@ -3,8 +3,8 @@ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:noNamespaceSchemaLocation="http://enunciate.webcohesion.com/schemas/enunciate-2.9.1.xsd"> - - + + diff --git a/docs/pom.xml b/docs/pom.xml index a575f602f98..b02858f5fb6 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-docs @@ -166,6 +166,11 
@@ jackson-jaxrs 1.8.8 + + org.springframework.security + spring-security-core + ${version.springframework} + org.wildfly wildfly-undertow @@ -237,6 +242,12 @@ 2.28.2 provided + + org.mockito + mockito-core + 2.28.2 + provided + org.picketbox picketbox @@ -321,7 +332,7 @@ ${project.build.directory}/dependency ${project.build.directory}/apidocs - generated-sources.* + generated-sources.*:datawave.microservice.querymetric diff --git a/microservices/configcheck/pom.xml b/microservices/configcheck/pom.xml index 903efa54929..a7f08574ace 100644 --- a/microservices/configcheck/pom.xml +++ b/microservices/configcheck/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave.microservice datawave-microservice-configcheck - 1.0.0-SNAPSHOT + 7.0.0-SNAPSHOT ${project.artifactId} diff --git a/microservices/microservice-parent b/microservices/microservice-parent index 728d1ee107a..df0eccee14c 160000 --- a/microservices/microservice-parent +++ b/microservices/microservice-parent @@ -1 +1 @@ -Subproject commit 728d1ee107a0447d39716578dcfbe3d72d90f973 +Subproject commit df0eccee14c4c906c14679fad4948be915a5ae69 diff --git a/microservices/microservice-service-parent b/microservices/microservice-service-parent index 3527c5367b8..ebf58213c95 160000 --- a/microservices/microservice-service-parent +++ b/microservices/microservice-service-parent @@ -1 +1 @@ -Subproject commit 3527c5367b8ccabd75d7ed9a70fc02a7ec115680 +Subproject commit ebf58213c955214329faeb1efe85bb07e05ded8c diff --git a/microservices/pom.xml b/microservices/pom.xml index 707f2632b42..037d75087fe 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/accumulo b/microservices/services/accumulo index 14d2afbab4d..d8586d45996 160000 --- a/microservices/services/accumulo +++ b/microservices/services/accumulo @@ -1 +1 @@ -Subproject commit 
14d2afbab4d9d3d9b762cae649c6f46215912172 +Subproject commit d8586d459969504430779b4c958ec20f6fc2f5c2 diff --git a/microservices/services/audit b/microservices/services/audit index 2bcfbb3e2f2..94eb6ba81e0 160000 --- a/microservices/services/audit +++ b/microservices/services/audit @@ -1 +1 @@ -Subproject commit 2bcfbb3e2f22d50dd3d174e633f48685a96e8007 +Subproject commit 94eb6ba81e06d348ecf5a01b4c0e19bc8cd8d2ca diff --git a/microservices/services/authorization b/microservices/services/authorization index 46118a8c30b..5c92e1b7665 160000 --- a/microservices/services/authorization +++ b/microservices/services/authorization @@ -1 +1 @@ -Subproject commit 46118a8c30b48e9baab56237661f66621f5ac58a +Subproject commit 5c92e1b766587b27358b5a695103475d7f1270b3 diff --git a/microservices/services/config b/microservices/services/config index ca8933ede9a..25e03153aa6 160000 --- a/microservices/services/config +++ b/microservices/services/config @@ -1 +1 @@ -Subproject commit ca8933ede9a267c9a3e40b714b405a12f345b4bb +Subproject commit 25e03153aa6a2dbf0b7496ec12f9ea2d1bb7eee4 diff --git a/microservices/services/dictionary b/microservices/services/dictionary index 98b13bc5055..d09519d054e 160000 --- a/microservices/services/dictionary +++ b/microservices/services/dictionary @@ -1 +1 @@ -Subproject commit 98b13bc5055cc17fcbc3b78fff085ee08489e0d4 +Subproject commit d09519d054e759760f5c1044ed3b19c53ab3470f diff --git a/microservices/services/hazelcast b/microservices/services/hazelcast index 5443777a63a..51679472b7f 160000 --- a/microservices/services/hazelcast +++ b/microservices/services/hazelcast @@ -1 +1 @@ -Subproject commit 5443777a63a63f164ed49334fc211d9b09d3eb75 +Subproject commit 51679472b7fbd6fc73aec09d8c3397ea75f8dc88 diff --git a/microservices/services/mapreduce-query b/microservices/services/mapreduce-query new file mode 160000 index 00000000000..deefe1ebd6c --- /dev/null +++ b/microservices/services/mapreduce-query @@ -0,0 +1 @@ +Subproject commit 
deefe1ebd6c7450d65af478526f97ea5dc3df8ff diff --git a/microservices/services/modification b/microservices/services/modification new file mode 160000 index 00000000000..c2791ee6061 --- /dev/null +++ b/microservices/services/modification @@ -0,0 +1 @@ +Subproject commit c2791ee6061d811768c9d315e1574dd573271c2c diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index f9d7948ace3..29feb7d3fcb 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-microservice-service-build-parent pom @@ -77,6 +77,50 @@ hazelcast + + submodule-service-mapreduce-query + + + mapreduce-query/pom.xml + + + + mapreduce-query + + + + submodule-service-modification + + + modification/pom.xml + + + + modification + + + + submodule-service-query + + + query/pom.xml + + + + query + + + + submodule-service-query-executor + + + query-executor/pom.xml + + + + query-executor + + submodule-service-query-metric diff --git a/microservices/services/query b/microservices/services/query new file mode 160000 index 00000000000..85cd4e8ec57 --- /dev/null +++ b/microservices/services/query @@ -0,0 +1 @@ +Subproject commit 85cd4e8ec57a8e1f9eb5a2870959b08d3b39f058 diff --git a/microservices/services/query-executor b/microservices/services/query-executor new file mode 160000 index 00000000000..2970bd342c2 --- /dev/null +++ b/microservices/services/query-executor @@ -0,0 +1 @@ +Subproject commit 2970bd342c28cd20ed93784e1da74cca601e0d23 diff --git a/microservices/services/query-metric b/microservices/services/query-metric index 6cde4c7c784..251b0836642 160000 --- a/microservices/services/query-metric +++ b/microservices/services/query-metric @@ -1 +1 @@ -Subproject commit 6cde4c7c784925a4f46d2f8401d9f5161eca986f +Subproject commit 251b0836642ef2aab454b4278429bb7318e7fd8c diff --git a/microservices/starters/audit 
b/microservices/starters/audit index 39e340da22a..42d04f6da3d 160000 --- a/microservices/starters/audit +++ b/microservices/starters/audit @@ -1 +1 @@ -Subproject commit 39e340da22a3aca24e13a225ac5f9b14d9fd62d9 +Subproject commit 42d04f6da3dfa4f41921a9b4d26eaea2b0eabadc diff --git a/microservices/starters/cache b/microservices/starters/cache index e87bd18bd47..98bc3137442 160000 --- a/microservices/starters/cache +++ b/microservices/starters/cache @@ -1 +1 @@ -Subproject commit e87bd18bd4769c83a8c3dd734333fb9f83caa941 +Subproject commit 98bc3137442eea99c36173d2e598076024d1b96b diff --git a/microservices/starters/cached-results b/microservices/starters/cached-results new file mode 160000 index 00000000000..098d6c0d56e --- /dev/null +++ b/microservices/starters/cached-results @@ -0,0 +1 @@ +Subproject commit 098d6c0d56ebeef1ea421903963aec8e8d3b1818 diff --git a/microservices/starters/datawave b/microservices/starters/datawave index ab244f56fdc..30d55cd22e0 160000 --- a/microservices/starters/datawave +++ b/microservices/starters/datawave @@ -1 +1 @@ -Subproject commit ab244f56fdcae92c164707240f9565b2ebd2cc8b +Subproject commit 30d55cd22e00c0478783265ab76886c9f0a4c5d9 diff --git a/microservices/starters/metadata b/microservices/starters/metadata index 98df6b2db6f..df0232ab5e3 160000 --- a/microservices/starters/metadata +++ b/microservices/starters/metadata @@ -1 +1 @@ -Subproject commit 98df6b2db6f9ea59c689e68ce9f42a37eb68eb01 +Subproject commit df0232ab5e3d399a219f87c2e65e66f85bef5fdc diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index 93964b61729..0200a781287 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-microservice-starter-build-parent pom @@ -12,36 +12,47 @@ - submodule-starter-datawave + submodule-starter-audit - datawave/pom.xml + audit/pom.xml - datawave + audit - 
submodule-starter-audit + submodule-starter-cache - audit/pom.xml + cache/pom.xml - audit + cache - submodule-starter-cache + submodule-starter-cached-results - cache/pom.xml + cached-results/pom.xml - cache + cached-results + + + + submodule-starter-datawave + + + datawave/pom.xml + + + + datawave @@ -55,6 +66,17 @@ metadata + + submodule-starter-query + + + query/pom.xml + + + + query + + submodule-starter-query-metric diff --git a/microservices/starters/query b/microservices/starters/query new file mode 160000 index 00000000000..aa88a610f76 --- /dev/null +++ b/microservices/starters/query @@ -0,0 +1 @@ +Subproject commit aa88a610f767c97f16b4137c84f9dff61e9f239c diff --git a/microservices/starters/query-metric b/microservices/starters/query-metric index 3454b9eda4a..9ae002a7927 160000 --- a/microservices/starters/query-metric +++ b/microservices/starters/query-metric @@ -1 +1 @@ -Subproject commit 3454b9eda4a026c4176981180c5e32ea30bc3ab5 +Subproject commit 9ae002a79276bc804272e93ddf239938bbaeb8e0 diff --git a/pom.xml b/pom.xml index adf28228acb..be9e7852f82 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. 
@@ -58,6 +58,7 @@ 1.6 1.8 5.2.0 + 5.2.0 1.9.0 5.2.0 2.15.0 @@ -73,7 +74,7 @@ 1.3 4.5.13 4.4.8 - 3.0.1 + 4.0.0-SNAPSHOT 9.4.21.Final 2.10.0.pr1 1.9.13 @@ -98,17 +99,19 @@ 7.5.0 2.5.2 1.6.0 - 3.0.0 - 3.0.2 - 3.0.0 - 3.0.0 - 3.0.0 - 2.0.1 - 3.0.0 - 3.0.0 - 2.0.0 - 3.0.0 - 2.0.2 + 4.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 3.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 1.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 3.0.0-SNAPSHOT + 1.0.0-SNAPSHOT + 4.0.0-SNAPSHOT + 3.0.0-SNAPSHOT 1.2 2.23.0 8.0.28 @@ -364,6 +367,11 @@ dictionary-api ${version.microservice.dictionary-api} + + gov.nsa.datawave.microservice + mapreduce-query-api + ${version.microservice.mapreduce-query-api} + gov.nsa.datawave.microservice metadata-utils @@ -388,6 +396,17 @@ metrics-reporter ${version.microservice.metrics-reporter} + + gov.nsa.datawave.microservice + query-api + ${version.microservice.query-api} + + + gov.nsa.datawave.microservice + query-api + ${version.microservice.query-api} + jboss + gov.nsa.datawave.microservice query-metric-api @@ -627,17 +646,33 @@ log4j * + + org.apache.zookeeper + zookeeper + org.apache.curator curator-framework ${version.curator} + + + org.apache.zookeeper + zookeeper + + org.apache.curator curator-recipes ${version.curator} + + + org.apache.zookeeper + zookeeper + + org.apache.deltaspike.core @@ -810,6 +845,16 @@ org.apache.zookeeper zookeeper ${version.zookeeper} + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + org.eclipse.emf @@ -1113,7 +1158,7 @@ org.apache.curator curator-test - ${version.curator} + ${version.curator.test} test true @@ -1171,6 +1216,12 @@ ${version.powermock} test + + org.powermock + powermock-reflect + ${version.powermock} + test + @@ -1356,6 +1407,11 @@ false + + ch.qos.reload4j + reload4j + 1.2.22 + ch.qos.reload4j reload4j @@ -1377,6 +1433,17 @@ + + org.apache.maven.doxia + doxia-site-renderer + 1.4 + + + log4j + log4j + + + @@ -1638,7 +1705,7 @@ org.codehaus.mojo build-helper-maven-plugin - 3.0.0 + 3.3.0 
org.codehaus.mojo @@ -1836,6 +1903,24 @@ org.apache.maven.plugins maven-javadoc-plugin + + + ch.qos.reload4j + reload4j + 1.2.22 + + + org.apache.maven.doxia + doxia-site-renderer + 1.7.4 + + + log4j + log4j + + + + @@ -2050,9 +2135,9 @@ clover - com.cenqua.clover + com.atlassian.clover clover - RELEASE + [4.1.2,) diff --git a/properties/compose.properties b/properties/compose.properties index e623100ca2e..7971d5568b0 100644 --- a/properties/compose.properties +++ b/properties/compose.properties @@ -156,6 +156,7 @@ event.query.data.decorators= \ \n \ \n \ \n \ +\n \ \n \ \n \ \n \ @@ -167,17 +168,18 @@ event.query.data.decorators= \ \n \ \n \ \n \ +\n \ \n \ \n lookup.uuid.uuidTypes= \ \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ @@ -185,24 +187,24 @@ lookup.uuid.uuidTypes= \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n query.metrics.marking=(PUBLIC) diff --git a/properties/default.properties b/properties/default.properties index edc970f95d3..8ee098f3709 100644 --- a/properties/default.properties +++ b/properties/default.properties @@ -693,11 +693,3 @@ basemaps= {\ accessToken: 'your.mapbox.access.token' \ }) \ } - -########################## -# -# The response object factory class -# -########################## -response.object.factory.class=datawave.webservice.query.result.event.DefaultResponseObjectFactory - diff --git a/properties/dev.properties b/properties/dev.properties index 9c36e7f7e27..ea526d63026 100644 --- a/properties/dev.properties +++ b/properties/dev.properties @@ -197,6 +197,7 @@ event.query.data.decorators= \ \n \ \n \ \n \ +\n \ \n \ \n \ \n \ @@ -208,17 +209,18 @@ event.query.data.decorators= \ \n \ \n \ \n \ +\n \ \n \ \n lookup.uuid.uuidTypes= \ \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ @@ -226,24 +228,24 @@ lookup.uuid.uuidTypes= \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ 
+\n \ \n \ \n \ \n \ -\n \ +\n \ \n \ \n \ \n \ -\n \ +\n \ \n query.metrics.marking=(PUBLIC) diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index 388bf37a937..94bbd39ca26 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index 75bb4eeffea..1e05bb334e7 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index 93e1447ae10..e4142a3184c 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-age-off ${project.artifactId} diff --git a/warehouse/age-off/src/main/java/datawave/iterators/filter/ConfigurableAgeOffFilter.java b/warehouse/age-off/src/main/java/datawave/iterators/filter/ConfigurableAgeOffFilter.java index 4a7df28b713..d823b3639fa 100644 --- a/warehouse/age-off/src/main/java/datawave/iterators/filter/ConfigurableAgeOffFilter.java +++ b/warehouse/age-off/src/main/java/datawave/iterators/filter/ConfigurableAgeOffFilter.java @@ -113,7 +113,6 @@ public class ConfigurableAgeOffFilter extends Filter implements OptionDescriber .setNameFormat(ConfigurableAgeOffFilter.class.getSimpleName() + "-ruleCache-refresh-%d").build(); private static final ScheduledExecutorService SIMPLE_TIMER = Executors.newSingleThreadScheduledExecutor(TIMER_THREAD_FACTORY); - public static final String UPDATE_INTERVAL_MS_PROP = "tserver.datawave.ageoff.cache.update.interval.ms"; protected static final long DEFAULT_UPDATE_INTERVAL_MS = 5; 
protected static long UPDATE_INTERVAL_MS = DEFAULT_UPDATE_INTERVAL_MS; @@ -337,7 +336,6 @@ public void init(SortedKeyValueIterator source, Map op long sessionScanStart = options.containsKey(AgeOffConfigParams.SCAN_START_TIMESTAMP) ? Long.parseLong(options.get(AgeOffConfigParams.SCAN_START_TIMESTAMP)) : System.currentTimeMillis(); - initialize(options.get(AgeOffConfigParams.TTL), options.get(AgeOffConfigParams.TTL_UNITS), options.get(AgeOffConfigParams.TTL_SHORT_CIRCUIT), sessionScanStart, options.get(AgeOffConfigParams.FILTER_CONFIG)); diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index f1edb9bfd9e..b122388d412 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT assemble-datawave jar @@ -664,6 +664,7 @@ com.github.ben-manes.caffeine:caffeine org.apache.commons:commons-collections4 org.apache.commons:commons-jexl3 + org.apache.curator:curator-client ${project.groupId}:datawave-core ${project.groupId}:datawave-accumulo-extensions ${project.groupId}:datawave-ingest-csv @@ -678,6 +679,10 @@ ${project.groupId}:datawave-ingest-wikipedia ${project.groupId}:datawave-ingest-nyctlc ${project.groupId}:datawave-ingest-ssdeep + gov.nsa.datawave.core:datawave-core-connection-pool + gov.nsa.datawave.core:datawave-core-common + gov.nsa.datawave.core:datawave-core-common-util + gov.nsa.datawave.core:datawave-core-query gov.nsa.datawave.microservice:accumulo-utils gov.nsa.datawave.microservice:metadata-utils gov.nsa.datawave.microservice:type-utils @@ -724,6 +729,9 @@ ${project.groupId}:datawave-core ${project.groupId}:datawave-query-core ${project.groupId}:datawave-metrics-core + gov.nsa.datawave.core:datawave-core-connection-pool + gov.nsa.datawave.core:datawave-core-common + gov.nsa.datawave.core:datawave-core-common-util gov.nsa.datawave.microservice:accumulo-utils gov.nsa.datawave.microservice:metadata-utils 
gov.nsa.datawave.microservice:type-utils diff --git a/warehouse/assemble/datawave/src/main/assembly/dist.xml b/warehouse/assemble/datawave/src/main/assembly/dist.xml index 9b0bf15d451..c737cd267da 100644 --- a/warehouse/assemble/datawave/src/main/assembly/dist.xml +++ b/warehouse/assemble/datawave/src/main/assembly/dist.xml @@ -94,6 +94,7 @@ org.apache.commons:commons-collections4 org.apache.commons:commons-jexl3 + org.apache.curator:curator-client ${groupId}:datawave-accumulo-extensions ${groupId}:datawave-common ${groupId}:datawave-core @@ -108,6 +109,10 @@ ${groupId}:datawave-ingest-wikipedia ${groupId}:datawave-ingest-nyctlc ${groupId}:datawave-ingest-json + gov.nsa.datawave.core:datawave-core-connection-pool + gov.nsa.datawave.core:datawave-core-common + gov.nsa.datawave.core:datawave-core-common-util + gov.nsa.datawave.core:datawave-core-query gov.nsa.datawave.microservice:accumulo-utils gov.nsa.datawave.microservice:metadata-utils gov.nsa.datawave.microservice:type-utils @@ -158,6 +163,9 @@ ${groupId}:datawave-ingest-core ${groupId}:datawave-ingest-configuration ${groupId}:datawave-query-core + gov.nsa.datawave.core:datawave-core-connection-pool + gov.nsa.datawave.core:datawave-core-common + gov.nsa.datawave.core:datawave-core-common-util gov.nsa.datawave.microservice:accumulo-utils gov.nsa.datawave.microservice:metadata-utils gov.nsa.datawave.microservice:type-utils diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index a131fd5cfa4..a52be9dd2a4 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index a3d04e52e95..634fcc0394c 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT 
assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml b/warehouse/common/pom.xml index 15b2e1197b2..d42d6252562 100644 --- a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index db4633f8518..4795c111cdd 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-core jar diff --git a/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java b/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java new file mode 100644 index 00000000000..acdb4800ae3 --- /dev/null +++ b/warehouse/core/src/main/java/datawave/edge/model/DefaultEdgeModelFieldsFactory.java @@ -0,0 +1,62 @@ +package datawave.edge.model; + +import java.util.Map; + +import org.apache.log4j.Logger; +import org.springframework.context.ApplicationContext; +import org.springframework.context.support.AbstractApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.context.support.FileSystemXmlApplicationContext; + +public class DefaultEdgeModelFieldsFactory implements EdgeModelFieldsFactory { + + /** required bean context */ + /** common default locations for locating bean xml */ + static final String[] EDGE_MODEL_CONTEXT = {"classpath*:EdgeModelContext.xml"}; + /** required bean name */ + static final String BASE_MODEL_BEAN = "baseFieldMap"; + /** required bean name */ + static final String KEYUTIL_MODEL_BEAN = "keyUtilFieldMap"; + /** required bean name */ + static final String TRANSFORM_MODEL_BEAN = "transformFieldMap"; + + private static Logger log = Logger.getLogger(DefaultEdgeModelFieldsFactory.class); + + @Override + public EdgeModelFields createFields() { 
+ EdgeModelFields fields = new EdgeModelFields(); + AbstractApplicationContext context = null; + try { + String contextOverride = System.getProperty("edge.model.context.path"); + if (null != contextOverride) { + context = new FileSystemXmlApplicationContext(contextOverride); + } else { + ClassLoader thisClassLoader = EdgeModelFields.class.getClassLoader(); + ClassPathXmlApplicationContext cpContext = new ClassPathXmlApplicationContext(); + cpContext.setClassLoader(thisClassLoader); + cpContext.setConfigLocations(EDGE_MODEL_CONTEXT); + cpContext.refresh(); + context = cpContext; + } + + // now load the maps + fields.setBaseFieldMap((Map) context.getBean(BASE_MODEL_BEAN)); + fields.setKeyUtilFieldMap((Map) context.getBean(KEYUTIL_MODEL_BEAN)); + fields.setTransformFieldMap((Map) context.getBean(TRANSFORM_MODEL_BEAN)); + } catch (Throwable t) { + log.fatal("Edge model configuration not loaded!! Edge queries will fail until this issue is corrected."); + log.fatal(String.format("Ensure that the Spring config file '%s' is on the classpath and contains bean names '%s', '%s', and '%s'", + EDGE_MODEL_CONTEXT, BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN), t); + } finally { + if (context != null) { + context.close(); + } + } + + return fields; + } + + @SuppressWarnings("unchecked") + private void loadMaps(ApplicationContext context) {} + +} diff --git a/warehouse/core/src/main/java/datawave/edge/model/EdgeModelAware.java b/warehouse/core/src/main/java/datawave/edge/model/EdgeModelAware.java deleted file mode 100644 index 32cf42b4172..00000000000 --- a/warehouse/core/src/main/java/datawave/edge/model/EdgeModelAware.java +++ /dev/null @@ -1,375 +0,0 @@ -package datawave.edge.model; - -import java.util.Collection; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; - -import org.apache.log4j.Logger; -import org.springframework.context.ApplicationContext; -import 
org.springframework.context.support.ClassPathXmlApplicationContext; -import org.springframework.context.support.FileSystemXmlApplicationContext; - -/** - * Interface that allows internal field names used within the edge model to be configured and injected transparently into classes that need them.
    - *
    - * - * The intention is to enforce a unified approach to field name management and usage throughout the entire codebase, so that the actual field names in use can - * be dictated by the deployment environment rather than the code itself. - */ -public interface EdgeModelAware { - - String EQUALS = "=="; - String EQUALS_REGEX = "=~"; - String NOT_EQUALS = "!="; - String NOT_EQUALS_REGEX = "!~"; - String FUNCTION = "function"; - String OR = " || "; - String AND = " && "; - char STRING_QUOTE = '\''; - char BACKSLASH = '\\'; - - /* Base field names */ - - /** Internal field name denoting edge vertex 1 */ - String EDGE_SOURCE = Fields.getInstance().getSourceFieldName(); - /** Internal field name denoting edge vertex 2 */ - String EDGE_SINK = Fields.getInstance().getSinkFieldName(); - /** Internal field name denoting the edge type defined by the two vertices */ - String EDGE_TYPE = Fields.getInstance().getTypeFieldName(); - /** Internal field name denoting the source-sink relationship */ - String EDGE_RELATIONSHIP = Fields.getInstance().getRelationshipFieldName(); - /** Internal field name denoting the edge's 1st attribute */ - String EDGE_ATTRIBUTE1 = Fields.getInstance().getAttribute1FieldName(); - /** Internal field name denoting the edge's 2nd attribute */ - String EDGE_ATTRIBUTE2 = Fields.getInstance().getAttribute2FieldName(); - /** Internal field name denoting the edge's 3rd attribute */ - String EDGE_ATTRIBUTE3 = Fields.getInstance().getAttribute3FieldName(); - /** Internal field name denoting a 'stats' edge */ - String STATS_EDGE = Fields.getInstance().getStatsEdgeFieldName(); - /** Internal field name denoting the edge date */ - String DATE = Fields.getInstance().getDateFieldName(); - - /* These are specific to edge key processing (as previously managed with enum in EdgeKeyUtil) */ - - /** Internal field name denoting the edge enrichment type */ - String ENRICHMENT_TYPE = Fields.getInstance().getEnrichmentTypeFieldName(); - /** Internal field name denoting 
the edge fact type */ - String FACT_TYPE = Fields.getInstance().getFactTypeFieldName(); - /** Internal field name denoting grouped edge fields */ - String GROUPED_FIELDS = Fields.getInstance().getGroupedFieldsFieldName(); - - /* These are specific to query result transformation (previously hardcoded literals in EdgeQueryTransformer) */ - - /** Internal field name used to convey an edge count */ - String COUNT = Fields.getInstance().getCountFieldName(); - /** Internal field name used to convey edge counts */ - String COUNTS = Fields.getInstance().getCountsFieldName(); - /** Internal field name denoting an edge load date */ - String LOAD_DATE = Fields.getInstance().getLoadDateFieldName(); - /** Internal field name denoting an edge id */ - String UUID = Fields.getInstance().getUuidFieldName(); - /** Internal field name denoting and edge activity date */ - String ACTIVITY_DATE = Fields.getInstance().getActivityDateFieldName(); - /** Internal field name denoting if the edge activity date was good or bad */ - String BAD_ACTIVITY_DATE = Fields.getInstance().getBadActivityDateFieldName(); - - /** - * On-demand singleton for loading the internal model.
    - *
    - * - * With the edge schema, since field names don't exist on disk in the way that they do for events, we have the flexiblity to alter the names within the - * model to suit the query syntax preferred by the target deployment environment. This class uses Spring injection to load and map the internal model.
    - *
    - * - * NOTE:
    - *
    - * Since the EdgeModelAware interface is intended to provide a single point of access to field names for all application tiers, it is important to ensure - * that the Spring config is available within a variety of distinct classloading contexts...ie, within webservers, tservers, etc. If the config fails to - * load at any tier, then edge queries will fail. FATAL log entries are emitted when this case arises. - */ - class Fields { - /** required bean context */ - /** common default locations for locating bean xml */ - static final String[] EDGE_MODEL_CONTEXT = {"classpath*:EdgeModelContext.xml"}; - /** required bean name */ - static final String BASE_MODEL_BEAN = "baseFieldMap"; - /** required bean name */ - static final String KEYUTIL_MODEL_BEAN = "keyUtilFieldMap"; - /** required bean name */ - static final String TRANSFORM_MODEL_BEAN = "transformFieldMap"; - - /** internal fields common to all application tiers */ - private Map baseFieldMap; - /** internal fields used in key processing (as previously defined by enum within EdgeKeyUtil.java) */ - private Map keyUtilFieldMap; - /** internal fields used in query result transformation (eg, EdgeQueryTransformer.java) */ - private Map transformFieldMap; - - private Logger log = Logger.getLogger(Fields.class); - - private Fields() { - loadContext(); - } - - @SuppressWarnings("unchecked") - private void loadMaps(ApplicationContext context) { - baseFieldMap = (Map) context.getBean(BASE_MODEL_BEAN); - keyUtilFieldMap = (Map) context.getBean(KEYUTIL_MODEL_BEAN); - transformFieldMap = (Map) context.getBean(TRANSFORM_MODEL_BEAN); - } - - /** - * Initializes the internal field maps using the EdgeModelContext.xml config - */ - private void loadContext() { - - String contextOverride = System.getProperty("edge.model.context.path"); - if (null != contextOverride) { - FileSystemXmlApplicationContext context = new FileSystemXmlApplicationContext(contextOverride); - loadMaps(context); - context.close(); - } else { - ClassLoader 
thisClassLoader = EdgeModelAware.class.getClassLoader(); - ClassPathXmlApplicationContext context = new ClassPathXmlApplicationContext(); - try { - context.setClassLoader(thisClassLoader); - context.setConfigLocations(EDGE_MODEL_CONTEXT); - context.refresh(); - loadMaps(context); - } catch (Throwable t) { - log.fatal("Edge model configuration not loaded!! Edge queries will fail until this issue is corrected."); - log.fatal(String.format("Ensure that the Spring config file '%s' is on the classpath and contains bean names '%s', '%s', and '%s'", - EDGE_MODEL_CONTEXT, BASE_MODEL_BEAN, KEYUTIL_MODEL_BEAN, TRANSFORM_MODEL_BEAN), t); - } finally { - context.close(); - } - } - } - - /** - * Enum that can be used for convenience to lookup the internal edge field name values. - */ - public enum FieldKey { - /** Key to the internal field name used to denote edge vertex 1 */ - EDGE_SOURCE, - /** Key to the internal field name used to denote edge vertex 2 */ - EDGE_SINK, - /** Key to the internal field name used to denote the edge type defined by the two vertices */ - EDGE_TYPE, - /** Key to the internal field name used to denote the source-sink relationship */ - EDGE_RELATIONSHIP, - /** Key to the internal field name used to denote the edge's 1st attribute */ - EDGE_ATTRIBUTE1, - /** Key to the internal field name used to denote the edge's 2nd attribute */ - EDGE_ATTRIBUTE2, - /** Key to the internal field name used to denote the edge's 3rd attribute */ - EDGE_ATTRIBUTE3, - /** Key to the internal field name used to denote a stats edge */ - STATS_EDGE, - /** Key to the internal field name used to denote the edge's date */ - DATE, - /** Key to the internal field name used to denote the edge enrichment type */ - ENRICHMENT_TYPE, - /** Key to the internal field name used to denote the edge fact type */ - FACT_TYPE, - /** Key to the internal field name used to denote the edge grouped fields */ - GROUPED_FIELDS, - /** Key to the internal field name used to convey an edge count */ - 
COUNT, - /** Key to the internal field name used to convey edge counts */ - COUNTS, - /** Key to the internal field name used to denote an edge load date */ - LOAD_DATE, - /** Key to the internal field name used to denote and edge id */ - UUID, - /** Key to the internal field name used to denote the edge activity date */ - ACTIVITY_DATE, - /** Key to the internal field name used to denote weather the edge activity date was good or bad */ - BAD_ACTIVITY_DATE; - - private static Map reverseMap = new HashMap<>(); - - static { - Map fieldMap = getInstance().getAllFieldsMap(); - for (Map.Entry entry : fieldMap.entrySet()) { - reverseMap.put(entry.getValue(), FieldKey.valueOf(entry.getKey())); - } - } - - /** - * Returns the FieldKey associated with the actual (ie, configured) edge field name, ie, reverse lookup... - * - * @param internalFieldName - * configured field name to parse - * @return the reverse lookup field name - */ - public static FieldKey parse(String internalFieldName) { - return reverseMap.get(internalFieldName); - } - } - - public static Fields getInstance() { - return OnDemand.INSTANCE; - } - - private static class OnDemand { - private static final Fields INSTANCE = new Fields(); - } - - public String getSourceFieldName() { - return baseFieldMap.get(FieldKey.EDGE_SOURCE.name()); - } - - public String getSinkFieldName() { - return baseFieldMap.get(FieldKey.EDGE_SINK.name()); - } - - public String getTypeFieldName() { - return baseFieldMap.get(FieldKey.EDGE_TYPE.name()); - } - - public String getRelationshipFieldName() { - return baseFieldMap.get(FieldKey.EDGE_RELATIONSHIP.name()); - } - - public String getAttribute1FieldName() { - return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE1.name()); - } - - public String getAttribute2FieldName() { - return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE2.name()); - } - - public String getAttribute3FieldName() { - return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE3.name()); - } - - public String getDateFieldName() { - return 
baseFieldMap.get(FieldKey.DATE.name()); - } - - public String getStatsEdgeFieldName() { - return baseFieldMap.get(FieldKey.STATS_EDGE.name()); - } - - public String getGroupedFieldsFieldName() { - return keyUtilFieldMap.get(FieldKey.GROUPED_FIELDS.name()); - } - - public String getEnrichmentTypeFieldName() { - return keyUtilFieldMap.get(FieldKey.ENRICHMENT_TYPE.name()); - } - - public String getFactTypeFieldName() { - return keyUtilFieldMap.get(FieldKey.FACT_TYPE.name()); - } - - public String getCountFieldName() { - return transformFieldMap.get(FieldKey.COUNT.name()); - } - - public String getCountsFieldName() { - return transformFieldMap.get(FieldKey.COUNTS.name()); - } - - public String getLoadDateFieldName() { - return transformFieldMap.get(FieldKey.LOAD_DATE.name()); - } - - public String getUuidFieldName() { - return transformFieldMap.get(FieldKey.UUID.name()); - } - - public String getActivityDateFieldName() { - return transformFieldMap.get(FieldKey.ACTIVITY_DATE.name()); - } - - public String getBadActivityDateFieldName() { - return transformFieldMap.get(FieldKey.BAD_ACTIVITY_DATE.name()); - } - - /** - * Returns the subset of all edge-related field names which are common to all application tiers. 
- * - * @return subset of field names - */ - public Collection getBaseFieldNames() { - return Collections.unmodifiableCollection(baseFieldMap.values()); - } - - /** - * Returns the field names associated with key manipulation and processing, a superset of the fields given by the {@link Fields#getBaseFieldNames} - * method - * - * @return field names associated with key manipulation and processing - */ - public Collection getKeyProcessingFieldNames() { - HashSet fields = new HashSet<>(); - fields.addAll(baseFieldMap.values()); - fields.addAll(keyUtilFieldMap.values()); - return Collections.unmodifiableCollection(fields); - } - - /** - * Returns the field names associated with query result transformation, a superset of the fields given by the {@link Fields#getBaseFieldNames} method - * - * @return query result field names - */ - public Collection getTransformFieldNames() { - HashSet fields = new HashSet<>(); - fields.addAll(baseFieldMap.values()); - fields.addAll(transformFieldMap.values()); - return Collections.unmodifiableCollection(fields); - } - - /** - * Returns the mapped fields associated with edge key manipulation and processing, where the keys are represented as FieldKey.name() - * - * @return mapped key processing fields - */ - public Map getKeyProcessingFieldMap() { - HashMap all = new HashMap<>(); - all.putAll(baseFieldMap); - all.putAll(keyUtilFieldMap); - return Collections.unmodifiableMap(all); - } - - /** - * Returns the mapping for fields associated with query result transformation, where the keys are represented as FieldKey.name() - * - * @return mapping for fields associated with query result transformation - */ - public Map getTransformFieldMap() { - HashMap all = new HashMap<>(); - all.putAll(baseFieldMap); - all.putAll(transformFieldMap); - return Collections.unmodifiableMap(all); - } - - /** - * Returns all mapped field names, where the keys are represented as FieldKey.name() - * - * @return map of field names - */ - public Map 
getAllFieldsMap() { - HashMap all = new HashMap<>(); - all.putAll(baseFieldMap); - all.putAll(keyUtilFieldMap); - all.putAll(transformFieldMap); - return Collections.unmodifiableMap(all); - } - - /** - * Returns the field name mapped to the specified FieldKey - * - * @param key - * key to pull name from - * @return field name - */ - public String getFieldName(FieldKey key) { - return getAllFieldsMap().get(key.name()); - } - } -} diff --git a/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFields.java b/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFields.java new file mode 100644 index 00000000000..a554cedcf2f --- /dev/null +++ b/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFields.java @@ -0,0 +1,324 @@ +package datawave.edge.model; + +import java.io.Serializable; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; + +import org.apache.log4j.Logger; +import org.springframework.context.ApplicationContext; +import org.springframework.context.support.ClassPathXmlApplicationContext; +import org.springframework.context.support.FileSystemXmlApplicationContext; + +/** + * Interface that allows internal field names used within the edge model to be configured and injected transparently into classes that need them.
    + *
    + * + * The intention is to enforce a unified approach to field name management and usage throughout the entire codebase, so that the actual field names in use can + * be dictated by the deployment environment rather than the code itself. + */ +public class EdgeModelFields implements Serializable { + + /** internal fields common to all application tiers */ + private Map baseFieldMap; + /** internal fields used in key processing (as previously defined by enum within EdgeKeyUtil.java) */ + private Map keyUtilFieldMap; + /** internal fields used in query result transformation (eg, EdgeQueryTransformer.java) */ + private Map transformFieldMap; + /** A mapping of field names to field keys */ + private Map reverseMap = new HashMap<>(); + + public static String EQUALS = "=="; + public static String EQUALS_REGEX = "=~"; + public static String NOT_EQUALS = "!="; + public static String NOT_EQUALS_REGEX = "!~"; + public static String OR = " || "; + public static String AND = " && "; + public static char STRING_QUOTE = '\''; + public static char BACKSLASH = '\\'; + + public Map getBaseFieldMap() { + return baseFieldMap; + } + + public void setBaseFieldMap(Map baseFieldMap) { + this.baseFieldMap = baseFieldMap; + updateReverseMap(baseFieldMap); + } + + public Map getKeyUtilFieldMap() { + return keyUtilFieldMap; + } + + public void setKeyUtilFieldMap(Map keyUtilFieldMap) { + this.keyUtilFieldMap = keyUtilFieldMap; + updateReverseMap(keyUtilFieldMap); + } + + public void setTransformFieldMap(Map transformFieldMap) { + this.transformFieldMap = transformFieldMap; + updateReverseMap(transformFieldMap); + } + + /** + * Enum that can be used for convenience to lookup the internal edge field name values. 
+ */ + public enum FieldKey { + /** + * Key to the internal field name used to denote edge vertex 1 + */ + EDGE_SOURCE, + /** + * Key to the internal field name used to denote edge vertex 2 + */ + EDGE_SINK, + /** + * Key to the internal field name used to denote the edge type defined by the two vertices + */ + EDGE_TYPE, + /** + * Key to the internal field name used to denote the source-sink relationship + */ + EDGE_RELATIONSHIP, + /** + * Key to the internal field name used to denote the edge's 1st attribute + */ + EDGE_ATTRIBUTE1, + /** + * Key to the internal field name used to denote the edge's 2nd attribute + */ + EDGE_ATTRIBUTE2, + /** + * Key to the internal field name used to denote the edge's 3rd attribute + */ + EDGE_ATTRIBUTE3, + /** + * Key to the internal field name used to denote a stats edge + */ + STATS_EDGE, + /** + * Key to the internal field name used to denote the edge's date + */ + DATE, + /** + * Key to the internal field name used to denote the edge enrichment type + */ + ENRICHMENT_TYPE, + /** + * Key to the internal field name used to denote the edge fact type + */ + FACT_TYPE, + /** + * Key to the internal field name used to denote the edge grouped fields + */ + GROUPED_FIELDS, + /** + * Key to the internal field name used to convey an edge count + */ + COUNT, + /** + * Key to the internal field name used to convey edge counts + */ + COUNTS, + /** + * Key to the internal field name used to denote an edge load date + */ + LOAD_DATE, + /** + * Key to the internal field name used to denote and edge id + */ + UUID, + /** + * Key to the internal field name used to denote the edge activity date + */ + ACTIVITY_DATE, + /** + * Key to the internal field name used to denote weather the edge activity date was good or bad + */ + BAD_ACTIVITY_DATE, + /** + * Key to the function + */ + FUNCTION; + } + + /** + * Returns the FieldKey associated with the actual (ie, configured) edge field name, ie, reverse lookup... 
+ */ + public FieldKey parse(String internalFieldName) { + FieldKey key = reverseMap.get(internalFieldName); + // if not specified in the maps, then try it as the enum name + if (key == null && internalFieldName != null) { + key = FieldKey.valueOf(internalFieldName); + } + return key; + } + + private void updateReverseMap(Map fields) { + if (fields != null) { + for (Map.Entry entry : fields.entrySet()) { + reverseMap.put(entry.getValue(), FieldKey.valueOf(entry.getKey())); + } + } + } + + public String getSourceFieldName() { + return baseFieldMap.get(FieldKey.EDGE_SOURCE.name()); + } + + public String getSinkFieldName() { + return baseFieldMap.get(FieldKey.EDGE_SINK.name()); + } + + public String getTypeFieldName() { + return baseFieldMap.get(FieldKey.EDGE_TYPE.name()); + } + + public String getRelationshipFieldName() { + return baseFieldMap.get(FieldKey.EDGE_RELATIONSHIP.name()); + } + + public String getAttribute1FieldName() { + return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE1.name()); + } + + public String getAttribute2FieldName() { + return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE2.name()); + } + + public String getAttribute3FieldName() { + return baseFieldMap.get(FieldKey.EDGE_ATTRIBUTE3.name()); + } + + public String getDateFieldName() { + return baseFieldMap.get(FieldKey.DATE.name()); + } + + public String getStatsEdgeFieldName() { + return baseFieldMap.get(FieldKey.STATS_EDGE.name()); + } + + public String getGroupedFieldsFieldName() { + return keyUtilFieldMap.get(FieldKey.GROUPED_FIELDS.name()); + } + + public String getEnrichmentTypeFieldName() { + return keyUtilFieldMap.get(FieldKey.ENRICHMENT_TYPE.name()); + } + + public String getFactTypeFieldName() { + return keyUtilFieldMap.get(FieldKey.FACT_TYPE.name()); + } + + public String getCountFieldName() { + return transformFieldMap.get(FieldKey.COUNT.name()); + } + + public String getCountsFieldName() { + return transformFieldMap.get(FieldKey.COUNTS.name()); + } + + public String getLoadDateFieldName() { + 
return transformFieldMap.get(FieldKey.LOAD_DATE.name()); + } + + public String getUuidFieldName() { + return transformFieldMap.get(FieldKey.UUID.name()); + } + + public String getActivityDateFieldName() { + return transformFieldMap.get(FieldKey.ACTIVITY_DATE.name()); + } + + public String getBadActivityDateFieldName() { + return transformFieldMap.get(FieldKey.BAD_ACTIVITY_DATE.name()); + } + + /** + * Returns the subset of all edge-related field names which are common to all application tiers. + * + * @return + */ + public Collection getBaseFieldNames() { + return Collections.unmodifiableCollection(baseFieldMap.values()); + } + + /** + * Returns the field names associated with key manipulation and processing, a superset of the fields given by the {@link EdgeModelFields#getBaseFieldNames} + * method + * + * @return + */ + public Collection getKeyProcessingFieldNames() { + HashSet fields = new HashSet<>(); + fields.addAll(baseFieldMap.values()); + fields.addAll(keyUtilFieldMap.values()); + return Collections.unmodifiableCollection(fields); + } + + /** + * Returns the field names associated with query result transformation, a superset of the fields given by the {@link EdgeModelFields#getBaseFieldNames} + * method + * + * @return + */ + public Collection getTransformFieldNames() { + HashSet fields = new HashSet<>(); + fields.addAll(baseFieldMap.values()); + fields.addAll(transformFieldMap.values()); + return Collections.unmodifiableCollection(fields); + } + + /** + * Returns the mapped fields associated with edge key manipulation and processing, where the keys are represented as FieldKey.name() + * + * @return + */ + public Map getKeyProcessingFieldMap() { + HashMap all = new HashMap<>(); + all.putAll(baseFieldMap); + all.putAll(keyUtilFieldMap); + return Collections.unmodifiableMap(all); + } + + /** + * Returns the mapping for fields associated with query result transformation, where the keys are represented as FieldKey.name() + * + * @return + */ + public Map 
getTransformFieldMap() { + HashMap all = new HashMap<>(); + all.putAll(baseFieldMap); + all.putAll(transformFieldMap); + return Collections.unmodifiableMap(all); + } + + /** + * Returns all mapped field names, where the keys are represented as FieldKey.name() + * + * @return + */ + public Map getAllFieldsMap() { + HashMap all = new HashMap<>(); + all.putAll(baseFieldMap); + all.putAll(keyUtilFieldMap); + all.putAll(transformFieldMap); + return Collections.unmodifiableMap(all); + } + + /** + * Returns the field name mapped to the specified FieldKey + * + * @param key + * @return + */ + public String getFieldName(FieldKey key) { + String name = getAllFieldsMap().get(key.name()); + if (name == null) { + name = key.name(); + } + return name; + } +} diff --git a/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFieldsFactory.java b/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFieldsFactory.java new file mode 100644 index 00000000000..224108b9e45 --- /dev/null +++ b/warehouse/core/src/main/java/datawave/edge/model/EdgeModelFieldsFactory.java @@ -0,0 +1,5 @@ +package datawave.edge.model; + +public interface EdgeModelFieldsFactory { + EdgeModelFields createFields(); +} diff --git a/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java b/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java index 1d9e5de6b58..3e9d83becb6 100644 --- a/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java +++ b/warehouse/core/src/main/java/datawave/edge/util/EdgeKey.java @@ -48,7 +48,6 @@ public class EdgeKey { private EdgeKey(EDGE_FORMAT format, STATS_TYPE statsType, String sourceData, String sinkData, String family, String sourceRelationship, String sinkRelationship, String sourceAttribute1, String sinkAttribute1, String yyyymmdd, String attribute3, String attribute2, Text colvis, long timestamp, boolean deleted, DATE_TYPE dateType) { - this.format = format; this.statsType = statsType; this.sourceData = sourceData; diff --git 
a/warehouse/core/src/main/java/datawave/edge/util/EdgeKeyUtil.java b/warehouse/core/src/main/java/datawave/edge/util/EdgeKeyUtil.java index 637d9499aec..d5dce59a9cd 100644 --- a/warehouse/core/src/main/java/datawave/edge/util/EdgeKeyUtil.java +++ b/warehouse/core/src/main/java/datawave/edge/util/EdgeKeyUtil.java @@ -14,7 +14,7 @@ import org.apache.hadoop.io.Text; import datawave.data.type.Type; -import datawave.edge.model.EdgeModelAware.Fields.FieldKey; +import datawave.edge.model.EdgeModelFields.FieldKey; import datawave.util.StringUtils; /** diff --git a/warehouse/core/src/main/java/datawave/ingest/util/cache/Loader.java b/warehouse/core/src/main/java/datawave/ingest/util/cache/Loader.java index 7448536c038..05a9a774318 100644 --- a/warehouse/core/src/main/java/datawave/ingest/util/cache/Loader.java +++ b/warehouse/core/src/main/java/datawave/ingest/util/cache/Loader.java @@ -70,6 +70,7 @@ public ListenableFuture reload(final K key, final V oldValue) { log.trace("Reloading synchronously"); try { build(null); + return Futures.immediateFuture(load(key)); } catch (Exception e) { diff --git a/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java b/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java index 4370cd6099d..6b517b8c443 100644 --- a/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java +++ b/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java @@ -956,7 +956,6 @@ public V getCurrentValue() { Map>> binOfflineTable(JobContext job, String tableName, List ranges) throws TableNotFoundException, AccumuloException, AccumuloSecurityException, IOException { - Map>> binnedRanges = new HashMap<>(); try (AccumuloClient client = getClient(job.getConfiguration())) { diff --git a/warehouse/core/src/main/java/datawave/query/data/UUIDType.java b/warehouse/core/src/main/java/datawave/query/data/UUIDType.java index e419da32d85..a205bdc167c 100644 --- a/warehouse/core/src/main/java/datawave/query/data/UUIDType.java +++ 
b/warehouse/core/src/main/java/datawave/query/data/UUIDType.java @@ -7,28 +7,28 @@ public class UUIDType { - public static final String DEFAULT_VIEW = "default"; + public static final String DEFAULT_LOGIC = "default"; private String fieldName = null; private Integer allowWildcardAfter = null; - private final Map definedViews = new HashMap<>(); + private final Map queryLogics = new HashMap<>(); public UUIDType() {} - public UUIDType(String field, String view, Integer allowWildcardAfter) { + public UUIDType(String field, String queryLogic, Integer allowWildcardAfter) { this.fieldName = field; this.allowWildcardAfter = allowWildcardAfter; - this.definedViews.put(DEFAULT_VIEW, view); + this.queryLogics.put(DEFAULT_LOGIC, queryLogic); } - public UUIDType(String field, Map views, Integer allowWildcardAfter) { + public UUIDType(String field, Map queryLogics, Integer allowWildcardAfter) { this.fieldName = field; this.allowWildcardAfter = allowWildcardAfter; - this.definedViews.putAll(views); + this.queryLogics.putAll(queryLogics); } public Integer getAllowWildcardAfter() { @@ -47,26 +47,26 @@ public void setFieldName(String fieldName) { this.fieldName = fieldName; } - public String getDefinedView(String context) { + public String getQueryLogic(String context) { if (StringUtils.isEmpty(context)) { - context = DEFAULT_VIEW; + context = DEFAULT_LOGIC; } - return getDefinedViews().get(context); + return getQueryLogics().get(context); } - public Map getDefinedViews() { - return definedViews; + public Map getQueryLogics() { + return queryLogics; } - public void setDefinedViews(Map views) { - this.definedViews.clear(); - this.definedViews.putAll(views); + public void setQueryLogics(Map queryLogics) { + this.queryLogics.clear(); + this.queryLogics.putAll(queryLogics); } - public void setDefinedView(String context, String view) { + public void setQueryLogic(String context, String queryLogic) { if (StringUtils.isEmpty(context)) { - context = DEFAULT_VIEW; + context = DEFAULT_LOGIC; 
} - getDefinedViews().put(context, view); + getQueryLogics().put(context, queryLogic); } } diff --git a/warehouse/core/src/main/java/datawave/util/OperationEvaluator.java b/warehouse/core/src/main/java/datawave/util/OperationEvaluator.java index 4cb164c8e5c..5787bdf15f9 100644 --- a/warehouse/core/src/main/java/datawave/util/OperationEvaluator.java +++ b/warehouse/core/src/main/java/datawave/util/OperationEvaluator.java @@ -414,7 +414,7 @@ public static boolean compare(Date left, Date right, String operator) { public static > boolean compare(T left, T right, String operator) { Preconditions.checkArgument(!StringUtils.isBlank(operator), "operator must not be blank"); operator = CharMatcher.whitespace().removeFrom(operator); - switch (CharMatcher.whitespace().removeFrom(operator)) { + switch (operator) { case EQUALS: case DOUBLE_EQUALS: return left == right || left.compareTo(right) == 0; @@ -460,7 +460,7 @@ public static boolean compare(T left, T right, String operator, Comparator gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-data-dictionary-core jar ${project.artifactId} - - gov.nsa.datawave - datawave-query-core - gov.nsa.datawave.microservice dictionary-api + + gov.nsa.datawave.webservices + datawave-ws-common + + + javax.enterprise + cdi-api + diff --git a/warehouse/query-core/src/main/java/datawave/webservice/datadictionary/DataDictionaryType.java b/warehouse/data-dictionary-core/src/main/java/datawave/webservice/datadictionary/DataDictionaryType.java similarity index 100% rename from warehouse/query-core/src/main/java/datawave/webservice/datadictionary/DataDictionaryType.java rename to warehouse/data-dictionary-core/src/main/java/datawave/webservice/datadictionary/DataDictionaryType.java diff --git a/warehouse/query-core/src/main/java/datawave/webservice/datadictionary/RemoteDataDictionary.java b/warehouse/data-dictionary-core/src/main/java/datawave/webservice/datadictionary/RemoteDataDictionary.java similarity index 
100% rename from warehouse/query-core/src/main/java/datawave/webservice/datadictionary/RemoteDataDictionary.java rename to warehouse/data-dictionary-core/src/main/java/datawave/webservice/datadictionary/RemoteDataDictionary.java diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index 8426ef0f6b6..987ef739b67 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,20 +4,24 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-edge-dictionary-core jar ${project.artifactId} - - gov.nsa.datawave - datawave-query-core - gov.nsa.datawave.microservice dictionary-api + + gov.nsa.datawave.webservices + datawave-ws-common + + + javax.enterprise + cdi-api + diff --git a/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryProviderImpl.java b/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryProviderImpl.java new file mode 100644 index 00000000000..9d4d7f9c17e --- /dev/null +++ b/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryProviderImpl.java @@ -0,0 +1,18 @@ +package datawave.webservice.edgedictionary; + +import javax.inject.Inject; + +import datawave.core.common.edgedictionary.EdgeDictionaryProvider; +import datawave.microservice.query.Query; +import datawave.webservice.dictionary.edge.EdgeDictionaryBase; +import datawave.webservice.dictionary.edge.MetadataBase; + +public class EdgeDictionaryProviderImpl implements EdgeDictionaryProvider { + @Inject + private RemoteEdgeDictionary remoteEdgeDictionary; + + @Override + public EdgeDictionaryBase> getEdgeDictionary(Query settings, String metadataTableName) { + return remoteEdgeDictionary.getEdgeDictionary(settings, metadataTableName); + } +} diff --git a/warehouse/query-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryType.java 
b/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryType.java similarity index 100% rename from warehouse/query-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryType.java rename to warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/EdgeDictionaryType.java diff --git a/warehouse/query-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java b/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java similarity index 95% rename from warehouse/query-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java rename to warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java index 0cb7be817c6..80beb8db209 100644 --- a/warehouse/query-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java +++ b/warehouse/edge-dictionary-core/src/main/java/datawave/webservice/edgedictionary/RemoteEdgeDictionary.java @@ -13,6 +13,7 @@ import com.fasterxml.jackson.databind.ObjectReader; import datawave.configuration.RefreshableScope; +import datawave.microservice.query.Query; import datawave.security.authorization.DatawavePrincipal; import datawave.security.system.CallerPrincipal; import datawave.webservice.common.remote.RemoteHttpService; @@ -94,17 +95,17 @@ public void init() { edgeDictReader = objectMapper.readerFor(edgeDictionaryType); } - public EdgeDictionaryBase> getEdgeDictionary(String metadataTableName, String auths) { + public EdgeDictionaryBase> getEdgeDictionary(Query settings, String metadataTableName) { final String bearerHeader = "Bearer " + jwtTokenHandler.createTokenFromUsers(callerPrincipal.getName(), callerPrincipal.getProxiedUsers()); // @formatter:off return executeGetMethodWithRuntimeException("", uriBuilder -> { uriBuilder.addParameter("metadataTableName", metadataTableName); - uriBuilder.addParameter("auths", 
auths); + uriBuilder.addParameter("auths", settings.getQueryAuthorizations()); }, httpGet -> httpGet.setHeader("Authorization", bearerHeader), entity -> edgeDictReader.readValue(entity.getContent()), - () -> "getEdgeDictionary [" + metadataTableName + ", " + auths + "]"); + () -> "getEdgeDictionary [" + metadataTableName + ", " + settings.getQueryAuthorizations() + "]"); // @formatter:on } diff --git a/warehouse/edge-model-configuration-core/pom.xml b/warehouse/edge-model-configuration-core/pom.xml index 71c6025f73d..a200c084250 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-edge-model-configuration-core jar diff --git a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index f13553a106b..89d2237653e 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml b/warehouse/ingest-configuration/pom.xml index a3f27bef329..2a487fe2347 100644 --- a/warehouse/ingest-configuration/pom.xml +++ b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-configuration diff --git a/warehouse/ingest-configuration/src/main/resources/config/myjson-ingest-config.xml b/warehouse/ingest-configuration/src/main/resources/config/myjson-ingest-config.xml index f65c721b222..adf227a33b0 100644 --- a/warehouse/ingest-configuration/src/main/resources/config/myjson-ingest-config.xml +++ b/warehouse/ingest-configuration/src/main/resources/config/myjson-ingest-config.xml @@ -208,11 +208,11 @@ ColumnVisibility expression to be applied to the "SUMMARY" field - - - - - + + myjson.data.category.date + PREMIERED + Known date field to be used, 
if found, for the shard row id. Otherwise, current date will be used + myjson.data.category.date.formats diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index 66583c54c92..dd133501dce 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-core jar @@ -38,8 +38,8 @@ ${project.version} - gov.nsa.datawave.webservices - datawave-ws-common-util + gov.nsa.datawave.core + datawave-core-common-util ${project.version} @@ -75,6 +75,16 @@ org.apache.hadoop hadoop-mapreduce-client-common + + + org.eclipse.jetty + * + + + org.eclipse.jetty.websocket + * + + org.apache.hadoop diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java b/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java index 6cb450fe66a..98537561123 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/data/TypeRegistry.java @@ -17,6 +17,7 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.config.ConfigurationHelper; import datawave.ingest.data.config.DataTypeOverrideHelper; import datawave.ingest.data.config.filter.KeyValueFilter; @@ -24,7 +25,6 @@ import datawave.ingest.mapreduce.handler.DataTypeHandler; import datawave.marking.MarkingFunctions; import datawave.util.StringUtils; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public class TypeRegistry extends HashMap { diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/BaseIngestHelper.java b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/BaseIngestHelper.java index 8784be6ecd9..4dcd86e58e7 100644 --- 
a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/BaseIngestHelper.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/BaseIngestHelper.java @@ -25,6 +25,7 @@ import com.google.common.collect.Sets; import com.google.common.collect.TreeMultimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.normalizer.NormalizationException; import datawave.data.type.NoOpType; import datawave.data.type.OneToManyNormalizerType; @@ -40,7 +41,6 @@ import datawave.ingest.data.config.NormalizedFieldAndValue; import datawave.ingest.data.config.XMLFieldConfigHelper; import datawave.util.StringUtils; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Specialization of the Helper type that validates the configuration for Ingest purposes. These helper classes also have the logic to parse the field names and diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/ContentBaseIngestHelper.java b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/ContentBaseIngestHelper.java index 1f59a330696..c85b66638ae 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/ContentBaseIngestHelper.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/data/config/ingest/ContentBaseIngestHelper.java @@ -98,7 +98,6 @@ public void setup(Configuration config) throws IllegalArgumentException { this.saveRawDataOption = (null != config.get(getType().typeName() + SAVE_RAW_DATA_AS_DOCUMENT)) ? Boolean.parseBoolean(config.get(getType().typeName() + SAVE_RAW_DATA_AS_DOCUMENT)) : saveRawDataOption; - // If we're saving the raw data in the document column, we need a view name. // retrieve this view name if it has been specified, else use default if we // are saving the raw record in the doc column. 
diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/dateindex/DateIndexDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/dateindex/DateIndexDataTypeHandler.java index 964634a4282..c4c3da55537 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/dateindex/DateIndexDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/dateindex/DateIndexDataTypeHandler.java @@ -25,6 +25,7 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.normalizer.DateNormalizer; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.Type; @@ -38,7 +39,6 @@ import datawave.ingest.table.aggregator.DateIndexDateAggregator; import datawave.marking.MarkingFunctions; import datawave.util.StringUtils; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** *

    diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/edge/ProtobufEdgeDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/edge/ProtobufEdgeDataTypeHandler.java index 1c4740c3e0c..76b41fae926 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/edge/ProtobufEdgeDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/edge/ProtobufEdgeDataTypeHandler.java @@ -916,7 +916,6 @@ protected void registerEventMetadata(Map> eventMetadataRegistr // add to the eventMetadataRegistry map Key baseKey = createMetadataEdgeKey(edgeValue, edgeValue.getSource(), edgeValue.getSource().getIndexedFieldValue(), edgeValue.getSink(), edgeValue.getSink().getIndexedFieldValue(), this.getVisibility(edgeValue)); - Key fwdMetaKey = EdgeKey.getMetadataKey(baseKey); Key revMetaKey = EdgeKey.getMetadataKey(EdgeKey.swapSourceSink(EdgeKey.decode(baseKey)).encode()); @@ -1020,7 +1019,6 @@ protected String getEdgeDefGroup(String groupedFieldName) { protected long writeEdges(EdgeDataBundle value, TaskInputOutputContext context, ContextWriter contextWriter, boolean validActivtyDate, boolean sameActivityDate, long eventDate) throws IOException, InterruptedException { - long edgesCreated = 0; if (eventDate < newFormatStartDate) { edgesCreated += writeEdges(value, context, contextWriter, EdgeKey.DATE_TYPE.OLD_EVENT); diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/error/ErrorShardedDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/error/ErrorShardedDataTypeHandler.java index 53fda2881a3..00fa4a0c1a1 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/error/ErrorShardedDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/error/ErrorShardedDataTypeHandler.java @@ -102,7 +102,6 @@ */ public class 
ErrorShardedDataTypeHandler extends AbstractColumnBasedHandler implements ExtendedDataTypeHandler { - private static final Logger log = Logger.getLogger(ErrorShardedDataTypeHandler.class); public static final String ERROR_PROP_PREFIX = "error."; diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/AbstractColumnBasedHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/AbstractColumnBasedHandler.java index b5b3d7fa0cf..f7c7fd6e49b 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/AbstractColumnBasedHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/AbstractColumnBasedHandler.java @@ -9,13 +9,13 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.Type; import datawave.ingest.data.TypeRegistry; import datawave.ingest.data.config.DataTypeHelper; import datawave.ingest.data.config.NormalizedContentInterface; import datawave.ingest.data.config.ingest.IngestHelperInterface; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public class AbstractColumnBasedHandler extends ShardedDataTypeHandler { diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/ShardedDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/ShardedDataTypeHandler.java index 35a28cc004a..73798508b4b 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/ShardedDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/shard/ShardedDataTypeHandler.java @@ -27,6 +27,7 @@ import com.google.common.collect.Multimap; import com.google.common.hash.BloomFilter; +import 
datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.config.IngestConfiguration; import datawave.ingest.config.IngestConfigurationFactory; import datawave.ingest.data.RawRecordContainer; @@ -47,7 +48,6 @@ import datawave.marking.MarkingFunctions; import datawave.query.model.Direction; import datawave.util.TextUtil; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** *

    diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/CoreSummaryDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/CoreSummaryDataTypeHandler.java index 4795f153d35..94894ba67b5 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/CoreSummaryDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/CoreSummaryDataTypeHandler.java @@ -9,6 +9,7 @@ import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.Type; import datawave.ingest.data.TypeRegistry; @@ -17,7 +18,6 @@ import datawave.ingest.mapreduce.handler.DataTypeHandler; import datawave.ingest.mapreduce.job.BulkIngestKey; import datawave.ingest.metadata.RawRecordMetadata; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public abstract class CoreSummaryDataTypeHandler implements DataTypeHandler { private static final Logger log = ThreadConfigurableLogger.getLogger(CoreSummaryDataTypeHandler.class); diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryDataTypeHandler.java index 0d6f2a3abb6..b0d5433abc5 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryDataTypeHandler.java @@ -23,12 +23,12 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.config.NormalizedContentInterface; import 
datawave.ingest.data.config.ingest.IngestHelperInterface; import datawave.ingest.mapreduce.job.BulkIngestKey; import datawave.util.StringUtils; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Creates MetricsSummary entries. diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryFormatter.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryFormatter.java index 6db4c5483b6..6a9defbb842 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryFormatter.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/MetricsSummaryFormatter.java @@ -14,8 +14,8 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.config.NormalizedContentInterface; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public class MetricsSummaryFormatter { @@ -130,6 +130,5 @@ Iterable> getMatches(final Matcher matc // check to see if entry key matches the supplied matcher return matcher.reset(input.getKey()).matches(); }); - } } diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/SummaryDataTypeHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/SummaryDataTypeHandler.java index 75e1dd42af4..f4f3fbddec5 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/SummaryDataTypeHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/summary/SummaryDataTypeHandler.java @@ -9,6 +9,7 @@ import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.Type; import datawave.ingest.data.TypeRegistry; @@ 
-17,7 +18,6 @@ import datawave.ingest.mapreduce.handler.DataTypeHandler; import datawave.ingest.mapreduce.job.BulkIngestKey; import datawave.ingest.metadata.RawRecordMetadata; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public abstract class SummaryDataTypeHandler implements DataTypeHandler { private static final Logger log = ThreadConfigurableLogger.getLogger(SummaryDataTypeHandler.class); diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/tokenize/ContentIndexingColumnBasedHandler.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/tokenize/ContentIndexingColumnBasedHandler.java index 01cd7fd0239..2c6e2a5432b 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/tokenize/ContentIndexingColumnBasedHandler.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/handler/tokenize/ContentIndexingColumnBasedHandler.java @@ -599,7 +599,6 @@ protected void createTermFrequencyIndex(RawRecordContainer event, Multimap tableNames, TableOperations tops, Configuration conf, Logger log) throws AccumuloSecurityException, AccumuloException, TableNotFoundException { - Map tableConfigs = setupTableConfigHelpers(log, conf, tableNames); for (String table : tableNames) { diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/partition/MultiTableRangePartitioner.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/partition/MultiTableRangePartitioner.java index cdc7d6fbf91..3a287a97bba 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/partition/MultiTableRangePartitioner.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/partition/MultiTableRangePartitioner.java @@ -76,7 +76,6 @@ private void readCacheFilesIfNecessary() { log.error("Failed to read splits in MultiTableRangePartitioner: cache files: " + Arrays.toString(localCacheFiles), e); throw new RuntimeException("Failed to 
read splits in MultiTableRangePartitioner, fatal error. cache files: " + Arrays.toString(localCacheFiles), e); - } cacheFilesRead = true; } diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ErrorShardTableConfigHelper.java b/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ErrorShardTableConfigHelper.java index 687ee496aad..bcefe87bc5d 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ErrorShardTableConfigHelper.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ErrorShardTableConfigHelper.java @@ -58,7 +58,6 @@ public void setup(String tableName, Configuration config, Logger log) throws Ill localityGroupsConf = conf.get(shardDictionaryTableName + LOCALITY_GROUPS, ShardedDataTypeHandler.SHARD_DINDX_FLABEL_LOCALITY_NAME + ':' + ShardedDataTypeHandler.SHARD_DINDX_FLABEL + ',' + ShardedDataTypeHandler.SHARD_DINDX_RLABEL_LOCALITY_NAME + ':' + ShardedDataTypeHandler.SHARD_DINDX_RLABEL); - for (String localityGroupDefConf : StringUtils.split(localityGroupsConf)) { String[] localityGroupDef = StringUtils.split(localityGroupDefConf, '\\', ':'); Set families = localityGroups.get(localityGroupDef[0]); diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ShardTableConfigHelper.java b/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ShardTableConfigHelper.java index 929f954dadd..2014e3d8d66 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ShardTableConfigHelper.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/table/config/ShardTableConfigHelper.java @@ -113,7 +113,6 @@ public void setup(String tableName, Configuration config, Logger log) throws Ill localityGroupsConf = conf.get(shardDictionaryTableName + LOCALITY_GROUPS, ShardedDataTypeHandler.SHARD_DINDX_FLABEL_LOCALITY_NAME + ':' + ShardedDataTypeHandler.SHARD_DINDX_FLABEL + ',' + ShardedDataTypeHandler.SHARD_DINDX_RLABEL_LOCALITY_NAME + ':' + 
ShardedDataTypeHandler.SHARD_DINDX_RLABEL); - for (String localityGroupDefConf : StringUtils.split(localityGroupsConf)) { String[] localityGroupDef = StringUtils.split(localityGroupDefConf, '\\', ':'); Set families = localityGroups.get(localityGroupDef[0]); @@ -171,7 +170,6 @@ protected void configureShardTable(TableOperations tops) throws AccumuloExceptio // Set a text index aggregator on the "tf" (Term Frequency) column family CombinerConfiguration tfConf = new CombinerConfiguration(new Column("tf"), new IteratorSetting(10, "TF", datawave.ingest.table.aggregator.TextIndexAggregator.class.getName())); - setAggregatorConfigurationIfNecessary(tableName, Collections.singletonList(tfConf), tops, log); if (markingsSetupIteratorEnabled) { diff --git a/warehouse/ingest-core/src/main/java/datawave/policy/ExampleIngestPolicyEnforcer.java b/warehouse/ingest-core/src/main/java/datawave/policy/ExampleIngestPolicyEnforcer.java index ef76221bcbe..c1942d3d5be 100644 --- a/warehouse/ingest-core/src/main/java/datawave/policy/ExampleIngestPolicyEnforcer.java +++ b/warehouse/ingest-core/src/main/java/datawave/policy/ExampleIngestPolicyEnforcer.java @@ -2,9 +2,9 @@ import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.ingest.data.RawDataErrorNames; import datawave.ingest.data.RawRecordContainer; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * The purpose of this class is to demonstrate a likely usage pattern for an IngestPolicyEnforcer implementation diff --git a/warehouse/ingest-core/src/test/java/datawave/ingest/mapreduce/job/BulkIngestMapFileLoaderTest.java b/warehouse/ingest-core/src/test/java/datawave/ingest/mapreduce/job/BulkIngestMapFileLoaderTest.java index 8ac3a57093d..916d87f7a5d 100644 --- a/warehouse/ingest-core/src/test/java/datawave/ingest/mapreduce/job/BulkIngestMapFileLoaderTest.java +++ 
b/warehouse/ingest-core/src/test/java/datawave/ingest/mapreduce/job/BulkIngestMapFileLoaderTest.java @@ -805,7 +805,6 @@ public void testMainWithBadMajCDelay() throws IOException, InterruptedException Assert.assertTrue("BulkIngestMapLoader#main failed to generate the expected error message", processOutputContains(stdOut, "-majcDelay must be followed by the minimum number of ms to elapse between bringing map files online")); - } finally { BulkIngestMapFileLoaderTest.logger.info("testMainWithBadMajCDelay completed."); @@ -845,7 +844,6 @@ public void testMainWithMissingMajCDelay() throws IOException, InterruptedExcept Assert.assertTrue("BulkIngestMapLoader#main failed to generate the expected error message", processOutputContains(stdOut, "-majcDelay must be followed by the minimum number of ms to elapse between bringing map files online")); - } finally { BulkIngestMapFileLoaderTest.logger.info("testMainWithMissingMajCDelay completed."); diff --git a/warehouse/ingest-core/src/test/java/datawave/ingest/util/NGramTokenizationStrategyTest.java b/warehouse/ingest-core/src/test/java/datawave/ingest/util/NGramTokenizationStrategyTest.java index a3bd5791492..07d8ed864f4 100644 --- a/warehouse/ingest-core/src/test/java/datawave/ingest/util/NGramTokenizationStrategyTest.java +++ b/warehouse/ingest-core/src/test/java/datawave/ingest/util/NGramTokenizationStrategyTest.java @@ -483,7 +483,6 @@ public void testTokenize_WeightedLengthPruningWithMajorityOfOverweightValues() t int expectedCount = BloomFilterUtil.predictNGramCount(ncis.lastElement().getIndexedFieldValue()); assertTrue("Should have pruned the n-grams tokenized for field " + fieldName + " but got " + result3.get(fieldName), (null != result3.get(fieldName)) && (result3.get(fieldName) < expectedCount)); - } @Test diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index 8af44f0d50d..72bb5d81424 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,12 +4,17 @@ 
gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-csv jar ${project.artifactId} + + commons-net + commons-net + 3.6 + gov.nsa.datawave datawave-core @@ -37,6 +42,16 @@ org.apache.hadoop hadoop-mapreduce-client-common + + + org.eclipse.jetty + * + + + org.eclipse.jetty.websocket + * + + org.apache.hadoop diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index 23fa4da05fb..679101668ca 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-json jar diff --git a/warehouse/ingest-json/src/test/resources/config/ingest/tvmaze-ingest-config.xml b/warehouse/ingest-json/src/test/resources/config/ingest/tvmaze-ingest-config.xml index a1a8f7a97f8..0afcb093fc5 100644 --- a/warehouse/ingest-json/src/test/resources/config/ingest/tvmaze-ingest-config.xml +++ b/warehouse/ingest-json/src/test/resources/config/ingest/tvmaze-ingest-config.xml @@ -228,11 +228,11 @@ ColumnVisibility expression to be applied to the "SUMMARY" field - - - - - + + myjson.data.category.date + PREMIERED + Known date field to be used, if found, for the shard row id. 
Otherwise, current date will be used + myjson.data.category.date.formats diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index 0d8f29f7d60..b15a93b6bd2 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index 319560ee7f7..5a6319a60b0 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-scripts/src/main/resources/bin/ingest/findJars.sh b/warehouse/ingest-scripts/src/main/resources/bin/ingest/findJars.sh index 562a3fb83bb..6afb4f20f71 100644 --- a/warehouse/ingest-scripts/src/main/resources/bin/ingest/findJars.sh +++ b/warehouse/ingest-scripts/src/main/resources/bin/ingest/findJars.sh @@ -40,13 +40,17 @@ DATAWAVE_ACCUMULO_EXTENSIONS_JAR=$(findJar datawave-accumulo-extensions) DATAWAVE_METRICS_CORE_JAR=$(findJar datawave-metrics-core) DATAWAVE_METADATA_UTILS_JAR=$(findJar metadata-utils) DATAWAVE_TYPE_UTILS_JAR=$(findJar type-utils) -COMMON_UTIL_JAR=$(findWebserviceJar datawave-ws-common-util) -COMMON_JAR=$(findWebserviceJar datawave-ws-common) +DATAWAVE_CORE_COMMON_UTIL_JAR=$(findJar datawave-core-common-util) +DATAWAVE_WS_COMMON_UTIL_JAR=$(findWebserviceJar datawave-ws-common-util) +DATAWAVE_CORE_COMMON_JAR=$(findJar datawave-core-common) +DATAWAVE_WS_COMMON_JAR=$(findWebserviceJar datawave-ws-common) +DATAWAVE_CORE_CONNECTION_POOL_JAR=$(findJar datawave-core-connection-pool) INMEMORY_ACCUMULO_JAR=$(findJar datawave-in-memory-accumulo) DATAWAVE_ACCUMULO_UTILS_JAR=$(findJar accumulo-utils) DATAWAVE_BASE_REST_RESPONSES_JAR=$(findJar base-rest-responses) DATAWAVE_COMMON_UTILS_JAR=$(findJar common-utils) 
DATAWAVE_CORE_JAR=$(findJar datawave-core) +DATAWAVE_CORE_QUERY_JAR=$(findJar datawave-core-query) DATAWAVE_WS_QUERY_JAR=$(findWebserviceJar datawave-ws-query) DATAWAVE_WS_CLIENT_JAR=$(findWebserviceJar datawave-ws-client) CURATOR_FRAMEWORK_JAR=$(findJar curator-framework) @@ -65,6 +69,7 @@ COMMONS_JCI_CORE_JAR=$(findJar commons-jci-core) COMMONS_JCI_FAM_JAR=$(findJar commons-jci-fam) CAFFEINE_JAR=$(findJar caffeine) GUAVA_JAR=$(findJar guava) +FAILURE_ACCESS_JAR=$(findJar failureaccess) PROTOBUF_JAR=$(findJar protobuf-java) SLF4J_JAR=$(findJar slf4j-api) LOG4J2_API_JAR=$(findJar log4j-api) diff --git a/warehouse/ingest-scripts/src/main/resources/bin/ingest/ingest-libs.sh b/warehouse/ingest-scripts/src/main/resources/bin/ingest/ingest-libs.sh index 3f4386b54fc..9cadcff87dc 100755 --- a/warehouse/ingest-scripts/src/main/resources/bin/ingest/ingest-libs.sh +++ b/warehouse/ingest-scripts/src/main/resources/bin/ingest/ingest-libs.sh @@ -8,7 +8,8 @@ # Jars # CLASSPATH=${CONF_DIR} -CLASSPATH=${CLASSPATH}:${COMMON_UTIL_JAR} +CLASSPATH=${CLASSPATH}:${DATAWAVE_CORE_COMMON_UTIL_JAR} +CLASSPATH=${CLASSPATH}:${DATAWAVE_WS_COMMON_UTIL_JAR} CLASSPATH=${CLASSPATH}:${COMMONS_JEXL_JAR} CLASSPATH=${CLASSPATH}:${DATAWAVE_CORE_JAR} CLASSPATH=${CLASSPATH}:${DATAWAVE_ACCUMULO_EXTENSIONS_JAR} @@ -44,6 +45,7 @@ CLASSPATH=${CLASSPATH}:${COMMONS_JCI_CORE_JAR} CLASSPATH=${CLASSPATH}:${COMMONS_JCI_FAM_JAR} CLASSPATH=${CLASSPATH}:${CAFFEINE_JAR} CLASSPATH=${CLASSPATH}:${GUAVA_JAR} +CLASSPATH=${CLASSPATH}:${FAILURE_ACCESS_JAR} CLASSPATH=${CLASSPATH}:${PROTOBUF_JAR} CLASSPATH=${CLASSPATH}:${SLF4J_JAR} CLASSPATH=${CLASSPATH}:${LOG4J2_API_JAR} @@ -97,11 +99,14 @@ CLASSPATH=${CLASSPATH}:${JAXB_IMPL_JAR} CLASSPATH=${CLASSPATH}:${JSON_SIMPLE} #for query +CLASSPATH=${CLASSPATH}:${DATAWAVE_CORE_QUERY_JAR} CLASSPATH=${CLASSPATH}:${DATAWAVE_WS_QUERY_JAR} CLASSPATH=${CLASSPATH}:${DATAWAVE_WS_CLIENT_JAR} CLASSPATH=${CLASSPATH}:${PROTOSTUFF_CORE_JAR} CLASSPATH=${CLASSPATH}:${PROTOSTUFF_API_JAR} 
-CLASSPATH=${CLASSPATH}:${COMMON_JAR} +CLASSPATH=${CLASSPATH}:${DATAWAVE_CORE_COMMON_JAR} +CLASSPATH=${CLASSPATH}:${DATAWAVE_WS_COMMON_JAR} +CLASSPATH=${CLASSPATH}:${DATAWAVE_CORE_CONNECTION_POOL_JAR} #required for edge ingest CLASSPATH=${CLASSPATH}:${EDGE_KEY_VERSION_CACHE_FILE} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index 726e17492b0..3489e5d1f86 100644 --- a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-ssdeep diff --git a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index 8fb4fdbbe98..a4b63d80249 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 0dc17601060..2b21d8c948d 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-metrics-core jar diff --git a/warehouse/metrics-core/src/main/java/datawave/metrics/mapreduce/MetricsIngester.java b/warehouse/metrics-core/src/main/java/datawave/metrics/mapreduce/MetricsIngester.java index 7f9f2393162..936cbe2e3c0 100644 --- a/warehouse/metrics-core/src/main/java/datawave/metrics/mapreduce/MetricsIngester.java +++ b/warehouse/metrics-core/src/main/java/datawave/metrics/mapreduce/MetricsIngester.java @@ -282,7 +282,6 @@ protected int launchErrorsJob(Job job, Configuration conf) ranges.add(new Range(new Key(new Text("IngestJob_" + outFormat.format(dateObj))), new Key(new Text("IngestJob_" + outFormat.format(dateObjNext))))); - } catch (DateTimeParseException e) { log.error(e); } diff --git a/warehouse/ops-tools/config-compare/pom.xml 
b/warehouse/ops-tools/config-compare/pom.xml index 53c4ec08c9c..f1a7dec1a7f 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index e2e1d83fbc7..7ce29016e2f 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index 658728d7d0d..9533fce3c8e 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index 1d56085525f..447d4212ed2 100644 --- a/warehouse/pom.xml +++ b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-warehouse-parent pom @@ -50,7 +50,7 @@ 7.5.0 - 3.2.6 + 4.1.2 2.2.3 1.3 @@ -332,6 +332,16 @@ org.apache.zookeeper zookeeper ${version.zookeeper} + + + org.slf4j + slf4j-log4j12 + + + log4j + log4j + + org.codehaus.jackson diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index f5066975bad..86f0f2dac38 100644 --- a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-query-core jar @@ -60,44 +60,63 @@ datawave-core ${project.version} + + gov.nsa.datawave + datawave-ingest-core + ${project.version} + gov.nsa.datawave datawave-ssdeep-common ${project.version} - gov.nsa.datawave.microservice - metadata-utils + gov.nsa.datawave.core + 
datawave-core-cached-results + ${project.version} - org.apache.hadoop - hadoop-common + log4j + log4j + + + org.slf4j + slf4j-log4j12 - gov.nsa.datawave.microservice - query-metric-api + gov.nsa.datawave.core + datawave-core-common + ${project.version} - gov.nsa.datawave.webservices - datawave-ws-client + gov.nsa.datawave.core + datawave-core-common-util + ${project.version} - gov.nsa.datawave.webservices - datawave-ws-common + gov.nsa.datawave.core + datawave-core-query ${project.version} - gov.nsa.datawave.webservices - datawave-ws-common-util - ${project.version} + gov.nsa.datawave.microservice + metadata-utils + + + org.apache.hadoop + hadoop-common + + + + + gov.nsa.datawave.microservice + query-metric-api gov.nsa.datawave.webservices - datawave-ws-query - ${project.version} - ejb + datawave-ws-client io.dropwizard.metrics @@ -107,6 +126,13 @@ javax.enterprise cdi-api + + + javax.ws.rs + jsr311-api + 1.1.1 + true + org.apache.accumulo accumulo-core @@ -161,9 +187,11 @@ org.javatuples javatuples + org.jboss.resteasy resteasy-jaxrs + true org.ow2.asm @@ -214,6 +242,12 @@ commons-configuration2 provided + + gov.nsa.datawave + datawave-edge-dictionary-core + ${project.version} + test + gov.nsa.datawave datawave-ingest-configuration @@ -252,6 +286,12 @@ test-jar test + + gov.nsa.datawave.webservices + datawave-ws-query + ${project.version} + test + javax.validation validation-api diff --git a/warehouse/query-core/src/main/java/datawave/audit/DatawaveSelectorExtractor.java b/warehouse/query-core/src/main/java/datawave/audit/DatawaveSelectorExtractor.java index 23418892994..a82adefe4c2 100644 --- a/warehouse/query-core/src/main/java/datawave/audit/DatawaveSelectorExtractor.java +++ b/warehouse/query-core/src/main/java/datawave/audit/DatawaveSelectorExtractor.java @@ -8,10 +8,10 @@ import org.apache.commons.jexl3.parser.ASTJexlScript; import org.apache.log4j.Logger; +import datawave.microservice.query.Query; import datawave.query.jexl.JexlASTHelper; import 
datawave.query.language.parser.jexl.LuceneToJexlQueryParser; import datawave.query.language.tree.QueryNode; -import datawave.webservice.query.Query; public class DatawaveSelectorExtractor implements SelectorExtractor { @@ -21,6 +21,7 @@ public class DatawaveSelectorExtractor implements SelectorExtractor { @Override public List extractSelectors(Query query) throws IllegalArgumentException { List selectorList = new ArrayList<>(); + try { ASTJexlScript jexlScript; try { diff --git a/warehouse/query-core/src/main/java/datawave/audit/SplitSelectorExtractor.java b/warehouse/query-core/src/main/java/datawave/audit/SplitSelectorExtractor.java index dc512aad02d..ca22ea8e0ca 100644 --- a/warehouse/query-core/src/main/java/datawave/audit/SplitSelectorExtractor.java +++ b/warehouse/query-core/src/main/java/datawave/audit/SplitSelectorExtractor.java @@ -6,8 +6,8 @@ import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.math.IntRange; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; public class SplitSelectorExtractor implements SelectorExtractor { diff --git a/web-services/query/src/main/java/datawave/webservice/query/iterator/KeyAggregatingTransformIterator.java b/warehouse/query-core/src/main/java/datawave/core/query/iterator/KeyAggregatingTransformIterator.java similarity index 99% rename from web-services/query/src/main/java/datawave/webservice/query/iterator/KeyAggregatingTransformIterator.java rename to warehouse/query-core/src/main/java/datawave/core/query/iterator/KeyAggregatingTransformIterator.java index a490a6ddc60..380d31ef07d 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/iterator/KeyAggregatingTransformIterator.java +++ b/warehouse/query-core/src/main/java/datawave/core/query/iterator/KeyAggregatingTransformIterator.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.iterator; +package 
datawave.core.query.iterator; import java.util.ArrayList; import java.util.Comparator; diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/QueryParserUtil.java b/warehouse/query-core/src/main/java/datawave/core/query/util/QueryParserUtil.java similarity index 74% rename from web-services/query/src/main/java/datawave/webservice/query/util/QueryParserUtil.java rename to warehouse/query-core/src/main/java/datawave/core/query/util/QueryParserUtil.java index 760691ab2d5..ee8453564fe 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/QueryParserUtil.java +++ b/warehouse/query-core/src/main/java/datawave/core/query/util/QueryParserUtil.java @@ -1,4 +1,4 @@ -package datawave.webservice.query.util; +package datawave.core.query.util; import java.util.List; diff --git a/warehouse/query-core/src/main/java/datawave/mr/bulk/RfileScanner.java b/warehouse/query-core/src/main/java/datawave/mr/bulk/RfileScanner.java index e0bccbb81c8..99e95ebb95f 100644 --- a/warehouse/query-core/src/main/java/datawave/mr/bulk/RfileScanner.java +++ b/warehouse/query-core/src/main/java/datawave/mr/bulk/RfileScanner.java @@ -32,10 +32,10 @@ import com.google.common.collect.Lists; import datawave.common.util.ArgumentChecker; +import datawave.microservice.authorization.util.AuthorizationsUtil; import datawave.mr.bulk.split.TabletSplitSplit; import datawave.query.tables.SessionOptions; import datawave.security.iterator.ConfigurableVisibilityFilter; -import datawave.security.util.WSAuthorizationsUtil; public class RfileScanner extends SessionOptions implements BatchScanner, Closeable { @@ -68,7 +68,7 @@ public RfileScanner(AccumuloClient client, Configuration conf, String table, Set this.auths = auths; this.client = client; ranges = null; - authIter = WSAuthorizationsUtil.minimize(auths).iterator(); + authIter = AuthorizationsUtil.minimize(auths).iterator(); recordIterAuthString = authIter.next().toString(); iterators = Lists.newArrayList(); iterators = 
Collections.synchronizedList(iterators); diff --git a/warehouse/query-core/src/main/java/datawave/query/DocumentSerialization.java b/warehouse/query-core/src/main/java/datawave/query/DocumentSerialization.java index 36cdff784e5..84437af593e 100644 --- a/warehouse/query-core/src/main/java/datawave/query/DocumentSerialization.java +++ b/warehouse/query-core/src/main/java/datawave/query/DocumentSerialization.java @@ -10,6 +10,8 @@ import java.util.zip.Inflater; import java.util.zip.InflaterInputStream; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.exceptions.InvalidDocumentHeader; import datawave.query.exceptions.NoSuchDeserializerException; import datawave.query.function.deserializer.DocumentDeserializer; @@ -18,8 +20,6 @@ import datawave.query.function.serializer.DocumentSerializer; import datawave.query.function.serializer.KryoDocumentSerializer; import datawave.query.function.serializer.WritableDocumentSerializer; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.NotFoundQueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/cardinality/CardinalityConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/cardinality/CardinalityConfiguration.java index 42d4ff8f608..2bd635e7356 100644 --- a/warehouse/query-core/src/main/java/datawave/query/cardinality/CardinalityConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/cardinality/CardinalityConfiguration.java @@ -15,7 +15,7 @@ import datawave.query.model.QueryModel; public class CardinalityConfiguration { - + private boolean enabled = false; private Set cardinalityFields = null; private Map cardinalityFieldReverseMapping = null; private String cardinalityUidField = null; 
@@ -24,6 +24,14 @@ public class CardinalityConfiguration { private String[] nonDocumentFields = {"QUERY_USER", "QUERY_SYSTEM_FROM", "QUERY_LOGIC_NAME", "RESULT_DATA_AGE", "RESULT_DATATYPE"}; + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + public Set getCardinalityFields() { return cardinalityFields; } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/ContentQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/ContentQueryConfiguration.java index f5e8bf2c01d..515fd351ca1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/ContentQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/ContentQueryConfiguration.java @@ -1,63 +1,102 @@ package datawave.query.config; +import java.io.IOException; +import java.io.Serializable; import java.util.ArrayList; import java.util.Collection; +import java.util.Objects; import java.util.TreeSet; import org.apache.accumulo.core.data.Range; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.query.tables.content.ContentQueryLogic; /** - * Thin wrapper around GenericQueryConfiguration for use by the {@link datawave.query.tables.content.ContentQueryTable} + * Thin wrapper around GenericQueryConfiguration for use by the {@link ContentQueryLogic} * */ -public class ContentQueryConfiguration extends GenericQueryConfiguration { +public class ContentQueryConfiguration extends GenericQueryConfiguration implements Serializable { @SuppressWarnings("unused") private static final long serialVersionUID = 1662850178943683419L; - 
private Query query; - private final Collection ranges = new TreeSet<>(); + private transient Collection ranges; + + public ContentQueryConfiguration() { + super(); + setQuery(new QueryImpl()); + } public ContentQueryConfiguration(BaseQueryLogic configuredLogic, Query query) { super(configuredLogic); setQuery(query); + this.ranges = new TreeSet<>(); + } + + /** + * Factory method that instantiates a fresh ContentQueryConfiguration + * + * @return - a clean ContentQueryConfiguration + */ + public static ContentQueryConfiguration create() { + return new ContentQueryConfiguration(); } - public void addRange(final Range range) { + public synchronized void addRange(final Range range) { if (null != range) { - synchronized (this.ranges) { - this.ranges.add(range); - } + this.ranges.add(range); } } - public Query getQuery() { - return query; + public synchronized Collection getRanges() { + return new ArrayList<>(this.ranges); } - public Collection getRanges() { - final Collection orderedCopy; - synchronized (this.ranges) { - orderedCopy = new ArrayList<>(this.ranges); + public synchronized void setRanges(final Collection ranges) { + // As a single atomic operation, clear the range and add all of the + // specified ranges + this.ranges.clear(); + if (null != ranges) { + this.ranges.addAll(ranges); } - return orderedCopy; } - public void setQuery(Query query) { - this.query = query; + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + ContentQueryConfiguration that = (ContentQueryConfiguration) o; + return Objects.equals(ranges, that.ranges); } - public void setRanges(final Collection ranges) { - // As a single atomic operation, clear the range and add all of the - // specified ranges - synchronized (this.ranges) { - this.ranges.clear(); - if (null != ranges) { - this.ranges.addAll(ranges); - } + @Override + public int hashCode() { + return 
Objects.hash(super.hashCode(), ranges); + } + + private void writeObject(java.io.ObjectOutputStream out) throws IOException { + out.defaultWriteObject(); + out.writeInt(ranges != null ? ranges.size() : 0); + for (Range range : ranges) { + range.write(out); + } + } + + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + ranges = new TreeSet<>(); + int numRanges = in.readInt(); + while (numRanges-- > 0) { + Range range = new Range(); + range.readFields(in); + ranges.add(range); } } } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/EdgeExtendedSummaryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/EdgeExtendedSummaryConfiguration.java index cf254e88d32..41deac5f782 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/EdgeExtendedSummaryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/EdgeExtendedSummaryConfiguration.java @@ -1,11 +1,15 @@ package datawave.query.config; -import datawave.query.tables.edge.EdgeQueryLogic; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; +import java.io.Serializable; +import java.util.Collection; +import java.util.Objects; -public class EdgeExtendedSummaryConfiguration extends EdgeQueryConfiguration { +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.query.edge.DefaultExtendedEdgeQueryLogic; +public class EdgeExtendedSummaryConfiguration extends EdgeQueryConfiguration implements Serializable { public static final String SUMMARIZE = "summarize"; public static final String LIMITED_JEXL = "LIMITED_JEXL"; public static final String LIST = "LIST"; @@ -18,27 +22,139 @@ public class EdgeExtendedSummaryConfiguration extends EdgeQueryConfiguration { private char delimiter = '\0'; - private int scannerThreads = 10; - private 
boolean includeRelationships = true; private String edgeTypes; private boolean summaryInputType = false; - // Use to aggregate results will be false by default - private boolean aggregateResults = false; - private boolean overRideInput = false; private boolean overRideOutput = false; - public EdgeExtendedSummaryConfiguration(EdgeQueryLogic configuredLogic, Query query) { - super(configuredLogic, query); + /** + * Default constructor + */ + public EdgeExtendedSummaryConfiguration() { + super(); + } + + /** + * Performs a deep copy of the provided EdgeExtendedSummaryConfiguration into a new instance + * + * @param other + * - another EdgeExtendedSummaryConfiguration instance + */ + public EdgeExtendedSummaryConfiguration(EdgeExtendedSummaryConfiguration other) { + + // GenericQueryConfiguration copy first + super(other); + + // EdgeExtendedSummaryConfiguration copy + delimiter = other.delimiter; + includeRelationships = other.includeRelationships; + edgeTypes = other.edgeTypes; + summaryInputType = other.summaryInputType; + aggregateResults = other.aggregateResults; + overRideInput = other.overRideInput; + overRideOutput = other.overRideOutput; + } + + /** + * This constructor is used when we are creating a checkpoint for a set of ranges (i.e. QueryData objects). All configuration required for post planning + * needs to be copied over here. + * + * @param other + * @param queries + */ + public EdgeExtendedSummaryConfiguration(EdgeExtendedSummaryConfiguration other, Collection queries) { + this(other); + + this.setQueries(queries); + + // do not preserve the original queries iter. 
getQueriesIter will create a new + // iterator based off of the queries collection if queriesIter is null + this.setQueriesIter(null); + } + + @Override + public EdgeExtendedSummaryConfiguration checkpoint() { + // Create a new config that only contains what is needed to execute the specified ranges + return new EdgeExtendedSummaryConfiguration(this, getQueries()); + } + + /** + * Delegates deep copy work to appropriate constructor, sets additional values specific to the provided ShardQueryLogic + * + * @param logic + * - a DefaultExtendedEdgeQueryLogic instance or subclass + */ + public EdgeExtendedSummaryConfiguration(DefaultExtendedEdgeQueryLogic logic) { + this(logic.getConfig()); + } + + /** + * Factory method that instantiates an fresh EdgeExtendedSummaryConfiguration + * + * @return - a clean EdgeExtendedSummaryConfiguration + */ + public static EdgeExtendedSummaryConfiguration create() { + return new EdgeExtendedSummaryConfiguration(); + } + + /** + * Factory method that returns a deep copy of the provided EdgeExtendedSummaryConfiguration + * + * @param other + * - another instance of a EdgeExtendedSummaryConfiguration + * @return - copy of provided EdgeExtendedSummaryConfiguration + */ + public static EdgeExtendedSummaryConfiguration create(EdgeExtendedSummaryConfiguration other) { + return new EdgeExtendedSummaryConfiguration(other); + } + + /** + * Factory method that creates a EdgeExtendedSummaryConfiguration deep copy from a DefaultExtendedEdgeQueryLogic + * + * @param logic + * - a configured DefaultExtendedEdgeQueryLogic + * @return - a EdgeExtendedSummaryConfiguration + */ + public static EdgeExtendedSummaryConfiguration create(DefaultExtendedEdgeQueryLogic logic) { + + EdgeExtendedSummaryConfiguration config = create(logic.getConfig()); + + // Lastly, honor overrides passed in via query parameters + config.parseParameters(config.getQuery()); + + return config; + } + + /** + * Factory method that creates a EdgeExtendedSummaryConfiguration from a 
DefaultExtendedEdgeQueryLogic and a Query + * + * @param logic + * - a configured DefaultExtendedEdgeQueryLogic + * @param query + * - a configured Query object + * @return - a EdgeExtendedSummaryConfiguration + */ + public static EdgeExtendedSummaryConfiguration create(DefaultExtendedEdgeQueryLogic logic, Query query) { + EdgeExtendedSummaryConfiguration config = create(logic); + config.setQuery(query); + return config; } @Override - public EdgeQueryConfiguration parseParameters(Query settings) { + public EdgeExtendedSummaryConfiguration parseParameters(Query settings) { super.parseParameters(settings); + + // first, reset the params to their defaults + overRideInput = false; + summaryInputType = false; + includeRelationships = true; + delimiter = '\0'; + edgeTypes = null; + if (settings.getParameters() != null) { QueryImpl.Parameter p = settings.findParameter(SUMMARIZE); @@ -96,10 +212,6 @@ public boolean isIncludeRelationships() { return includeRelationships; } - public boolean isAggregateResults() { - return aggregateResults; - } - public boolean isSummaryInputType() { return summaryInputType; } @@ -115,4 +227,23 @@ public boolean isOverRideOutput() { public boolean isOverRideInput() { return overRideInput; } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + EdgeExtendedSummaryConfiguration that = (EdgeExtendedSummaryConfiguration) o; + return delimiter == that.delimiter && includeRelationships == that.includeRelationships && summaryInputType == that.summaryInputType + && aggregateResults == that.aggregateResults && overRideInput == that.overRideInput && overRideOutput == that.overRideOutput + && Objects.equals(edgeTypes, that.edgeTypes); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), delimiter, includeRelationships, edgeTypes, summaryInputType, aggregateResults, overRideInput, 
overRideOutput); + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/EdgeQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/EdgeQueryConfiguration.java index 4516d3d4563..6fab3abc41c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/EdgeQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/EdgeQueryConfiguration.java @@ -1,19 +1,26 @@ package datawave.query.config; +import java.io.Serializable; +import java.util.Collection; import java.util.List; +import java.util.Objects; +import datawave.core.query.configuration.CheckpointableQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; import datawave.data.type.Type; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.model.edge.EdgeQueryModel; import datawave.query.tables.edge.EdgeQueryLogic; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Created with IntelliJ IDEA. To change this template use File | Settings | File Templates. */ -public class EdgeQueryConfiguration extends GenericQueryConfiguration { +public class EdgeQueryConfiguration extends GenericQueryConfiguration implements Serializable, CheckpointableQueryConfiguration { private static final long serialVersionUID = -2795330785878662313L; + public static final int DEFAULT_SKIP_LIMIT = 10; + public static final long DEFAULT_SCAN_LIMIT = Long.MAX_VALUE; // We originally had the two choices: // EVENT => apply date range to edges generated using event date @@ -38,12 +45,11 @@ public enum dateType { // Query model defaults... 
private String modelName = "DATAWAVE_EDGE"; private String modelTableName = "DatawaveMetadata"; + private String metadataTableName = "DatawaveMetadata"; private EdgeQueryModel edgeQueryModel = null; private List> dataTypes; - private int numQueryThreads; - private Query query; - private boolean protobufEdgeFormat = true; + private List> regexDataTypes = null; // to be backwards compatible, by default we want to return protected boolean includeStats = true; @@ -54,46 +60,141 @@ public enum dateType { private dateType dateRangeType = dateType.EVENT; // Use to aggregate results will be false by default - private boolean aggregateResults = false; + protected boolean aggregateResults = false; - public EdgeQueryConfiguration(EdgeQueryLogic configuredLogic, Query query) { - super(configuredLogic); - setDataTypes(configuredLogic.getDataTypes()); - setNumQueryThreads(configuredLogic.getQueryThreads()); - setQuery(query); - setProtobufEdgeFormat(configuredLogic.isProtobufEdgeFormat()); - setModelName(configuredLogic.getModelName()); - setModelTableName(configuredLogic.getModelTableName()); - setEdgeQueryModel(configuredLogic.getEdgeQueryModel()); - setMaxQueryTerms(configuredLogic.getMaxQueryTerms()); + protected int queryThreads = 8; + + protected int dateFilterSkipLimit = DEFAULT_SKIP_LIMIT; + + protected long dateFilterScanLimit = DEFAULT_SCAN_LIMIT; + + /** + * Default constructor + */ + public EdgeQueryConfiguration() { + super(); } - public List> getDataTypes() { - return dataTypes; + /** + * Performs a deep copy of the provided EdgeQueryConfiguration into a new instance + * + * @param other + * - another EdgeQueryConfiguration instance + */ + public EdgeQueryConfiguration(EdgeQueryConfiguration other) { + + // GenericQueryConfiguration copy first + super(other); + + // EdgeQueryConfiguration copy + setModelName(other.getModelName()); + setModelTableName(other.getModelTableName()); + setMetadataTableName(other.getMetadataTableName()); + 
setEdgeQueryModel(other.getEdgeQueryModel()); + setDataTypes(other.getDataTypes()); + setRegexDataTypes(other.getRegexDataTypes()); + setQueryThreads(other.getQueryThreads()); + setIncludeStats(other.includeStats()); + setMaxQueryTerms(other.getMaxQueryTerms()); + setMaxPrefilterValues(other.getMaxPrefilterValues()); + setDateRangeType(other.getDateRangeType()); + setAggregateResults(other.isAggregateResults()); + setDateFilterScanLimit(other.getDateFilterScanLimit()); + setDateFilterSkipLimit(other.getDateFilterSkipLimit()); + setMaxQueryTerms(other.getMaxQueryTerms()); } - public void setDataTypes(List> dataTypes) { - this.dataTypes = dataTypes; + /** + * This constructor is used when we are creating a checkpoint for a set of ranges (i.e. QueryData objects). All configuration required for post planning + * needs to be copied over here. + * + * @param other + * @param queries + */ + public EdgeQueryConfiguration(EdgeQueryConfiguration other, Collection queries) { + this(other); + + this.setQueries(queries); + + // do not preserve the original queries iter. 
getQueriesIter will create a new + // iterator based off of the queries collection if queriesIter is null + this.setQueriesIter(null); + } + + @Override + public EdgeQueryConfiguration checkpoint() { + // Create a new config that only contains what is needed to execute the specified ranges + return new EdgeQueryConfiguration(this, getQueries()); } - public int getNumQueryThreads() { - return numQueryThreads; + /** + * Delegates deep copy work to appropriate constructor, sets additional values specific to the provided ShardQueryLogic + * + * @param logic + * - a EdgeQueryLogic instance or subclass + */ + public EdgeQueryConfiguration(EdgeQueryLogic logic) { + this(logic.getConfig()); } - public void setNumQueryThreads(int numQueryThreads) { - this.numQueryThreads = numQueryThreads; + /** + * Factory method that instantiates an fresh EdgeQueryConfiguration + * + * @return - a clean EdgeQueryConfiguration + */ + public static EdgeQueryConfiguration create() { + return new EdgeQueryConfiguration(); } - public boolean isProtobufEdgeFormat() { - return protobufEdgeFormat; + /** + * Factory method that returns a deep copy of the provided EdgeQueryConfiguration + * + * @param other + * - another instance of a EdgeQueryConfiguration + * @return - copy of provided EdgeQueryConfiguration + */ + public static EdgeQueryConfiguration create(EdgeQueryConfiguration other) { + return new EdgeQueryConfiguration(other); } - public void setProtobufEdgeFormat(boolean protobufEdgeFormat) { - this.protobufEdgeFormat = protobufEdgeFormat; + /** + * Factory method that creates a EdgeQueryConfiguration deep copy from a EdgeQueryLogic + * + * @param logic + * - a configured EdgeQueryLogic + * @return - a EdgeQueryConfiguration + */ + public static EdgeQueryConfiguration create(EdgeQueryLogic logic) { + + EdgeQueryConfiguration config = create(logic.getConfig()); + + // Lastly, honor overrides passed in via query parameters + config.parseParameters(config.getQuery()); + + return config; } - 
public boolean includeStats() { - return includeStats; + /** + * Factory method that creates a EdgeQueryConfiguration from a EdgeQueryLogic and a Query + * + * @param logic + * - a configured EdgeQueryLogic + * @param query + * - a configured Query object + * @return - a EdgeQueryConfiguration + */ + public static EdgeQueryConfiguration create(EdgeQueryLogic logic, Query query) { + EdgeQueryConfiguration config = create(logic); + config.setQuery(query); + return config; + } + + public List> getDataTypes() { + return dataTypes; + } + + public void setDataTypes(List> dataTypes) { + this.dataTypes = dataTypes; } public dateType getDateRangeType() { @@ -112,6 +213,13 @@ public void setDateRangeType(dateType dateRangeType) { * @return an edge query configuration */ public EdgeQueryConfiguration parseParameters(Query settings) { + setQuery(settings); + + // first, reset the params to their defaults + includeStats = true; + dateRangeType = dateType.EVENT; + aggregateResults = false; + if (settings.getParameters() != null) { QueryImpl.Parameter p = settings.findParameter(INCLUDE_STATS); if (p != null && !p.getParameterValue().isEmpty()) { @@ -152,14 +260,6 @@ public EdgeQueryConfiguration parseParameters(Query settings) { return this; } - public Query getQuery() { - return query; - } - - public void setQuery(Query query) { - this.query = query; - } - public long getMaxQueryTerms() { return maxQueryTerms; } @@ -207,4 +307,75 @@ public String getModelTableName() { public void setModelTableName(String modelTableName) { this.modelTableName = modelTableName; } + + public String getMetadataTableName() { + return metadataTableName; + } + + public void setMetadataTableName(String metadataTableName) { + this.metadataTableName = metadataTableName; + } + + public List> getRegexDataTypes() { + return regexDataTypes; + } + + public void setRegexDataTypes(List> regexDataTypes) { + this.regexDataTypes = regexDataTypes; + } + + public int getQueryThreads() { + return queryThreads; + } + + 
public void setQueryThreads(int queryThreads) { + this.queryThreads = queryThreads; + } + + public boolean includeStats() { + return includeStats; + } + + public void setIncludeStats(boolean includeStats) { + this.includeStats = includeStats; + } + + public int getDateFilterSkipLimit() { + return dateFilterSkipLimit; + } + + public void setDateFilterSkipLimit(int dateFilterSkipLimit) { + this.dateFilterSkipLimit = dateFilterSkipLimit; + } + + public long getDateFilterScanLimit() { + return dateFilterScanLimit; + } + + public void setDateFilterScanLimit(long dateFilterScanLimit) { + this.dateFilterScanLimit = dateFilterScanLimit; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + EdgeQueryConfiguration that = (EdgeQueryConfiguration) o; + return queryThreads == that.queryThreads && includeStats == that.includeStats && maxQueryTerms == that.maxQueryTerms + && maxPrefilterValues == that.maxPrefilterValues && aggregateResults == that.aggregateResults + && Objects.equals(modelName, that.modelName) && Objects.equals(modelTableName, that.modelTableName) + && Objects.equals(edgeQueryModel, that.edgeQueryModel) && Objects.equals(dataTypes, that.dataTypes) + && dateRangeType == that.dateRangeType && dateFilterScanLimit == that.dateFilterScanLimit + && dateFilterSkipLimit == that.dateFilterSkipLimit; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), modelName, modelTableName, edgeQueryModel, dataTypes, queryThreads, includeStats, maxQueryTerms, + maxPrefilterValues, dateRangeType, aggregateResults, dateFilterScanLimit, dateFilterSkipLimit); + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/LookupUUIDTune.java b/warehouse/query-core/src/main/java/datawave/query/config/LookupUUIDTune.java index ccbfae98736..1efa238bb6c 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/config/LookupUUIDTune.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/LookupUUIDTune.java @@ -10,6 +10,8 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; import datawave.query.Constants; import datawave.query.language.parser.QueryParser; import datawave.query.planner.DefaultQueryPlanner; @@ -17,8 +19,6 @@ import datawave.query.planner.rules.NodeTransformRule; import datawave.query.tables.ShardQueryLogic; import datawave.query.tld.TLDQueryIterator; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; public class LookupUUIDTune implements Profile { diff --git a/warehouse/query-core/src/main/java/datawave/query/config/Profile.java b/warehouse/query-core/src/main/java/datawave/query/config/Profile.java index 4c00c861e32..456331bca1a 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/Profile.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/Profile.java @@ -5,9 +5,9 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; import datawave.query.planner.QueryPlanner; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; /** * Purpose: Provides a mechanism to provide a user configurable way to tune a user's query, such that he or she may take advantage of features that could have diff --git a/warehouse/query-core/src/main/java/datawave/query/config/RemoteQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/RemoteQueryConfiguration.java index ead2d40f4c7..88bb1166b2b 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/config/RemoteQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/RemoteQueryConfiguration.java @@ -1,18 +1,23 @@ package datawave.query.config; +import java.io.ObjectStreamException; import java.io.Serializable; +import java.util.Collection; +import java.util.Collections; import java.util.Objects; +import datawave.core.query.configuration.CheckpointableQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.tables.RemoteEventQueryLogic; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** *

    * A GenericQueryConfiguration implementation that provides the additional logic on top of the traditional query that is needed to run a remote query logic * */ -public class RemoteQueryConfiguration extends GenericQueryConfiguration implements Serializable { +public class RemoteQueryConfiguration extends GenericQueryConfiguration implements Serializable, CheckpointableQueryConfiguration { private static final long serialVersionUID = -4354990715046146110L; @@ -21,8 +26,6 @@ public class RemoteQueryConfiguration extends GenericQueryConfiguration implemen private String remoteQueryLogic; - private Query query; - /** * Default constructor */ @@ -44,7 +47,22 @@ public RemoteQueryConfiguration(RemoteQueryConfiguration other) { // RemoteQueryConfiguration copy this.remoteId = other.getRemoteId(); this.remoteQueryLogic = other.getRemoteQueryLogic(); - this.query = other.getQuery(); + } + + /** + * This constructor is used when we are creating a checkpoint for a set of ranges (i.e. QueryData objects). All configuration required for post planning + * needs to be copied over here. 
+ * + * @param other + * @param queries + */ + public RemoteQueryConfiguration(RemoteQueryConfiguration other, Collection queries) { + this(other); + } + + @Override + public RemoteQueryConfiguration checkpoint() { + return new RemoteQueryConfiguration(this, Collections.EMPTY_LIST); } /** @@ -93,14 +111,6 @@ public void setRemoteQueryLogic(String remoteQueryLogic) { this.remoteQueryLogic = remoteQueryLogic; } - public Query getQuery() { - return query; - } - - public void setQuery(Query query) { - this.query = query; - } - @Override public boolean equals(Object o) { if (this == o) @@ -119,4 +129,9 @@ public int hashCode() { return Objects.hash(super.hashCode(), getRemoteId(), getRemoteQueryLogic(), getQuery()); } + // Part of the Serializable interface used to initialize any transient members during deserialization + protected Object readResolve() throws ObjectStreamException { + return this; + } + } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/SSDeepSimilarityQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/SSDeepSimilarityQueryConfiguration.java index a2b91d56c2b..096d3bff5ee 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/SSDeepSimilarityQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/SSDeepSimilarityQueryConfiguration.java @@ -6,16 +6,16 @@ import com.google.common.collect.Multimap; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.util.ssdeep.BucketAccumuloKeyGenerator; import datawave.util.ssdeep.ChunkSizeEncoding; import datawave.util.ssdeep.IntegerEncoding; import datawave.util.ssdeep.NGramGenerator; import datawave.util.ssdeep.NGramTuple; import datawave.util.ssdeep.SSDeepHash; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; 
-import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; public class SSDeepSimilarityQueryConfiguration extends GenericQueryConfiguration { @@ -33,15 +33,13 @@ public class SSDeepSimilarityQueryConfiguration extends GenericQueryConfiguratio /** Used to encode the chunk size as a character which is included in the ranges used to retrieve ngram tuples */ private ChunkSizeEncoding chunkSizeEncoder; - private Query query; - private Collection ranges; private Multimap queryMap; public SSDeepSimilarityQueryConfiguration() { super(); - query = new QueryImpl(); + setQuery(new QueryImpl()); } public SSDeepSimilarityQueryConfiguration(BaseQueryLogic configuredLogic) { @@ -52,14 +50,6 @@ public static SSDeepSimilarityQueryConfiguration create() { return new SSDeepSimilarityQueryConfiguration(); } - public Query getQuery() { - return query; - } - - public void setQuery(Query query) { - this.query = query; - } - public Collection getRanges() { return ranges; } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/ShardIndexQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/ShardIndexQueryConfiguration.java index 8c9c9a2c7cb..e29851b5e03 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/ShardIndexQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/ShardIndexQueryConfiguration.java @@ -1,7 +1,10 @@ package datawave.query.config; +import java.io.Serializable; +import java.util.Collection; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import org.apache.accumulo.core.data.Range; @@ -9,10 +12,12 @@ import com.google.common.collect.Maps; import com.google.common.collect.Multimap; +import datawave.core.query.configuration.CheckpointableQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import 
datawave.query.tables.ShardIndexQueryTable; -import datawave.webservice.query.Query; -public class ShardIndexQueryConfiguration extends ShardQueryConfiguration { +public class ShardIndexQueryConfiguration extends ShardQueryConfiguration implements Serializable, CheckpointableQueryConfiguration { private static final long serialVersionUID = 7616552164239289739L; private Multimap normalizedTerms = HashMultimap.create(); @@ -21,13 +26,78 @@ public class ShardIndexQueryConfiguration extends ShardQueryConfiguration { private Map,Range> rangesForTerms = Maps.newHashMap(); private Map,Entry> rangesForPatterns = Maps.newHashMap(); + private boolean allowLeadingWildcard; + + public ShardIndexQueryConfiguration() {} + + public ShardIndexQueryConfiguration(ShardIndexQueryConfiguration other) { + super(other); + setNormalizedPatterns(other.getNormalizedPatterns()); + setNormalizedTerms(other.getNormalizedTerms()); + setRangesForPatterns(other.getRangesForPatterns()); + setRangesForTerms(other.getRangesForTerms()); + setAllowLeadingWildcard(other.isAllowLeadingWildcard()); + } + public ShardIndexQueryConfiguration(ShardIndexQueryTable logic, Query query) { - this.setIndexTableName(logic.getIndexTableName()); - this.setFullTableScanEnabled(logic.isFullTableScanEnabled()); - this.setQuery(query); - this.setMetadataTableName(logic.getModelTableName()); - this.setRealmSuffixExclusionPatterns(logic.getRealmSuffixExclusionPatterns()); - this.setModelName(logic.getModelName()); + this(logic.getConfig()); + setQuery(query); + } + + public ShardIndexQueryConfiguration(ShardIndexQueryConfiguration other, Collection queries) { + super(other, queries); + setNormalizedPatterns(other.getNormalizedPatterns()); + setNormalizedTerms(other.getNormalizedTerms()); + setRangesForPatterns(other.getRangesForPatterns()); + setRangesForTerms(other.getRangesForTerms()); + setAllowLeadingWildcard(other.isAllowLeadingWildcard()); + } + + /** + * Factory method that instantiates a fresh 
ShardIndexQueryConfiguration + * + * @return - a clean ShardIndexQueryConfiguration + */ + public static ShardIndexQueryConfiguration create() { + return new ShardIndexQueryConfiguration(); + } + + /** + * Factory method that returns a deep copy of the provided ShardIndexQueryConfiguration + * + * @param other + * - another instance of a ShardIndexQueryConfiguration + * @return - copy of provided ShardIndexQueryConfiguration + */ + public static ShardIndexQueryConfiguration create(ShardIndexQueryConfiguration other) { + return new ShardIndexQueryConfiguration(other); + } + + /** + * Factory method that creates a ShardIndexQueryConfiguration deep copy from a ShardIndexQueryLogic + * + * @param shardQueryLogic + * - a configured ShardIndexQueryLogic + * @return - a ShardIndexQueryConfiguration + */ + public static ShardIndexQueryConfiguration create(ShardIndexQueryTable shardQueryLogic) { + ShardIndexQueryConfiguration config = create(shardQueryLogic.getConfig()); + return config; + } + + /** + * Factory method that creates a ShardIndexQueryConfiguration from a ShardIndexQueryLogic and a Query + * + * @param shardQueryLogic + * - a configured ShardIndexQueryLogic + * @param query + * - a configured Query object + * @return - a ShardIndexQueryConfiguration + */ + public static ShardIndexQueryConfiguration create(ShardIndexQueryTable shardQueryLogic, Query query) { + ShardIndexQueryConfiguration config = create(shardQueryLogic); + config.setQuery(query); + return config; } public void setNormalizedTerms(Multimap normalizedTerms) { @@ -61,4 +131,37 @@ public void setRangesForPatterns(Map,Entry> public Map,Entry> getRangesForPatterns() { return this.rangesForPatterns; } + + public boolean isAllowLeadingWildcard() { + return allowLeadingWildcard; + } + + public void setAllowLeadingWildcard(boolean allowLeadingWildcard) { + this.allowLeadingWildcard = allowLeadingWildcard; + } + + @Override + public ShardIndexQueryConfiguration checkpoint() { + // Create a new config that 
only contains what is needed to execute the ranges + return new ShardIndexQueryConfiguration(this, getQueries()); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + ShardIndexQueryConfiguration that = (ShardIndexQueryConfiguration) o; + return Objects.equals(normalizedTerms, that.normalizedTerms) && Objects.equals(normalizedPatterns, that.normalizedPatterns) + && Objects.equals(rangesForTerms, that.rangesForTerms) && Objects.equals(rangesForPatterns, that.rangesForPatterns) + && Objects.equals(allowLeadingWildcard, that.allowLeadingWildcard); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), normalizedTerms, normalizedPatterns, rangesForTerms, rangesForPatterns, allowLeadingWildcard); + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java index a1636d60664..dbc717578b6 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java @@ -1,9 +1,9 @@ package datawave.query.config; +import java.io.ObjectStreamException; import java.io.Serializable; import java.text.SimpleDateFormat; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -12,6 +12,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; +import java.util.Objects; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @@ -23,17 +24,22 @@ import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; -import com.fasterxml.jackson.annotation.JsonIgnore; import com.google.common.collect.ArrayListMultimap; 
import com.google.common.collect.HashMultimap; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import com.google.common.hash.BloomFilter; +import datawave.core.query.configuration.CheckpointableQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; import datawave.data.type.DiscreteIndexType; import datawave.data.type.NoOpType; import datawave.data.type.Type; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.Constants; import datawave.query.DocumentSerialization; import datawave.query.DocumentSerialization.ReturnType; @@ -55,9 +61,6 @@ import datawave.query.util.QueryStopwatch; import datawave.util.TableName; import datawave.util.UniversalSet; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** *

    @@ -72,7 +75,7 @@ * This class can be initialized with an instance of a ShardQueryLogic or ShardQueryTable which will grab the already configured parameters from the Accumulo * Webservice QueryTable and apply them to this configuration object */ -public class ShardQueryConfiguration extends GenericQueryConfiguration implements Serializable { +public class ShardQueryConfiguration extends GenericQueryConfiguration implements Serializable, CheckpointableQueryConfiguration { public static final String PARAM_VALUE_SEP_STR = new String(new char[] {Constants.PARAM_VALUE_SEP}); public static final String TABLE_NAME_SOURCE = "tableName"; @@ -81,11 +84,11 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement @SuppressWarnings("unused") private static final long serialVersionUID = -4354990715046146110L; private static final Logger log = Logger.getLogger(ShardQueryConfiguration.class); + // is this a tld query, explicitly default to false private boolean tldQuery = false; private Map filterOptions = new HashMap<>(); private boolean disableIndexOnlyDocuments = false; - @JsonIgnore private transient QueryStopwatch timers = new QueryStopwatch(); private int maxScannerBatchSize = 1000; /** @@ -205,7 +208,6 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement private Integer maxDocScanTimeout = -1; // A counter used to uniquely identify FSTs generated in the // PushdownLargeFieldedListsVisitor - @JsonIgnore private transient AtomicInteger fstCount = new AtomicInteger(0); // the percent shards marked when querying the date index after which the // shards are collapsed down to the entire day. 
@@ -215,13 +217,14 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement private List realmSuffixExclusionPatterns = null; // A default normalizer to use private Class> defaultType = NoOpType.class; - private SimpleDateFormat shardDateFormatter = new SimpleDateFormat("yyyyMMdd"); + private String shardDateFormat = "yyyyMMdd"; + private SimpleDateFormat shardDateFormatter = new SimpleDateFormat(this.shardDateFormat); // Enrichment properties private Boolean useEnrichers = false; private List enricherClassNames = null; // Filter properties private Boolean useFilters = false; - private List filterClassNames = null; + private List filterClassNames = Collections.emptyList(); private List indexFilteringClassNames = new ArrayList<>(); // Used for ignoring 'd' and 'tf' column family in `shard` private Set nonEventKeyPrefixes = Sets.newHashSet("d", "tf"); @@ -375,8 +378,6 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement // limit expanded terms to only those fields that are defined in the chosen // model. drop others private boolean shouldLimitTermExpansionToModel = false; - private Query query = null; - @JsonIgnore private transient ASTJexlScript queryTree = null; private boolean compressServerSideResults = false; private boolean indexOnlyFilterFunctionsEnabled = false; @@ -436,6 +437,11 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement // The class for the excerpt iterator private Class> excerptIterator = TermFrequencyExcerptIterator.class; + /** + * A bloom filter to avoid duplicate results if needed + */ + private BloomFilter bloom = null; + // controls when to issue a seek. disabled by default. 
private int fiFieldSeek = -1; private int fiNextSeek = -1; @@ -494,7 +500,7 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement */ public ShardQueryConfiguration() { super(); - query = new QueryImpl(); + setQuery(new QueryImpl()); } /** @@ -509,6 +515,7 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { super(other); // ShardQueryConfiguration copy + this.setCheckpointable(other.isCheckpointable()); this.setTldQuery(other.isTldQuery()); this.putFilterOptions(other.getFilterOptions()); this.setDisableIndexOnlyDocuments(other.isDisableIndexOnlyDocuments()); @@ -572,8 +579,8 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setRealmSuffixExclusionPatterns( null == other.getRealmSuffixExclusionPatterns() ? null : Lists.newArrayList(other.getRealmSuffixExclusionPatterns())); this.setDefaultType(other.getDefaultType()); - this.setShardDateFormatter(null == other.getShardDateFormatter() ? null : new SimpleDateFormat(other.getShardDateFormatter().toPattern())); // TODO -- - // deep copy + this.setShardDateFormat(other.getShardDateFormat()); + this.setShardDateFormatter(new SimpleDateFormat(this.getShardDateFormat())); this.setUseEnrichers(other.getUseEnrichers()); this.setEnricherClassNames(null == other.getEnricherClassNames() ? null : Lists.newArrayList(other.getEnricherClassNames())); this.setUseFilters(other.getUseFilters()); @@ -581,7 +588,8 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setIndexFilteringClassNames(null == other.getIndexFilteringClassNames() ? null : Lists.newArrayList(other.getIndexFilteringClassNames())); this.setNonEventKeyPrefixes(null == other.getNonEventKeyPrefixes() ? null : Sets.newHashSet(other.getNonEventKeyPrefixes())); this.setUnevaluatedFields(null == other.getUnevaluatedFields() ? null : Sets.newHashSet(other.getUnevaluatedFields())); - this.setDatatypeFilter(null == other.getDatatypeFilter() ? 
null : Sets.newHashSet(other.getDatatypeFilter())); + this.setDatatypeFilter(null == other.getDatatypeFilter() ? null + : (other.getDatatypeFilter() instanceof UniversalSet) ? UniversalSet.instance() : Sets.newHashSet(other.getDatatypeFilter())); this.setIndexHoles(null == other.getIndexHoles() ? null : Lists.newArrayList(other.getIndexHoles())); this.setProjectFields(null == other.getProjectFields() ? null : Sets.newHashSet(other.getProjectFields())); this.setRenameFields(null == other.getRenameFields() ? null : Sets.newHashSet(other.getRenameFields())); @@ -668,7 +676,7 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setQueryModel(null == other.getQueryModel() ? null : new QueryModel(other.getQueryModel())); this.setModelName(other.getModelName()); this.setModelTableName(other.getModelTableName()); - this.setLimitTermExpansionToModel(other.isExpansionLimitedToModelContents()); + this.setLimitTermExpansionToModel(other.isLimitTermExpansionToModel()); this.setQuery(null == other.getQuery() ? null : other.getQuery().duplicate(other.getQuery().getQueryName())); this.setQueryTree(null == other.getQueryTree() ? 
null : (ASTJexlScript) RebuildingVisitor.copy(other.getQueryTree())); this.setCompressServerSideResults(other.isCompressServerSideResults()); @@ -686,6 +694,7 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setActiveQueryLogNameSource(other.getActiveQueryLogNameSource()); this.setEnforceUniqueConjunctionsWithinExpression(other.getEnforceUniqueConjunctionsWithinExpression()); this.setEnforceUniqueDisjunctionsWithinExpression(other.getEnforceUniqueDisjunctionsWithinExpression()); + this.setBloom(other.getBloom()); this.setDisableWhindexFieldMappings(other.isDisableWhindexFieldMappings()); this.setWhindexMappingFields(other.getWhindexMappingFields()); this.setWhindexFieldMappings(other.getWhindexFieldMappings()); @@ -713,6 +722,64 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setSortQueryByCounts(other.isSortQueryByCounts()); } + /** + * This constructor is used when we are creating a checkpoint for a set of ranges (i.e. QueryData objects). All configuration required for post planning + * needs to be copied over here. + * + * @param other + * @param queries + */ + public ShardQueryConfiguration(ShardQueryConfiguration other, Collection queries) { + super(other); + + this.setQueries(queries); + + // do not preserve the original queries iter. 
getQueriesIter will create a new + // iterator based off of the queries collection if queriesIter is null + this.setQueriesIter(null); + + this.setShardTableName(other.getShardTableName()); + this.setModelTableName(other.getModelTableName()); + this.setModelName(other.getModelName()); + this.setMetadataTableName(other.getMetadataTableName()); + + this.setSpeculativeScanning(other.getSpeculativeScanning()); + this.setMaxDocScanTimeout(other.getMaxDocScanTimeout()); + this.setBackoffEnabled(other.getBackoffEnabled()); + + this.setHdfsSiteConfigURLs(other.getHdfsSiteConfigURLs()); + this.setHdfsFileCompressionCodec(other.getHdfsFileCompressionCodec()); + this.setIvaratorCacheDirConfigs(null == other.getIvaratorCacheDirConfigs() ? null : Lists.newArrayList(other.getIvaratorCacheDirConfigs())); + this.setIvaratorFstHdfsBaseURIs(other.getIvaratorFstHdfsBaseURIs()); + + this.setCleanupShardsAndDaysQueryHints(other.isCleanupShardsAndDaysQueryHints()); + this.setBypassExecutabilityCheck(other.isBypassExecutabilityCheck()); + this.setFullTableScanEnabled(other.getFullTableScanEnabled()); + this.setSerializeQueryIterator(other.getSerializeQueryIterator()); + this.setDatatypeFilter(other.getDatatypeFilter()); + + this.setMaxOrExpansionFstThreshold(other.getMaxOrExpansionFstThreshold()); + this.setMaxOrExpansionThreshold(other.getMaxOrExpansionThreshold()); + this.setMaxOrRangeIvarators(other.getMaxOrRangeIvarators()); + this.setMaxOrRangeThreshold(other.getMaxOrRangeThreshold()); + this.setInitialMaxTermThreshold(other.getInitialMaxTermThreshold()); + this.setIntermediateMaxTermThreshold(other.getIntermediateMaxTermThreshold()); + this.setFinalMaxTermThreshold(other.getFinalMaxTermThreshold()); + this.setMaxDepthThreshold(other.getMaxDepthThreshold()); + this.setMaxRangesPerRangeIvarator(other.getMaxRangesPerRangeIvarator()); + this.setFstCount(other.getFstCount()); + + this.setIndexedFields(null == other.getIndexedFields() ? 
null : Sets.newHashSet(other.getIndexedFields())); + + this.setSortedUIDs(other.isSortedUIDs()); + this.setBloom(other.getBloom()); + } + + @Override + public ShardQueryConfiguration checkpoint() { + return new ShardQueryConfiguration(this, getQueries()); + } + /** * Delegates deep copy work to appropriate constructor, sets additional values specific to the provided ShardQueryLogic * @@ -917,6 +984,14 @@ public void setFullTableScanEnabled(Boolean fullTableScanEnabled) { this.fullTableScanEnabled = fullTableScanEnabled; } + public String getShardDateFormat() { + return shardDateFormat; + } + + public void setShardDateFormat(String shardDateFormat) { + this.shardDateFormat = shardDateFormat; + } + public SimpleDateFormat getShardDateFormatter() { return shardDateFormatter; } @@ -933,7 +1008,6 @@ public void setDatatypeFilter(Set typeFilter) { this.datatypeFilter = typeFilter; } - @JsonIgnore public String getDatatypeFilterAsString() { return StringUtils.join(this.getDatatypeFilter(), Constants.PARAM_VALUE_SEP); } @@ -950,7 +1024,6 @@ public void setProjectFields(Set projectFields) { this.projectFields = deconstruct(projectFields); } - @JsonIgnore public String getProjectFieldsAsString() { return StringUtils.join(this.getProjectFields(), Constants.PARAM_VALUE_SEP); } @@ -991,10 +1064,6 @@ public void setEnricherClassNames(List enricherClassNames) { this.enricherClassNames = enricherClassNames; } - public String getEnricherClassNamesAsString() { - return StringUtils.join(this.getEnricherClassNames(), Constants.PARAM_VALUE_SEP); - } - public boolean isTldQuery() { return tldQuery; } @@ -1135,15 +1204,15 @@ public void putFilterOptions(final Map options) { } } - public void setFilterOptions(Map options) { - filterOptions.clear(); - putFilterOptions(options); - } - public Map getFilterOptions() { return Collections.unmodifiableMap(filterOptions); } + public void setFilterOptions(Map options) { + this.filterOptions.clear(); + this.filterOptions.putAll(options); + } + 
public List getFilterClassNames() { return filterClassNames; } @@ -1177,10 +1246,6 @@ public void setIndexFilteringClassNames(List classNames) { this.indexFilteringClassNames = new ArrayList<>((classNames != null ? classNames : Collections.EMPTY_LIST)); } - public String getFilterClassNamesAsString() { - return StringUtils.join(this.getFilterClassNames(), Constants.PARAM_VALUE_SEP); - } - public Class> getDefaultType() { return defaultType; } @@ -1210,7 +1275,6 @@ public void setNonEventKeyPrefixes(Collection nonEventKeyPrefixes) { } } - @JsonIgnore public String getNonEventKeyPrefixesAsString() { return StringUtils.join(this.getNonEventKeyPrefixes(), Constants.PARAM_VALUE_SEP); } @@ -1227,19 +1291,6 @@ public void setUnevaluatedFields(Collection unevaluatedFields) { } } - /** - * Join unevaluated fields together on comma - * - * @return the unevaluated fields string - */ - public String getUnevaluatedFieldsAsString() { - return StringUtils.join(this.unevaluatedFields, Constants.PARAM_VALUE_SEP); - } - - public void setUnevaluatedFields(String unevaluatedFieldList) { - this.setUnevaluatedFields(Arrays.asList(unevaluatedFieldList.split(PARAM_VALUE_SEP_STR))); - } - public int getEventPerDayThreshold() { return eventPerDayThreshold; } @@ -1559,7 +1610,6 @@ public void setExpandAllTerms(boolean expandAllTerms) { * * @return FIELDNAME1:normalizer.class;FIELDNAME2:normalizer.class; */ - @JsonIgnore public String getIndexedFieldDataTypesAsString() { if (null == this.getIndexedFields() || this.getIndexedFields().isEmpty()) { @@ -1578,7 +1628,6 @@ public String getIndexedFieldDataTypesAsString() { return sb.toString(); } - @JsonIgnore public String getNormalizedFieldNormalizersAsString() { if (null == this.getNormalizedFields() || this.getNormalizedFields().isEmpty()) { @@ -1710,7 +1759,6 @@ public void setLimitFields(Set limitFields) { this.limitFields = deconstruct(limitFields); } - @JsonIgnore public String getLimitFieldsAsString() { return 
StringUtils.join(this.getLimitFields(), Constants.PARAM_VALUE_SEP); } @@ -1723,7 +1771,6 @@ public void setMatchingFieldSets(Set matchingFieldSets) { this.matchingFieldSets = matchingFieldSets; } - @JsonIgnore public String getMatchingFieldSetsAsString() { return StringUtils.join(this.getMatchingFieldSets(), Constants.PARAM_VALUE_SEP); } @@ -1784,7 +1831,6 @@ public void setGroupFieldsBatchSize(int groupFieldsBatchSize) { this.groupFieldsBatchSize = groupFieldsBatchSize; } - @JsonIgnore public String getGroupFieldsBatchSizeAsString() { return "" + groupFieldsBatchSize; } @@ -2013,14 +2059,6 @@ public QueryStopwatch getTimers() { return timers; } - public Query getQuery() { - return query; - } - - public void setQuery(Query query) { - this.query = query; - } - public ASTJexlScript getQueryTree() { return queryTree; } @@ -2109,11 +2147,7 @@ public void setLimitTermExpansionToModel(boolean shouldLimitTermExpansionToModel this.shouldLimitTermExpansionToModel = shouldLimitTermExpansionToModel; } - public void setExpansionLimitedToModelContents(boolean shouldLimitTermExpansionToModel) { - this.shouldLimitTermExpansionToModel = shouldLimitTermExpansionToModel; - } - - public boolean isExpansionLimitedToModelContents() { + public boolean isLimitTermExpansionToModel() { return shouldLimitTermExpansionToModel; } @@ -2421,7 +2455,6 @@ public void setActiveQueryLogNameSource(String activeQueryLogNameSource) { * * @return the custom active query name to use, or a blank value if the default active query log should be used */ - @JsonIgnore public String getActiveQueryLogName() { if (activeQueryLogNameSource == null) { return ""; @@ -2484,6 +2517,14 @@ public void setEnforceUniqueDisjunctionsWithinExpression(boolean enforceUniqueDi this.enforceUniqueDisjunctionsWithinExpression = enforceUniqueDisjunctionsWithinExpression; } + public BloomFilter getBloom() { + return bloom; + } + + public void setBloom(BloomFilter bloom) { + this.bloom = bloom; + } + public Set 
getNoExpansionFields() { return this.noExpansionFields; } @@ -2698,4 +2739,427 @@ public boolean isSortQueryByCounts() { public void setSortQueryByCounts(boolean sortQueryByCounts) { this.sortQueryByCounts = sortQueryByCounts; } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + // @formatter:off + ShardQueryConfiguration that = (ShardQueryConfiguration) o; + return isTldQuery() == that.isTldQuery() && + isDisableIndexOnlyDocuments() == that.isDisableIndexOnlyDocuments() && + getMaxScannerBatchSize() == that.getMaxScannerBatchSize() && + getMaxIndexBatchSize() == that.getMaxIndexBatchSize() && + isAllTermsIndexOnly() == that.isAllTermsIndexOnly() && + getMaxIndexScanTimeMillis() == that.getMaxIndexScanTimeMillis() && + getParseTldUids() == that.getParseTldUids() && + getCollapseUids() == that.getCollapseUids() && + getCollapseUidsThreshold() == that.getCollapseUidsThreshold() && + getEnforceUniqueTermsWithinExpressions() == that.getEnforceUniqueTermsWithinExpressions() && + getReduceIngestTypes() == that.getReduceIngestTypes() && + getReduceIngestTypesPerShard() == that.getReduceIngestTypesPerShard() && + getPruneQueryByIngestTypes() == that.getPruneQueryByIngestTypes() && + getReduceQueryFields() == that.getReduceQueryFields() && + getReduceQueryFieldsPerShard() == that.getReduceQueryFieldsPerShard() && + getReduceTypeMetadata() == that.getReduceTypeMetadata() && + getReduceTypeMetadataPerShard() == that.getReduceTypeMetadataPerShard() && + isRebuildDatatypeFilter() == that.isRebuildDatatypeFilter() && + isRebuildDatatypeFilterPerShard() == that.isRebuildDatatypeFilterPerShard() && + getSequentialScheduler() == that.getSequentialScheduler() && + getCollectTimingDetails() == that.getCollectTimingDetails() && + getLogTimingDetails() == that.getLogTimingDetails() && + getSendTimingToStatsd() == that.getSendTimingToStatsd() && + 
getStatsdPort() == that.getStatsdPort() && + getStatsdMaxQueueSize() == that.getStatsdMaxQueueSize() && + getLimitAnyFieldLookups() == that.getLimitAnyFieldLookups() && + isBypassExecutabilityCheck() == that.isBypassExecutabilityCheck() && + isGeneratePlanOnly() == that.isGeneratePlanOnly() && + getBackoffEnabled() == that.getBackoffEnabled() && + getUnsortedUIDsEnabled() == that.getUnsortedUIDsEnabled() && + getSerializeQueryIterator() == that.getSerializeQueryIterator() && + isDebugMultithreadedSources() == that.isDebugMultithreadedSources() && + isSortGeoWaveQueryRanges() == that.isSortGeoWaveQueryRanges() && + getNumRangesToBuffer() == that.getNumRangesToBuffer() && + getRangeBufferTimeoutMillis() == that.getRangeBufferTimeoutMillis() && + getRangeBufferPollMillis() == that.getRangeBufferPollMillis() && + getGeometryMaxExpansion() == that.getGeometryMaxExpansion() && + getPointMaxExpansion() == that.getPointMaxExpansion() && + getGeoMaxExpansion() == that.getGeoMaxExpansion() && + getGeoWaveRangeSplitThreshold() == that.getGeoWaveRangeSplitThreshold() && + Double.compare(that.getGeoWaveMaxRangeOverlap(), getGeoWaveMaxRangeOverlap()) == 0 && + isOptimizeGeoWaveRanges() == that.isOptimizeGeoWaveRanges() && + getGeoWaveMaxEnvelopes() == that.getGeoWaveMaxEnvelopes() && + isCleanupShardsAndDaysQueryHints() == that.isCleanupShardsAndDaysQueryHints() && + Float.compare(that.getCollapseDatePercentThreshold(), getCollapseDatePercentThreshold()) == 0 && + isSortedUIDs() == that.isSortedUIDs() && + isTermFrequenciesRequired() == that.isTermFrequenciesRequired() && + isLimitFieldsPreQueryEvaluation() == that.isLimitFieldsPreQueryEvaluation() && + isHitList() == that.isHitList() && + isDateIndexTimeTravel() == that.isDateIndexTimeTravel() && + getIgnoreNonExistentFields() == that.getIgnoreNonExistentFields() && + getBeginDateCap() == that.getBeginDateCap() && + isFailOutsideValidDateRange() == that.isFailOutsideValidDateRange() && + isRawTypes() == that.isRawTypes() && + 
Double.compare(that.getMinSelectivity(), getMinSelectivity()) == 0 && + getIncludeDataTypeAsField() == that.getIncludeDataTypeAsField() && + getIncludeRecordId() == that.getIncludeRecordId() && + getIncludeHierarchyFields() == that.getIncludeHierarchyFields() && + getIncludeGroupingContext() == that.getIncludeGroupingContext() && + getFilterMaskedValues() == that.getFilterMaskedValues() && + isReducedResponse() == that.isReducedResponse() && + getAllowShortcutEvaluation() == that.getAllowShortcutEvaluation() && + getSpeculativeScanning() == that.getSpeculativeScanning() && + isDisableEvaluation() == that.isDisableEvaluation() && + isContainsIndexOnlyTerms() == that.isContainsIndexOnlyTerms() && + isContainsCompositeTerms() == that.isContainsCompositeTerms() && + isAllowFieldIndexEvaluation() == that.isAllowFieldIndexEvaluation() && + isAllowTermFrequencyLookup() == that.isAllowTermFrequencyLookup() && + isExpandUnfieldedNegations() == that.isExpandUnfieldedNegations() && + getEventPerDayThreshold() == that.getEventPerDayThreshold() && + getShardsPerDayThreshold() == that.getShardsPerDayThreshold() && + getInitialMaxTermThreshold() == that.getInitialMaxTermThreshold() && + getIntermediateMaxTermThreshold() == that.getIntermediateMaxTermThreshold() && + getIndexedMaxTermThreshold() == that.getIndexedMaxTermThreshold() && + getFinalMaxTermThreshold() == that.getFinalMaxTermThreshold() && + getMaxDepthThreshold() == that.getMaxDepthThreshold() && + isExpandFields() == that.isExpandFields() && + getMaxUnfieldedExpansionThreshold() == that.getMaxUnfieldedExpansionThreshold() && + isExpandValues() == that.isExpandValues() && + getMaxValueExpansionThreshold() == that.getMaxValueExpansionThreshold() && + getMaxOrExpansionThreshold() == that.getMaxOrExpansionThreshold() && + getMaxOrRangeThreshold() == that.getMaxOrRangeThreshold() && + getMaxOrRangeIvarators() == that.getMaxOrRangeIvarators() && + getMaxRangesPerRangeIvarator() == that.getMaxRangesPerRangeIvarator() && + 
getMaxOrExpansionFstThreshold() == that.getMaxOrExpansionFstThreshold() && + getYieldThresholdMs() == that.getYieldThresholdMs() && + getIvaratorCacheBufferSize() == that.getIvaratorCacheBufferSize() && + getIvaratorCacheScanPersistThreshold() == that.getIvaratorCacheScanPersistThreshold() && + getIvaratorCacheScanTimeout() == that.getIvaratorCacheScanTimeout() && + getMaxFieldIndexRangeSplit() == that.getMaxFieldIndexRangeSplit() && + getIvaratorMaxOpenFiles() == that.getIvaratorMaxOpenFiles() && + getIvaratorNumRetries() == that.getIvaratorNumRetries() && + isIvaratorPersistVerify() == that.isIvaratorPersistVerify() && + getIvaratorPersistVerifyCount() == that.getIvaratorPersistVerifyCount() && + getMaxIvaratorSources() == that.getMaxIvaratorSources() && + getMaxIvaratorSourceWait() == that.getMaxIvaratorSourceWait() && + getMaxIvaratorResults() == that.getMaxIvaratorResults() && + getMaxIvaratorTerms() == that.getMaxIvaratorTerms() && + getMaxEvaluationPipelines() == that.getMaxEvaluationPipelines() && + getMaxPipelineCachedResults() == that.getMaxPipelineCachedResults() && + isExpandAllTerms() == that.isExpandAllTerms() && + shouldLimitTermExpansionToModel == that.shouldLimitTermExpansionToModel && + isCompressServerSideResults() == that.isCompressServerSideResults() && + isIndexOnlyFilterFunctionsEnabled() == that.isIndexOnlyFilterFunctionsEnabled() && + isCompositeFilterFunctionsEnabled() == that.isCompositeFilterFunctionsEnabled() && + getGroupFieldsBatchSize() == that.getGroupFieldsBatchSize() && + getAccrueStats() == that.getAccrueStats() && + Objects.equals(getUniqueFields(), that.getUniqueFields()) && + getUniqueCacheBufferSize() == that.getUniqueCacheBufferSize() && + getCacheModel() == that.getCacheModel() && + isTrackSizes() == that.isTrackSizes() && + getEnforceUniqueConjunctionsWithinExpression() == that.getEnforceUniqueConjunctionsWithinExpression() && + getEnforceUniqueDisjunctionsWithinExpression() == 
that.getEnforceUniqueDisjunctionsWithinExpression() && + Objects.equals(getFilterOptions(), that.getFilterOptions()) && + Objects.equals(getAccumuloPassword(), that.getAccumuloPassword()) && + Objects.equals(getStatsdHost(), that.getStatsdHost()) && + Objects.equals(getShardTableName(), that.getShardTableName()) && + Objects.equals(getIndexTableName(), that.getIndexTableName()) && + Objects.equals(getReverseIndexTableName(), that.getReverseIndexTableName()) && + Objects.equals(getMetadataTableName(), that.getMetadataTableName()) && + Objects.equals(getDateIndexTableName(), that.getDateIndexTableName()) && + Objects.equals(getIndexStatsTableName(), that.getIndexStatsTableName()) && + Objects.equals(getDefaultDateTypeName(), that.getDefaultDateTypeName()) && + Objects.equals(getNumQueryThreads(), that.getNumQueryThreads()) && + Objects.equals(numLookupThreads, that.numLookupThreads) && + Objects.equals(getNumDateIndexThreads(), that.getNumDateIndexThreads()) && + Objects.equals(getMaxDocScanTimeout(), that.getMaxDocScanTimeout()) && + Objects.equals(getFullTableScanEnabled(), that.getFullTableScanEnabled()) && + Objects.equals(getRealmSuffixExclusionPatterns(), that.getRealmSuffixExclusionPatterns()) && + Objects.equals(getDefaultType(), that.getDefaultType()) && + Objects.equals(getShardDateFormat(), that.getShardDateFormat()) && + Objects.equals(getUseEnrichers(), that.getUseEnrichers()) && + Objects.equals(getEnricherClassNames(), that.getEnricherClassNames()) && + Objects.equals(getUseFilters(), that.getUseFilters()) && + Objects.equals(getFilterClassNames(), that.getFilterClassNames()) && + Objects.equals(getIndexFilteringClassNames(), that.getIndexFilteringClassNames()) && + Objects.equals(getNonEventKeyPrefixes(), that.getNonEventKeyPrefixes()) && + Objects.equals(getUnevaluatedFields(), that.getUnevaluatedFields()) && + Objects.equals(getDatatypeFilter(), that.getDatatypeFilter()) && + Objects.equals(getIndexHoles(), that.getIndexHoles()) && + 
Objects.equals(getProjectFields(), that.getProjectFields()) && + Objects.equals(getRenameFields(), that.getRenameFields()) && + Objects.equals(getDisallowlistedFields(), that.getDisallowlistedFields()) && + Objects.equals(getIndexedFields(), that.getIndexedFields()) && + Objects.equals(getReverseIndexedFields(), that.getReverseIndexedFields()) && + Objects.equals(getNormalizedFields(), that.getNormalizedFields()) && + Objects.equals(getDataTypes(), that.getDataTypes()) && + Objects.equals(getQueryFieldsDatatypes(), that.getQueryFieldsDatatypes()) && + Objects.equals(getNormalizedFieldsDatatypes(), that.getNormalizedFieldsDatatypes()) && + Objects.equals(getFieldToDiscreteIndexTypes(), that.getFieldToDiscreteIndexTypes()) && + Objects.equals(getCompositeToFieldMap(), that.getCompositeToFieldMap()) && + Objects.equals(getCompositeTransitionDates(), that.getCompositeTransitionDates()) && + Objects.equals(getCompositeFieldSeparators(), that.getCompositeFieldSeparators()) && + Objects.equals(getWhindexCreationDates(), that.getWhindexCreationDates()) && + isDisableWhindexFieldMappings() == that.isDisableWhindexFieldMappings() && + Objects.equals(getWhindexMappingFields(), that.getWhindexMappingFields()) && + Objects.equals(getWhindexFieldMappings(), that.getWhindexFieldMappings()) && + Objects.equals(getEvaluationOnlyFields(), that.getEvaluationOnlyFields()) && + Objects.equals(getQueryTermFrequencyFields(), that.getQueryTermFrequencyFields()) && + Objects.equals(getLimitFields(), that.getLimitFields()) && + Objects.equals(getMatchingFieldSets(), that.getMatchingFieldSets()) && + Objects.equals(getLimitFieldsField(), that.getLimitFieldsField()) && + Objects.equals(getHierarchyFieldOptions(), that.getHierarchyFieldOptions()) && + Objects.equals(getDocumentPermutations(), that.getDocumentPermutations()) && + getReturnType() == that.getReturnType() && + Objects.equals(getHdfsSiteConfigURLs(), that.getHdfsSiteConfigURLs()) && + Objects.equals(getHdfsFileCompressionCodec(), 
that.getHdfsFileCompressionCodec()) && + Objects.equals(getZookeeperConfig(), that.getZookeeperConfig()) && + Objects.equals(getIvaratorCacheDirConfigs(), that.getIvaratorCacheDirConfigs()) && + Objects.equals(getIvaratorFstHdfsBaseURIs(), that.getIvaratorFstHdfsBaseURIs()) && + Objects.equals(getQueryModel(), that.getQueryModel()) && + Objects.equals(getModelName(), that.getModelName()) && + Objects.equals(getModelTableName(), that.getModelTableName()) && + Objects.equals(getGroupFields(), that.getGroupFields()) && + Objects.equals(getUniqueFields(), that.getUniqueFields()) && + Objects.equals(getContentFieldNames(), that.getContentFieldNames()) && + Objects.equals(getActiveQueryLogNameSource(), that.getActiveQueryLogNameSource()) && + Objects.equals(getBloom(), that.getBloom()) && + Objects.equals(getNoExpansionFields(), that.getNoExpansionFields()) && + Objects.equals(getLenientFields(), that.getLenientFields()) && + Objects.equals(getStrictFields(), that.getStrictFields()) && + Objects.equals(getExcerptFields(), that.getExcerptFields()) && + getFiFieldSeek() == that.getFiFieldSeek() && + getFiNextSeek() == that.getFiNextSeek() && + getEventFieldSeek() == that.getEventFieldSeek() && + getEventNextSeek() == that.getEventNextSeek() && + getTfFieldSeek() == that.getTfFieldSeek() && + getTfNextSeek() == that.getTfNextSeek() && + getVisitorFunctionMaxWeight() == that.getVisitorFunctionMaxWeight() && + getQueryExecutionForPageTimeout() == that.getQueryExecutionForPageTimeout() && + isLazySetMechanismEnabled() == that.isLazySetMechanismEnabled() && + getDocAggregationThresholdMs() == that.getDocAggregationThresholdMs() && + getTfAggregationThresholdMs() == that.getTfAggregationThresholdMs() && + getPruneQueryOptions() == that.getPruneQueryOptions() && + getUseFieldCounts() == that.getUseFieldCounts() && + getUseTermCounts() == that.getUseTermCounts() && + isSortQueryBeforeGlobalIndex() == that.isSortQueryBeforeGlobalIndex() && + isSortQueryByCounts() == 
that.isSortQueryByCounts(); + // @formatter:on + } + + @Override + public int hashCode() { + // @formatter:off + return Objects.hash( + super.hashCode(), + isTldQuery(), + getFilterOptions(), + isDisableIndexOnlyDocuments(), + getMaxScannerBatchSize(), + getMaxIndexBatchSize(), + isAllTermsIndexOnly(), + getAccumuloPassword(), + getMaxIndexScanTimeMillis(), + getParseTldUids(), + getCollapseUids(), + getCollapseUidsThreshold(), + getEnforceUniqueTermsWithinExpressions(), + getReduceIngestTypes(), + getReduceIngestTypesPerShard(), + getPruneQueryByIngestTypes(), + getReduceQueryFields(), + getReduceQueryFieldsPerShard(), + getReduceTypeMetadata(), + getReduceTypeMetadataPerShard(), + isRebuildDatatypeFilter(), + isRebuildDatatypeFilterPerShard(), + getSequentialScheduler(), + getCollectTimingDetails(), + getLogTimingDetails(), + getSendTimingToStatsd(), + getStatsdHost(), + getStatsdPort(), + getStatsdMaxQueueSize(), + getLimitAnyFieldLookups(), + isBypassExecutabilityCheck(), + isGeneratePlanOnly(), + getBackoffEnabled(), + getUnsortedUIDsEnabled(), + getSerializeQueryIterator(), + isDebugMultithreadedSources(), + isSortGeoWaveQueryRanges(), + getNumRangesToBuffer(), + getRangeBufferTimeoutMillis(), + getRangeBufferPollMillis(), + getGeometryMaxExpansion(), + getPointMaxExpansion(), + getGeoMaxExpansion(), + getGeoWaveRangeSplitThreshold(), + getGeoWaveMaxRangeOverlap(), + isOptimizeGeoWaveRanges(), + getGeoWaveMaxEnvelopes(), + getShardTableName(), + getIndexTableName(), + getReverseIndexTableName(), + getMetadataTableName(), + getDateIndexTableName(), + getIndexStatsTableName(), + getDefaultDateTypeName(), + isCleanupShardsAndDaysQueryHints(), + getNumQueryThreads(), + numLookupThreads, + getNumDateIndexThreads(), + getMaxDocScanTimeout(), + getCollapseDatePercentThreshold(), + getFullTableScanEnabled(), + getRealmSuffixExclusionPatterns(), + getDefaultType(), + getShardDateFormat(), + getUseEnrichers(), + getEnricherClassNames(), + getUseFilters(), + 
getFilterClassNames(), + getIndexFilteringClassNames(), + getNonEventKeyPrefixes(), + getUnevaluatedFields(), + getDatatypeFilter(), + getIndexHoles(), + getProjectFields(), + getRenameFields(), + getDisallowlistedFields(), + getIndexedFields(), + getReverseIndexedFields(), + getNormalizedFields(), + getDataTypes(), + getQueryFieldsDatatypes(), + getNormalizedFieldsDatatypes(), + getFieldToDiscreteIndexTypes(), + getCompositeToFieldMap(), + getCompositeTransitionDates(), + getCompositeFieldSeparators(), + getWhindexCreationDates(), + isDisableWhindexFieldMappings(), + getWhindexMappingFields(), + getWhindexFieldMappings(), + getEvaluationOnlyFields(), + isSortedUIDs(), + getQueryTermFrequencyFields(), + isTermFrequenciesRequired(), + getLimitFields(), + getMatchingFieldSets(), + isLimitFieldsPreQueryEvaluation(), + getLimitFieldsField(), + isHitList(), + isDateIndexTimeTravel(), + getIgnoreNonExistentFields(), + getBeginDateCap(), + isFailOutsideValidDateRange(), + isRawTypes(), + getMinSelectivity(), + getIncludeDataTypeAsField(), + getIncludeRecordId(), + getIncludeHierarchyFields(), + getHierarchyFieldOptions(), + getIncludeGroupingContext(), + getDocumentPermutations(), + getFilterMaskedValues(), + isReducedResponse(), + getAllowShortcutEvaluation(), + getSpeculativeScanning(), + isDisableEvaluation(), + isContainsIndexOnlyTerms(), + isContainsCompositeTerms(), + isAllowFieldIndexEvaluation(), + isAllowTermFrequencyLookup(), + isExpandUnfieldedNegations(), + getReturnType(), + getEventPerDayThreshold(), + getShardsPerDayThreshold(), + getInitialMaxTermThreshold(), + getIntermediateMaxTermThreshold(), + getIndexedMaxTermThreshold(), + getFinalMaxTermThreshold(), + getMaxDepthThreshold(), + isExpandFields(), + getMaxUnfieldedExpansionThreshold(), + isExpandValues(), + getMaxValueExpansionThreshold(), + getMaxOrExpansionThreshold(), + getMaxOrRangeThreshold(), + getMaxOrRangeIvarators(), + getMaxRangesPerRangeIvarator(), + getMaxOrExpansionFstThreshold(), + 
getYieldThresholdMs(), + getHdfsSiteConfigURLs(), + getHdfsFileCompressionCodec(), + getZookeeperConfig(), + getIvaratorCacheDirConfigs(), + getIvaratorFstHdfsBaseURIs(), + getIvaratorCacheBufferSize(), + getIvaratorCacheScanPersistThreshold(), + getIvaratorCacheScanTimeout(), + getMaxFieldIndexRangeSplit(), + getIvaratorMaxOpenFiles(), + getIvaratorNumRetries(), + isIvaratorPersistVerify(), + getIvaratorPersistVerifyCount(), + getMaxIvaratorSources(), + getMaxIvaratorSourceWait(), + getMaxIvaratorResults(), + getMaxIvaratorTerms(), + getMaxEvaluationPipelines(), + getMaxPipelineCachedResults(), + isExpandAllTerms(), + getQueryModel(), + getModelName(), + getModelTableName(), + shouldLimitTermExpansionToModel, + isCompressServerSideResults(), + isIndexOnlyFilterFunctionsEnabled(), + isCompositeFilterFunctionsEnabled(), + getGroupFieldsBatchSize(), + getAccrueStats(), + getGroupFields(), + getUniqueFields(), + getUniqueCacheBufferSize(), + getCacheModel(), + isTrackSizes(), + getContentFieldNames(), + getActiveQueryLogNameSource(), + getEnforceUniqueConjunctionsWithinExpression(), + getEnforceUniqueDisjunctionsWithinExpression(), + getNoExpansionFields(), + getBloom(), + getLenientFields(), + getStrictFields(), + getExcerptFields(), + getFiFieldSeek(), + getFiNextSeek(), + getEventFieldSeek(), + getEventNextSeek(), + getTfFieldSeek(), + getTfNextSeek(), + getVisitorFunctionMaxWeight(), + getQueryExecutionForPageTimeout(), + isLazySetMechanismEnabled(), + getDocAggregationThresholdMs(), + getTfAggregationThresholdMs(), + getPruneQueryOptions(), + getUseFieldCounts(), + getUseTermCounts(), + isSortQueryBeforeGlobalIndex(), + isSortQueryByCounts()); + // @formatter:on + } + + // Part of the Serializable interface used to initialize any transient members during deserialization + protected Object readResolve() throws ObjectStreamException { + this.timers = new QueryStopwatch(); + this.fstCount = new AtomicInteger(0); + return this; + } } diff --git 
a/warehouse/query-core/src/main/java/datawave/query/config/TermFrequencyQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/TermFrequencyQueryConfiguration.java index d238fe33ca3..892f947ed89 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/TermFrequencyQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/TermFrequencyQueryConfiguration.java @@ -1,17 +1,20 @@ package datawave.query.config; +import java.io.IOException; +import java.io.Serializable; +import java.util.Objects; + import org.apache.accumulo.core.data.Range; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.microservice.query.Query; -public class TermFrequencyQueryConfiguration extends GenericQueryConfiguration { +public class TermFrequencyQueryConfiguration extends GenericQueryConfiguration implements Serializable { private static final long serialVersionUID = 1L; - private Range range = null; - private Query query; + private transient Range range = null; public TermFrequencyQueryConfiguration(BaseQueryLogic configuredLogic, Query query) { super(configuredLogic); @@ -26,11 +29,37 @@ public void setRange(Range range) { this.range = range; } - public Query getQuery() { - return query; + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + TermFrequencyQueryConfiguration that = (TermFrequencyQueryConfiguration) o; + return Objects.equals(range, that.range); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), range); + } + + private void writeObject(java.io.ObjectOutputStream out) throws IOException { + 
out.defaultWriteObject(); + out.writeBoolean(range != null); + if (range != null) { + range.write(out); + } } - public void setQuery(Query query) { - this.query = query; + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + if (in.readBoolean()) { + Range range = new Range(); + range.readFields(in); + this.range = range; + } } } diff --git a/warehouse/query-core/src/main/java/datawave/query/dashboard/DashboardQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/dashboard/DashboardQueryLogic.java index 5f49ae25fed..5a5ee52f3ec 100644 --- a/warehouse/query-core/src/main/java/datawave/query/dashboard/DashboardQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/dashboard/DashboardQueryLogic.java @@ -6,15 +6,14 @@ import org.apache.commons.collections4.iterators.TransformIterator; +import datawave.core.common.extjs.ExtJsResponse; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.dashboard.DashboardFields; +import datawave.core.query.dashboard.DashboardSummary; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.ResponseEnricher; +import datawave.microservice.query.Query; import datawave.query.tables.ShardQueryLogic; -import datawave.webservice.common.extjs.ExtJsResponse; -import datawave.webservice.query.Query; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.dashboard.DashboardFields; -import datawave.webservice.query.dashboard.DashboardSummary; -import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.ResponseEnricher; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.BaseQueryResponse; diff --git a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveredThing.java 
b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveredThing.java index 5198f57db5a..ec0987fdb88 100644 --- a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveredThing.java +++ b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveredThing.java @@ -11,7 +11,10 @@ import com.google.common.base.Objects; +import datawave.core.query.configuration.ResultContext; + public class DiscoveredThing implements WritableComparable { + private ResultContext context; private String term, field, type, date, columnVisibility; private final VLongWritable count; private final MapWritable countsByColumnVisibility; diff --git a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryLogic.java b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryLogic.java index 59c2ba05d98..91424b1afb1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryLogic.java @@ -8,6 +8,7 @@ import java.util.Arrays; import java.util.Calendar; import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.Iterator; @@ -21,7 +22,6 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.client.ScannerBase; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Range; @@ -47,7 +47,10 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; import datawave.data.type.Type; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.QueryParameters; 
import datawave.query.discovery.FindLiteralsAndPatternsVisitor.QueryValues; @@ -63,8 +66,6 @@ import datawave.query.tables.ScannerFactory; import datawave.query.tables.ShardIndexQueryTable; import datawave.query.util.MetadataHelper; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; public class DiscoveryLogic extends ShardIndexQueryTable { @@ -74,9 +75,7 @@ public class DiscoveryLogic extends ShardIndexQueryTable { public static final String SEPARATE_COUNTS_BY_COLVIS = "separate.counts.by.colvis"; public static final String SHOW_REFERENCE_COUNT = "show.reference.count"; public static final String REVERSE_INDEX = "reverse.index"; - - private Boolean separateCountsByColVis = false; - private Boolean showReferenceCount = false; + private DiscoveryQueryConfiguration config; private MetadataHelper metadataHelper; public DiscoveryLogic() { @@ -85,15 +84,21 @@ public DiscoveryLogic() { public DiscoveryLogic(DiscoveryLogic other) { super(other); - this.separateCountsByColVis = other.separateCountsByColVis; - this.showReferenceCount = other.showReferenceCount; this.metadataHelper = other.metadataHelper; } @Override - public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { - DiscoveryQueryConfiguration config = new DiscoveryQueryConfiguration(this, settings); + public DiscoveryQueryConfiguration getConfig() { + if (this.config == null) { + this.config = DiscoveryQueryConfiguration.create(); + } + return this.config; + } + + @Override + public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { + this.config = new DiscoveryQueryConfiguration(this, settings); this.scannerFactory = new ScannerFactory(client); this.metadataHelper = initializeMetadataHelper(client, config.getMetadataTableName(), auths); @@ -109,80 +114,69 @@ public 
GenericQueryConfiguration initialize(AccumuloClient client, Query setting // Check if the default modelName and modelTableNames have been overriden by custom parameters. if (null != settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME) && !settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME).getParameterValue().trim().isEmpty()) { - modelName = settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME).getParameterValue().trim(); + setModelName(settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME).getParameterValue().trim()); } if (null != settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME) && !settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME).getParameterValue().trim().isEmpty()) { - modelTableName = settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME).getParameterValue().trim(); + setModelTableName(settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME).getParameterValue().trim()); } // Check if user would like counts separated by column visibility if (null != settings.findParameter(SEPARATE_COUNTS_BY_COLVIS) && !settings.findParameter(SEPARATE_COUNTS_BY_COLVIS).getParameterValue().trim().isEmpty()) { - separateCountsByColVis = Boolean.valueOf(settings.findParameter(SEPARATE_COUNTS_BY_COLVIS).getParameterValue().trim()); - config.setSeparateCountsByColVis(separateCountsByColVis); + boolean separateCountsByColVis = Boolean.valueOf(settings.findParameter(SEPARATE_COUNTS_BY_COLVIS).getParameterValue().trim()); + getConfig().setSeparateCountsByColVis(separateCountsByColVis); } // Check if user would like to show reference counts instead of term counts if (null != settings.findParameter(SHOW_REFERENCE_COUNT) && !settings.findParameter(SHOW_REFERENCE_COUNT).getParameterValue().trim().isEmpty()) { - showReferenceCount = Boolean.valueOf(settings.findParameter(SHOW_REFERENCE_COUNT).getParameterValue().trim()); - config.setShowReferenceCount(showReferenceCount); + boolean showReferenceCount = 
Boolean.valueOf(settings.findParameter(SHOW_REFERENCE_COUNT).getParameterValue().trim()); + getConfig().setShowReferenceCount(showReferenceCount); } - - this.queryModel = metadataHelper.getQueryModel(modelTableName, modelName, null); - + setQueryModel(metadataHelper.getQueryModel(getModelTableName(), getModelName(), null)); // get the data type filter set if any if (null != settings.findParameter(QueryParameters.DATATYPE_FILTER_SET) && !settings.findParameter(QueryParameters.DATATYPE_FILTER_SET).getParameterValue().trim().isEmpty()) { Set dataTypeFilter = new HashSet<>(Arrays.asList(StringUtils .split(settings.findParameter(QueryParameters.DATATYPE_FILTER_SET).getParameterValue().trim(), Constants.PARAM_VALUE_SEP))); - config.setDatatypeFilter(dataTypeFilter); + getConfig().setDatatypeFilter(dataTypeFilter); if (log.isDebugEnabled()) { log.debug("Data type filter set to " + dataTypeFilter); } } // Set the connector - config.setClient(client); - + getConfig().setClient(client); // Set the auths - config.setAuthorizations(auths); - - // set the table names - if (getIndexTableName() != null) { - config.setIndexTableName(getIndexTableName()); - } - if (getReverseIndexTableName() != null) { - config.setReverseIndexTableName(getReverseIndexTableName()); - } + getConfig().setAuthorizations(auths); // Get the ranges - config.setBeginDate(settings.getBeginDate()); - config.setEndDate(settings.getEndDate()); + getConfig().setBeginDate(settings.getBeginDate()); + getConfig().setEndDate(settings.getEndDate()); - if (null == config.getBeginDate() || null == config.getEndDate()) { - config.setBeginDate(new Date(0)); - config.setEndDate(new Date(Long.MAX_VALUE)); + if (null == getConfig().getBeginDate() || null == getConfig().getEndDate()) { + getConfig().setBeginDate(new Date(0)); + getConfig().setEndDate(new Date(Long.MAX_VALUE)); log.warn("Dates not specified, using entire date range"); } // start with a trimmed version of the query, converted to JEXL LuceneToJexlQueryParser 
parser = new LuceneToJexlQueryParser(); - parser.setAllowLeadingWildCard(this.isAllowLeadingWildcard()); + parser.setAllowLeadingWildCard(isAllowLeadingWildcard()); QueryNode node = parser.parse(settings.getQuery().trim()); // TODO: Validate that this is a simple list of terms type of query - config.setQueryString(node.getOriginalQuery()); + getConfig().setQueryString(node.getOriginalQuery()); if (log.isDebugEnabled()) { log.debug("Original Query = " + settings.getQuery().trim()); log.debug("JEXL Query = " + node.getOriginalQuery()); } // Parse & flatten the query - ASTJexlScript script = JexlASTHelper.parseAndFlattenJexlQuery(config.getQueryString()); + ASTJexlScript script = JexlASTHelper.parseAndFlattenJexlQuery(getConfig().getQueryString()); - script = CaseSensitivityVisitor.upperCaseIdentifiers(config, metadataHelper, script); + script = CaseSensitivityVisitor.upperCaseIdentifiers(getConfig(), metadataHelper, script); - Set dataTypes = config.getDatatypeFilter(); + Set dataTypes = getConfig().getDatatypeFilter(); Set allFields; allFields = metadataHelper.getAllFields(dataTypes); script = QueryModelVisitor.applyModel(script, getQueryModel(), allFields); @@ -190,67 +184,99 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting QueryValues literalsAndPatterns = FindLiteralsAndPatternsVisitor.find(script); Stopwatch timer = Stopwatch.createStarted(); // no caching for getAllNormalizers, so try some magic with getFields... 
- Multimap> dataTypeMap = ArrayListMultimap.create(metadataHelper.getFieldsToDatatypes(config.getDatatypeFilter())); + Multimap> dataTypeMap = ArrayListMultimap.create(metadataHelper.getFieldsToDatatypes(getConfig().getDatatypeFilter())); /* * we have a mapping of FIELD->DataType, but not a mapping of ANYFIELD->DataType which should be all dataTypes */ dataTypeMap.putAll(Constants.ANY_FIELD, uniqueByType(dataTypeMap.values())); timer.stop(); log.debug("Took " + timer.elapsed(TimeUnit.MILLISECONDS) + "ms to get all the dataTypes."); - config.setLiterals(normalize(new LiteralNormalization(), literalsAndPatterns.getLiterals(), dataTypeMap)); - config.setPatterns(normalize(new PatternNormalization(), literalsAndPatterns.getPatterns(), dataTypeMap)); - config.setRanges(normalizeRanges(new LiteralNormalization(), literalsAndPatterns.getRanges(), dataTypeMap)); - + getConfig().setLiterals(normalize(new LiteralNormalization(), literalsAndPatterns.getLiterals(), dataTypeMap)); + getConfig().setPatterns(normalize(new PatternNormalization(), literalsAndPatterns.getPatterns(), dataTypeMap)); + getConfig().setRanges(normalizeRanges(new LiteralNormalization(), literalsAndPatterns.getRanges(), dataTypeMap)); if (log.isDebugEnabled()) { - log.debug("Normalized Literals = " + config.getLiterals()); - log.debug("Normalized Patterns = " + config.getPatterns()); + log.debug("Normalized Literals = " + getConfig().getLiterals()); + log.debug("Normalized Patterns = " + getConfig().getPatterns()); } - return config; + getConfig().setQueries(createQueries(getConfig())); + + return getConfig(); } - @Override - public void setupQuery(GenericQueryConfiguration genericConfig) throws QueryException, TableNotFoundException, IOException, ExecutionException { - DiscoveryQueryConfiguration config = (DiscoveryQueryConfiguration) genericConfig; - List> iterators = Lists.newArrayList(); - Set familiesToSeek = Sets.newHashSet(); - Pair,Set> seekRanges = makeRanges(config, familiesToSeek, 
metadataHelper); + public List createQueries(DiscoveryQueryConfiguration config) throws QueryException, TableNotFoundException, IOException, ExecutionException { + final List queries = Lists.newLinkedList(); + + Set familiesToSeek = Sets.newHashSet(); + Pair,Set> seekRanges = makeRanges(getConfig(), familiesToSeek, metadataHelper); Collection forward = seekRanges.getValue0(); + if (!forward.isEmpty()) { - BatchScanner bs = configureBatchScannerForDiscovery(config, scannerFactory, config.getIndexTableName(), forward, familiesToSeek, - config.getLiterals(), config.getPatterns(), config.getRanges(), false); - iterators.add(transformScanner(bs)); + List settings = getIteratorSettingsForDiscovery(getConfig(), getConfig().getLiterals(), getConfig().getPatterns(), + getConfig().getRanges(), false); + if (isCheckpointable()) { + // if checkpointable, then only one range per query data so that the whole checkpointing thing works correctly + for (Range range : forward) { + queries.add(new QueryData(config.getIndexTableName(), null, Collections.singleton(range), familiesToSeek, settings)); + } + } else { + queries.add(new QueryData(config.getIndexTableName(), null, forward, familiesToSeek, settings)); + } } + Collection reverse = seekRanges.getValue1(); if (!reverse.isEmpty()) { - BatchScanner bs = configureBatchScannerForDiscovery(config, scannerFactory, config.getReverseIndexTableName(), reverse, familiesToSeek, - config.getLiterals(), config.getPatterns(), config.getRanges(), true); - iterators.add(transformScanner(bs)); + List settings = getIteratorSettingsForDiscovery(getConfig(), getConfig().getLiterals(), getConfig().getPatterns(), + getConfig().getRanges(), true); + if (isCheckpointable()) { + // if checkpointable, then only one range per query data so that the whole checkpointing thing works correctly + for (Range range : reverse) { + queries.add(new QueryData(config.getReverseIndexTableName(), null, Collections.singleton(range), familiesToSeek, settings)); + } + } 
else { + queries.add(new QueryData(config.getReverseIndexTableName(), null, reverse, familiesToSeek, settings)); + } } - config.setSeparateCountsByColVis(separateCountsByColVis); - config.setShowReferenceCount(showReferenceCount); + if (log.isDebugEnabled()) { + log.debug("Created ranges: " + queries); + } - this.iterator = concat(iterators.iterator()); + return queries; } - public static BatchScanner configureBatchScannerForDiscovery(DiscoveryQueryConfiguration config, ScannerFactory scannerFactory, String tableName, - Collection seekRanges, Set columnFamilies, Multimap literals, Multimap patterns, - Multimap> ranges, boolean reverseIndex) throws TableNotFoundException { - - // if we have no ranges, then nothing to scan - if (seekRanges.isEmpty()) { - return null; + @Override + public void setupQuery(GenericQueryConfiguration genericConfig) throws QueryException, TableNotFoundException, IOException, ExecutionException { + if (!genericConfig.getClass().getName().equals(DiscoveryQueryConfiguration.class.getName())) { + throw new QueryException("Did not receive a DiscoveryQueryConfiguration instance!!"); } + this.config = (DiscoveryQueryConfiguration) genericConfig; + final List> iterators = Lists.newArrayList(); - BatchScanner bs = scannerFactory.newScanner(tableName, config.getAuthorizations(), config.getNumQueryThreads(), config.getQuery()); - bs.setRanges(seekRanges); - if (!columnFamilies.isEmpty()) { - for (Text family : columnFamilies) { - bs.fetchColumnFamily(family); + for (QueryData qd : config.getQueries()) { + if (log.isDebugEnabled()) { + log.debug("Creating scanner for " + qd); + } + // scan the table + BatchScanner bs = scannerFactory.newScanner(qd.getTableName(), config.getAuthorizations(), config.getNumQueryThreads(), config.getQuery()); + + bs.setRanges(qd.getRanges()); + for (IteratorSetting setting : qd.getSettings()) { + bs.addScanIterator(setting); } + for (String cf : qd.getColumnFamilies()) { + bs.fetchColumnFamily(new Text(cf)); + } + + 
iterators.add(transformScanner(bs, qd)); } + this.iterator = concat(iterators.iterator()); + } + + public static List getIteratorSettingsForDiscovery(DiscoveryQueryConfiguration config, Multimap literals, + Multimap patterns, Multimap> ranges, boolean reverseIndex) { + List settings = Lists.newLinkedList(); // The begin date from the query may be down to the second, for doing lookups in the index we want to use the day because // the times in the index table have been truncated to the day. Date begin = DateUtils.truncate(config.getBeginDate(), Calendar.DAY_OF_MONTH); @@ -259,10 +285,13 @@ public static BatchScanner configureBatchScannerForDiscovery(DiscoveryQueryConfi LongRange dateRange = new LongRange(begin.getTime(), end.getTime()); - ShardIndexQueryTableStaticMethods.configureGlobalIndexDateRangeFilter(config, bs, dateRange); - ShardIndexQueryTableStaticMethods.configureGlobalIndexDataTypeFilter(config, bs, config.getDatatypeFilter()); + settings.add(ShardIndexQueryTableStaticMethods.configureGlobalIndexDateRangeFilter(config, dateRange)); + settings.add(ShardIndexQueryTableStaticMethods.configureGlobalIndexDataTypeFilter(config, config.getDatatypeFilter())); - configureIndexMatchingIterator(config, bs, literals, patterns, ranges, reverseIndex); + IteratorSetting matchingIterator = configureIndexMatchingIterator(config, literals, patterns, ranges, reverseIndex); + if (matchingIterator != null) { + settings.add(matchingIterator); + } IteratorSetting discoveryIteratorSetting = new IteratorSetting(config.getBaseIteratorPriority() + 50, DiscoveryIterator.class); discoveryIteratorSetting.addOption(REVERSE_INDEX, Boolean.toString(reverseIndex)); @@ -270,15 +299,15 @@ public static BatchScanner configureBatchScannerForDiscovery(DiscoveryQueryConfi if (config.getShowReferenceCount()) { discoveryIteratorSetting.addOption(SHOW_REFERENCE_COUNT, config.getShowReferenceCount().toString()); } - bs.addScanIterator(discoveryIteratorSetting); + 
settings.add(discoveryIteratorSetting); - return bs; + return settings; } - public static final void configureIndexMatchingIterator(DiscoveryQueryConfiguration config, ScannerBase bs, Multimap literals, + public static final IteratorSetting configureIndexMatchingIterator(DiscoveryQueryConfiguration config, Multimap literals, Multimap patterns, Multimap> ranges, boolean reverseIndex) { if ((literals == null || literals.isEmpty()) && (patterns == null || patterns.isEmpty()) && (ranges == null || ranges.isEmpty())) { - return; + return null; } log.debug("Configuring IndexMatchingIterator with " + literals + " and " + patterns); @@ -317,7 +346,7 @@ public static final void configureIndexMatchingIterator(DiscoveryQueryConfigurat cfg.addOption(IndexMatchingIterator.REVERSE_INDEX, Boolean.toString(reverseIndex)); - bs.addScanIterator(cfg); + return cfg; } @Override @@ -332,12 +361,13 @@ public ShardIndexQueryTable clone() { * a batch scanner * @return iterator for discoveredthings */ - public static Iterator transformScanner(final BatchScanner scanner) { + public static Iterator transformScanner(final BatchScanner scanner, final QueryData queryData) { return concat(transform(scanner.iterator(), new Function,Iterator>() { DataInputBuffer in = new DataInputBuffer(); @Override public Iterator apply(Entry from) { + queryData.setLastResult(from.getKey()); Value value = from.getValue(); in.reset(value.get(), value.getSize()); ArrayWritable aw = new ArrayWritable(DiscoveredThing.class); @@ -375,7 +405,7 @@ public Iterator apply(Entry from) { * for execution exceptions */ @SuppressWarnings("unchecked") - public static Pair,Set> makeRanges(DiscoveryQueryConfiguration config, Set familiesToSeek, MetadataHelper metadataHelper) + public static Pair,Set> makeRanges(DiscoveryQueryConfiguration config, Set familiesToSeek, MetadataHelper metadataHelper) throws TableNotFoundException, ExecutionException { Set forwardRanges = new HashSet<>(); for (Entry literalAndField : 
config.getLiterals().entries()) { @@ -383,7 +413,7 @@ public static Pair,Set> makeRanges(DiscoveryQueryConfiguration // if we're _ANYFIELD_, then use null when making the literal range field = Constants.ANY_FIELD.equals(field) ? null : field; if (field != null) { - familiesToSeek.add(new Text(field)); + familiesToSeek.add(field); } forwardRanges.add(ShardIndexQueryTableStaticMethods.getLiteralRange(field, literal)); } @@ -393,7 +423,7 @@ public static Pair,Set> makeRanges(DiscoveryQueryConfiguration // if we're _ANYFIELD_, then use null when making the literal range field = Constants.ANY_FIELD.equals(field) ? null : field; if (field != null) { - familiesToSeek.add(new Text(field)); + familiesToSeek.add(field); } try { forwardRanges.add(ShardIndexQueryTableStaticMethods.getBoundedRangeRange(range)); @@ -410,7 +440,7 @@ public static Pair,Set> makeRanges(DiscoveryQueryConfiguration ShardIndexQueryTableStaticMethods.RefactoredRangeDescription description; try { if (field != null) { - familiesToSeek.add(new Text(field)); + familiesToSeek.add(field); } description = ShardIndexQueryTableStaticMethods.getRegexRange(field, pattern, false, metadataHelper, config); } catch (JavaRegexParseException e) { @@ -520,19 +550,19 @@ public Set getOptionalQueryParameters() { } public Boolean getSeparateCountsByColVis() { - return separateCountsByColVis; + return getConfig().getSeparateCountsByColVis(); } public void setSeparateCountsByColVis(Boolean separateCountsByColVis) { - this.separateCountsByColVis = separateCountsByColVis; + getConfig().setSeparateCountsByColVis(separateCountsByColVis); } public Boolean getShowReferenceCount() { - return showReferenceCount; + return getConfig().getShowReferenceCount(); } public void setShowReferenceCount(Boolean showReferenceCount) { - this.showReferenceCount = showReferenceCount; + getConfig().setShowReferenceCount(showReferenceCount); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryQueryConfiguration.java 
b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryQueryConfiguration.java index 57828ae33f7..13c8fa25d75 100644 --- a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryQueryConfiguration.java @@ -1,23 +1,95 @@ package datawave.query.discovery; +import java.io.Serializable; +import java.util.Collection; +import java.util.Objects; + import com.google.common.collect.Multimap; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.config.ShardIndexQueryConfiguration; import datawave.query.jexl.LiteralRange; -import datawave.query.tables.ShardIndexQueryTable; -import datawave.webservice.query.Query; /** * Adds the ability to hold on to two multimaps. They map literals and patterns to the fields they were associated with in the query. */ -public class DiscoveryQueryConfiguration extends ShardIndexQueryConfiguration { +public class DiscoveryQueryConfiguration extends ShardIndexQueryConfiguration implements Serializable { private Multimap literals, patterns; private Multimap> ranges; private Boolean separateCountsByColVis = false; private Boolean showReferenceCount = false; - public DiscoveryQueryConfiguration(ShardIndexQueryTable logic, Query query) { - super(logic, query); + public DiscoveryQueryConfiguration() {} + + public DiscoveryQueryConfiguration(DiscoveryQueryConfiguration other) { + super(other); + setSeparateCountsByColVis(other.separateCountsByColVis); + setShowReferenceCount(other.showReferenceCount); + setLiterals(other.literals); + setPatterns(other.patterns); + setRanges(other.ranges); + } + + public DiscoveryQueryConfiguration(DiscoveryLogic logic, Query query) { + this(logic.getConfig()); + setQuery(query); + } + + public DiscoveryQueryConfiguration(DiscoveryQueryConfiguration other, Collection queries) { + super(other, queries); + 
setSeparateCountsByColVis(other.separateCountsByColVis); + setShowReferenceCount(other.showReferenceCount); + setLiterals(other.literals); + setPatterns(other.patterns); + setRanges(other.ranges); + } + + /** + * Factory method that instantiates a fresh DiscoveryQueryConfiguration + * + * @return - a clean DiscoveryQueryConfiguration + */ + public static DiscoveryQueryConfiguration create() { + return new DiscoveryQueryConfiguration(); + } + + /** + * Factory method that returns a deep copy of the provided DiscoveryQueryConfiguration + * + * @param other + * - another instance of a DiscoveryQueryConfiguration + * @return - copy of provided DiscoveryQueryConfiguration + */ + public static DiscoveryQueryConfiguration create(DiscoveryQueryConfiguration other) { + return new DiscoveryQueryConfiguration(other); + } + + /** + * Factory method that creates a DiscoveryQueryConfiguration deep copy from a DiscoveryQueryLogic + * + * @param shardQueryLogic + * - a configured DiscoveryQueryLogic + * @return - a DiscoveryQueryConfiguration + */ + public static DiscoveryQueryConfiguration create(DiscoveryLogic shardQueryLogic) { + DiscoveryQueryConfiguration config = create(shardQueryLogic.getConfig()); + return config; + } + + /** + * Factory method that creates a DiscoveryQueryConfiguration from a DiscoveryQueryLogic and a Query + * + * @param shardQueryLogic + * - a configured DiscoveryQueryLogic + * @param query + * - a configured Query object + * @return - a DiscoveryQueryConfiguration + */ + public static DiscoveryQueryConfiguration create(DiscoveryLogic shardQueryLogic, Query query) { + DiscoveryQueryConfiguration config = create(shardQueryLogic); + config.setQuery(query); + return config; } public Multimap getLiterals() { @@ -60,4 +132,28 @@ public void setShowReferenceCount(Boolean showReferenceCount) { this.showReferenceCount = showReferenceCount; } + + @Override + public DiscoveryQueryConfiguration checkpoint() { + // Create a new config that only contains what is 
needed to execute the specified ranges + return new DiscoveryQueryConfiguration(this, getQueries()); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + if (!super.equals(o)) + return false; + DiscoveryQueryConfiguration that = (DiscoveryQueryConfiguration) o; + return Objects.equals(literals, that.literals) && Objects.equals(patterns, that.patterns) && Objects.equals(ranges, that.ranges) + && Objects.equals(separateCountsByColVis, that.separateCountsByColVis) && Objects.equals(showReferenceCount, that.showReferenceCount); + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), literals, patterns, ranges, separateCountsByColVis, showReferenceCount); + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryTransformer.java index 49ae5cfefe1..b853a8442f3 100644 --- a/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/discovery/DiscoveryTransformer.java @@ -10,15 +10,15 @@ import com.google.common.base.Preconditions; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctions.Exception; +import datawave.microservice.query.Query; import datawave.query.model.QueryModel; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import 
datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -87,6 +87,7 @@ public EventBase transform(DiscoveredThing thing) { metadata.setRow(thing.getTerm()); // duplicate metadata.setTable(logic.getTableName()); event.setMetadata(metadata); + return event; } @@ -115,12 +116,11 @@ public BaseQueryResponse createResponse(List resultList) { } @Override - public List writeToCache(Object o) throws QueryException { - - List cqoList = new ArrayList<>(); + public CacheableQueryRow writeToCache(Object o) throws QueryException { EventBase event = (EventBase) o; CacheableQueryRow cqo = responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); Metadata metadata = event.getMetadata(); cqo.setColFam(metadata.getDataType() + ":" + cqo.getEventId()); cqo.setDataType(metadata.getDataType()); @@ -131,55 +131,47 @@ public List writeToCache(Object o) throws QueryException { for (FieldBase f : fields) { cqo.addColumn(f.getName(), f.getTypedValue(), f.getMarkings(), f.getColumnVisibility(), f.getTimestamp()); } - cqoList.add(cqo); - return cqoList; + return cqo; } @Override - public List readFromCache(List cacheableQueryRowList) { + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + if (this.variableFieldList == null) { + this.variableFieldList = cacheableQueryRow.getVariableColumnNames(); + } + Map markings = cacheableQueryRow.getMarkings(); + String dataType = cacheableQueryRow.getDataType(); + String internalId = cacheableQueryRow.getEventId(); + String row = cacheableQueryRow.getRow(); - List eventList = new ArrayList<>(); + EventBase event = this.responseObjectFactory.getEvent(); - for (CacheableQueryRow cqr : cacheableQueryRowList) { - if (this.variableFieldList == null) { - this.variableFieldList = cqr.getVariableColumnNames(); - } - Map markings = cqr.getMarkings(); - String dataType = cqr.getDataType(); - String 
internalId = cqr.getEventId(); - String row = cqr.getRow(); - - EventBase event = this.responseObjectFactory.getEvent(); - - event.setMarkings(markings); - - Metadata metadata = new Metadata(); - metadata.setDataType(dataType); - metadata.setInternalId(internalId); - metadata.setRow(row); - metadata.setTable(logic.getTableName()); - event.setMetadata(metadata); - - List fieldList = new ArrayList<>(); - Map columnValueMap = cqr.getColumnValues(); - for (Map.Entry entry : columnValueMap.entrySet()) { - String columnName = entry.getKey(); - String columnValue = entry.getValue(); - String columnVisibility = cqr.getColumnVisibility(columnName); - Long columnTimestamp = cqr.getColumnTimestamp(columnName); - Map columnMarkings = cqr.getColumnMarkings(columnName); - FieldBase field = this.responseObjectFactory.getField(); - field.setName(columnName); - field.setMarkings(columnMarkings); - field.setColumnVisibility(columnVisibility); - field.setTimestamp(columnTimestamp); - field.setValue(columnValue); - fieldList.add(field); - } - event.setFields(fieldList); - eventList.add(event); - } + event.setMarkings(markings); - return eventList; + Metadata metadata = new Metadata(); + metadata.setDataType(dataType); + metadata.setInternalId(internalId); + metadata.setRow(row); + metadata.setTable(logic.getTableName()); + event.setMetadata(metadata); + + List fieldList = new ArrayList<>(); + Map columnValueMap = cacheableQueryRow.getColumnValues(); + for (Map.Entry entry : columnValueMap.entrySet()) { + String columnName = entry.getKey(); + String columnValue = entry.getValue(); + String columnVisibility = cacheableQueryRow.getColumnVisibility(columnName); + Long columnTimestamp = cacheableQueryRow.getColumnTimestamp(columnName); + Map columnMarkings = cacheableQueryRow.getColumnMarkings(columnName); + FieldBase field = this.responseObjectFactory.getField(); + field.setName(columnName); + field.setMarkings(columnMarkings); + field.setColumnVisibility(columnVisibility); + 
field.setTimestamp(columnTimestamp); + field.setValue(columnValue); + fieldList.add(field); + } + event.setFields(fieldList); + return event; } } diff --git a/warehouse/query-core/src/main/java/datawave/query/edge/DefaultExtendedEdgeQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/edge/DefaultExtendedEdgeQueryLogic.java index 4f5d31cc023..28fb84cf6e7 100644 --- a/warehouse/query-core/src/main/java/datawave/query/edge/DefaultExtendedEdgeQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/edge/DefaultExtendedEdgeQueryLogic.java @@ -2,35 +2,37 @@ import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Set; -import org.apache.accumulo.core.client.BatchScanner; +import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.data.Range; +import org.apache.accumulo.core.security.Authorizations; import org.apache.commons.jexl3.JexlException; import org.apache.commons.jexl3.parser.ParseException; import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; import datawave.audit.SelectorExtractor; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.data.type.LcNoDiacriticsType; import datawave.data.type.Type; import datawave.edge.util.EdgeKeyUtil; +import datawave.microservice.query.Query; import datawave.query.config.EdgeExtendedSummaryConfiguration; -import datawave.query.config.EdgeQueryConfiguration; import datawave.query.iterator.filter.EdgeFilterIterator; +import datawave.query.tables.ScannerFactory; import datawave.query.tables.edge.EdgeQueryLogic; import datawave.query.tables.edge.contexts.VisitationContext; import datawave.query.transformer.EdgeQueryTransformer; import 
datawave.query.util.MetadataHelper; import datawave.util.StringUtils; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.configuration.QueryData; -import datawave.webservice.query.logic.QueryLogicTransformer; public class DefaultExtendedEdgeQueryLogic extends EdgeQueryLogic { @@ -48,6 +50,10 @@ public DefaultExtendedEdgeQueryLogic() { public DefaultExtendedEdgeQueryLogic(DefaultExtendedEdgeQueryLogic logic) { super(logic); + + // Set EdgeQueryConfiguration variables + this.config = EdgeExtendedSummaryConfiguration.create(logic); + summaryInputType = logic.isSummaryInputType(); summaryOutputType = logic.isSummaryOutputType(); allowOverrideIO = logic.isAllowOverrideIO(); @@ -55,41 +61,55 @@ public DefaultExtendedEdgeQueryLogic(DefaultExtendedEdgeQueryLogic logic) { } @Override - public DefaultExtendedEdgeQueryLogic clone() { - return new DefaultExtendedEdgeQueryLogic(this); + public EdgeExtendedSummaryConfiguration getConfig() { + if (config == null) { + config = new EdgeExtendedSummaryConfiguration(); + } + return (EdgeExtendedSummaryConfiguration) config; } @Override - protected EdgeQueryConfiguration setUpConfig(Query settings) { - return new EdgeExtendedSummaryConfiguration(this, settings).parseParameters(settings); + public DefaultExtendedEdgeQueryLogic clone() { + return new DefaultExtendedEdgeQueryLogic(this); } @Override - public void setupQuery(GenericQueryConfiguration configuration) throws Exception { - EdgeExtendedSummaryConfiguration localConf = (EdgeExtendedSummaryConfiguration) configuration; + public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { + currentIteratorPriority = super.getBaseIteratorPriority() + 30; - config = (EdgeExtendedSummaryConfiguration) configuration; - prefilterValues = null; - EdgeExtendedSummaryConfiguration.dateType dateFilterType = localConf.getDateRangeType(); + 
EdgeExtendedSummaryConfiguration config = getConfig().parseParameters(settings); + + config.setClient(client); + config.setAuthorizations(auths); - if (log.isTraceEnabled()) { - log.trace("Performing edge table query: " + config.getQueryString()); + String queryString = getJexlQueryString(settings); + + if (null == queryString) { + throw new IllegalArgumentException("Query cannot be null"); + } else { + config.setQueryString(queryString); } + config.setBeginDate(settings.getBeginDate()); + config.setEndDate(settings.getEndDate()); + + scannerFactory = new ScannerFactory(client); + + prefilterValues = null; + EdgeExtendedSummaryConfiguration.dateType dateFilterType = config.getDateRangeType(); + + log.debug("Performing edge table query: " + config.getQueryString()); // TODO check to see if overriding I/O necessary - if (allowOverrideIO && localConf.isOverRideInput()) { - this.summaryInputType = localConf.isSummaryInputType(); + if (allowOverrideIO && config.isOverRideInput()) { + this.summaryInputType = config.isSummaryInputType(); } - if (allowOverrideIO && localConf.isOverRideOutput()) { - this.summaryOutputType = localConf.isAggregateResults(); + if (allowOverrideIO && config.isOverRideOutput()) { + this.summaryOutputType = config.isAggregateResults(); } - boolean includeStats = localConf.includeStats(); - - String queryString = config.getQueryString(); - - MetadataHelper metadataHelper = super.prepareMetadataHelper(config.getClient(), config.getModelTableName(), config.getAuthorizations()); + boolean includeStats = config.includeStats(); + MetadataHelper metadataHelper = super.prepareMetadataHelper(config.getClient(), config.getMetadataTableName(), config.getAuthorizations()); loadQueryModel(metadataHelper, config); // Don't apply model if this.summaryInputType == true, which indicates that @@ -105,8 +125,7 @@ public void setupQuery(GenericQueryConfiguration configuration) throws Exception String normalizedQuery = ""; String statsNormalizedQuery = ""; - 
QueryData qData = configureRanges(queryString); - setRanges(qData.getRanges()); + Set ranges = configureRanges(queryString); VisitationContext context = null; if (this.summaryInputType == false) { @@ -118,20 +137,20 @@ public void setupQuery(GenericQueryConfiguration configuration) throws Exception log.trace("Jexl after normalizing both vertices: " + normalizedQuery); } } catch (JexlException ex) { - try { - log.error("Error parsing user query.", ex); - } catch (Exception ex2) { - log.error("Exception thrown by logger (???)"); - } + log.error("Error parsing user query.", ex); } } - if ((null == normalizedQuery || normalizedQuery.equals("")) && qData.getRanges().size() < 1) { + if ((null == normalizedQuery || normalizedQuery.equals("")) && ranges.size() < 1) { throw new IllegalStateException("Query string is empty after initial processing, no ranges or filters can be generated to execute."); } - addIterators(qData, getDateBasedIterators(config.getBeginDate(), config.getEndDate(), currentIteratorPriority, dateFilterSkipLimit, dateFilterScanLimit, - dateFilterType)); + QueryData qData = new QueryData(); + qData.setTableName(config.getTableName()); + qData.setRanges(ranges); + + addIterators(qData, getDateBasedIterators(config.getBeginDate(), config.getEndDate(), currentIteratorPriority, config.getDateFilterSkipLimit(), + config.getDateFilterScanLimit(), dateFilterType)); if (!normalizedQuery.equals("")) { if (log.isTraceEnabled()) { @@ -159,39 +178,24 @@ public void setupQuery(GenericQueryConfiguration configuration) throws Exception addIterator(qData, edgeIteratorSetting); } - if (log.isTraceEnabled()) { - log.trace("Configuring connection: tableName: " + config.getTableName() + ", auths: " + config.getAuthorizations()); - } - - BatchScanner scanner = createBatchScanner(config); - - if (log.isTraceEnabled()) { - log.trace("Using the following ranges: " + qData.getRanges()); - } - if (context != null && context.isHasAllCompleteColumnFamilies()) { for (Text 
columnFamily : context.getColumnFamilies()) { - scanner.fetchColumnFamily(columnFamily); + qData.addColumnFamily(columnFamily); } - } - scanner.setRanges(qData.getRanges()); addCustomFilters(qData, currentIteratorPriority); - for (IteratorSetting setting : qData.getSettings()) { - scanner.addScanIterator(setting); - } + config.setQueries(Collections.singletonList(qData)); - this.scanner = scanner; - this.iterator = scanner.iterator(); + return config; } @Override - protected QueryData configureRanges(String queryString) throws ParseException { + protected Set configureRanges(String queryString) throws ParseException { if (this.summaryInputType) { Set ranges = computeRanges((EdgeExtendedSummaryConfiguration) this.config); - return new QueryData().withRanges(ranges); + return ranges; } else { return super.configureRanges(queryString); } @@ -241,12 +245,12 @@ protected Collection normalizeQualifiedSource(String qualifiedSource) { @Override public QueryLogicTransformer getTransformer(Query settings) { - return new EdgeQueryTransformer(settings, this.markingFunctions, this.responseObjectFactory); + return new EdgeQueryTransformer(settings, this.markingFunctions, this.responseObjectFactory, this.getEdgeFields()); } @Override public List getSelectors(Query settings) throws IllegalArgumentException { - EdgeExtendedSummaryConfiguration conf = (EdgeExtendedSummaryConfiguration) setUpConfig(settings); + EdgeExtendedSummaryConfiguration conf = new EdgeExtendedSummaryConfiguration().parseParameters(settings); List selectorList = null; SelectorExtractor selExtr; @@ -300,4 +304,5 @@ public void setAllowOverrideIO(boolean allowOverrideIO) { public void setListSelectorExtractor(SelectorExtractor listSelectorExtractor) { this.listSelectorExtractor = listSelectorExtractor; } + } diff --git a/warehouse/query-core/src/main/java/datawave/query/index/lookup/EntryParser.java b/warehouse/query-core/src/main/java/datawave/query/index/lookup/EntryParser.java index 2c3d97bbe28..faec849a3a3 
100644 --- a/warehouse/query-core/src/main/java/datawave/query/index/lookup/EntryParser.java +++ b/warehouse/query-core/src/main/java/datawave/query/index/lookup/EntryParser.java @@ -9,17 +9,15 @@ import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.IOException; -import java.util.Map.Entry; import java.util.Set; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; import org.apache.commons.jexl3.parser.ASTEQNode; import org.apache.commons.jexl3.parser.JexlNode; import org.apache.log4j.Logger; import com.google.common.base.Function; +import datawave.core.query.configuration.Result; import datawave.query.jexl.JexlNodeFactory; import datawave.query.jexl.nodes.QueryPropertyMarker; import datawave.query.jexl.visitors.JexlStringBuildingVisitor; @@ -33,7 +31,7 @@ * * A delayed predicate node is build if the IndexInfo does not have any document ids or if the column qualifier indicates a day range. */ -public class EntryParser implements Function,Tuple2> { +public class EntryParser implements Function> { protected ASTEQNode currNode; protected String fieldName; @@ -63,7 +61,7 @@ public EntryParser(ASTEQNode node, String fieldName, String literal, Set } @Override - public Tuple2 apply(Entry entry) { + public Tuple2 apply(Result entry) { IndexInfo info = new IndexInfo(); try { info.readFields(new DataInputStream(new ByteArrayInputStream(entry.getValue().get()))); diff --git a/warehouse/query-core/src/main/java/datawave/query/index/lookup/RangeStream.java b/warehouse/query-core/src/main/java/datawave/query/index/lookup/RangeStream.java index 89341a76d0c..7da86528ab8 100644 --- a/warehouse/query-core/src/main/java/datawave/query/index/lookup/RangeStream.java +++ b/warehouse/query-core/src/main/java/datawave/query/index/lookup/RangeStream.java @@ -67,6 +67,7 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import 
datawave.data.type.Type; import datawave.query.CloseableIterable; import datawave.query.Constants; @@ -98,7 +99,6 @@ import datawave.query.util.TypeMetadata; import datawave.util.StringUtils; import datawave.util.time.DateHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.PreConditionFailedQueryException; import datawave.webservice.query.exception.QueryException; @@ -261,7 +261,8 @@ public Iterator iterator() { } } - this.itr = filter(concat(transform(queryStream, new TupleToRange(queryStream.currentNode(), config))), getEmptyPlanPruner()); + this.itr = filter(concat(transform(queryStream, new TupleToRange(config.getShardTableName(), queryStream.currentNode(), config))), + getEmptyPlanPruner()); if (config.isSortQueryByCounts() && (config.getUseFieldCounts() || config.getUseTermCounts())) { this.itr = transform(itr, new OrderingTransform(config.getUseFieldCounts(), config.getUseTermCounts())); diff --git a/warehouse/query-core/src/main/java/datawave/query/index/lookup/ShardRangeStream.java b/warehouse/query-core/src/main/java/datawave/query/index/lookup/ShardRangeStream.java index cb8c1b72db5..2b437ea61c5 100644 --- a/warehouse/query-core/src/main/java/datawave/query/index/lookup/ShardRangeStream.java +++ b/warehouse/query-core/src/main/java/datawave/query/index/lookup/ShardRangeStream.java @@ -128,6 +128,7 @@ public QueryPlan apply(Entry entry) { Range range = new Range(start, true, end, false); // @formatter:off return new QueryPlan() + .withTableName(config.getShardTableName()) .withQueryTree(node) .withRanges(Collections.singleton(range)); // @formatter:on diff --git a/warehouse/query-core/src/main/java/datawave/query/index/lookup/TupleToRange.java b/warehouse/query-core/src/main/java/datawave/query/index/lookup/TupleToRange.java index ae46320ade8..db8fd9a2beb 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/index/lookup/TupleToRange.java +++ b/warehouse/query-core/src/main/java/datawave/query/index/lookup/TupleToRange.java @@ -24,6 +24,7 @@ public class TupleToRange implements Function,Iterator> { private static final Logger log = Logger.getLogger(TupleToRange.class); + protected String tableName; protected JexlNode currentScript; protected JexlNode tree = null; protected ShardQueryConfiguration config; @@ -34,7 +35,8 @@ public class TupleToRange implements Function,Iterator< * @param config * a configuration */ - public TupleToRange(JexlNode currentNode, ShardQueryConfiguration config) { + public TupleToRange(String tableName, JexlNode currentNode, ShardQueryConfiguration config) { + this.tableName = tableName; this.currentScript = currentNode; this.config = config; } @@ -57,15 +59,15 @@ public Iterator apply(Tuple2 tuple) { if (isDocumentRange(indexInfo)) { - return createDocumentRanges(queryNode, shard, indexInfo, config.isTldQuery()); + return createDocumentRanges(tableName, queryNode, shard, indexInfo, config.isTldQuery()); } else if (isShardRange(shard)) { - return createShardRange(queryNode, shard, indexInfo); + return createShardRange(tableName, queryNode, shard, indexInfo); } else { - return createDayRange(queryNode, shard, indexInfo); + return createDayRange(tableName, queryNode, shard, indexInfo); } } @@ -103,7 +105,7 @@ public static boolean isShardRange(String shard) { * check for tld query * @return an iterator of query plans */ - public static Iterator createDocumentRanges(JexlNode queryNode, String shard, IndexInfo indexInfo, boolean isTldQuery) { + public static Iterator createDocumentRanges(String tableName, JexlNode queryNode, String shard, IndexInfo indexInfo, boolean isTldQuery) { List queryPlans = Lists.newArrayListWithCapacity(indexInfo.uids().size()); for (IndexMatch indexMatch : indexInfo.uids()) { @@ -129,6 +131,7 @@ public static Iterator createDocumentRanges(JexlNode queryNode, Strin // 
@formatter:off QueryPlan queryPlan = new QueryPlan() + .withTableName(tableName) .withQueryTree(indexMatch.getNode()) .withRanges(Collections.singleton(range)) .withFieldCounts(indexInfo.getFieldCounts()) @@ -140,7 +143,7 @@ public static Iterator createDocumentRanges(JexlNode queryNode, Strin return queryPlans.iterator(); } - public static Iterator createShardRange(JexlNode queryNode, String shard, IndexInfo indexInfo) { + public static Iterator createShardRange(String tableName, JexlNode queryNode, String shard, IndexInfo indexInfo) { JexlNode myNode = queryNode; if (indexInfo.getNode() != null) { myNode = indexInfo.getNode(); @@ -154,6 +157,7 @@ public static Iterator createShardRange(JexlNode queryNode, String sh // @formatter:off QueryPlan queryPlan = new QueryPlan() + .withTableName(tableName) .withQueryTree(myNode) .withRanges(Collections.singleton(range)) .withFieldCounts(indexInfo.getFieldCounts()) @@ -163,7 +167,7 @@ public static Iterator createShardRange(JexlNode queryNode, String sh return Collections.singleton(queryPlan).iterator(); } - public static Iterator createDayRange(JexlNode queryNode, String shard, IndexInfo indexInfo) { + public static Iterator createDayRange(String tableName, JexlNode queryNode, String shard, IndexInfo indexInfo) { JexlNode myNode = queryNode; if (indexInfo.getNode() != null) { myNode = indexInfo.getNode(); @@ -176,6 +180,7 @@ public static Iterator createDayRange(JexlNode queryNode, String shar // @formatter:off QueryPlan queryPlan = new QueryPlan() + .withTableName(tableName) .withQueryTree(myNode) .withRanges(Collections.singleton(range)) .withFieldCounts(indexInfo.getFieldCounts()) diff --git a/warehouse/query-core/src/main/java/datawave/query/iterator/QueryIterator.java b/warehouse/query-core/src/main/java/datawave/query/iterator/QueryIterator.java index fff762e7e98..23954939d0c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/iterator/QueryIterator.java +++ 
b/warehouse/query-core/src/main/java/datawave/query/iterator/QueryIterator.java @@ -359,7 +359,7 @@ public void seek(Range range, Collection columnFamilies, boolean i ActiveQueryLog.getInstance().get(getQueryId()).beginCall(this.originalRange, ActiveQuery.CallType.SEEK); try { - if (this.isIncludeGroupingContext() == false && (this.query.contains("grouping:") || this.query.contains("matchesInGroup") + if (!this.isIncludeGroupingContext() && (this.query.contains("grouping:") || this.query.contains("matchesInGroup") || this.query.contains("MatchesInGroup") || this.query.contains("atomValuesMatch"))) { this.setIncludeGroupingContext(true); this.groupingContextAddedByMe = true; @@ -1084,7 +1084,6 @@ protected Iterator> mapDocument(SortedKeyValueIterator> retDocuments = Iterators.transform(mappedDocuments, new TupleToEntry<>()); // Inject the document permutations if required diff --git a/warehouse/query-core/src/main/java/datawave/query/iterator/filter/EdgeFilterIterator.java b/warehouse/query-core/src/main/java/datawave/query/iterator/filter/EdgeFilterIterator.java index 572b88fcd9a..02ceea8e4cc 100644 --- a/warehouse/query-core/src/main/java/datawave/query/iterator/filter/EdgeFilterIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/iterator/filter/EdgeFilterIterator.java @@ -24,9 +24,7 @@ import com.google.common.collect.HashMultimap; -import datawave.edge.model.EdgeModelAware; -import datawave.edge.model.EdgeModelAware.Fields; -import datawave.edge.model.EdgeModelAware.Fields.FieldKey; +import datawave.edge.model.EdgeModelFields.FieldKey; import datawave.edge.util.EdgeKeyUtil; import datawave.query.jexl.ArithmeticJexlEngines; import datawave.query.jexl.DatawaveJexlEngine; @@ -134,14 +132,14 @@ private void setupContext(JexlContext ctx, Map keyComponents) { if (null != edgeDate) edgeDate = edgeDate.toLowerCase(); - ctx.set(EdgeModelAware.EDGE_SOURCE.toLowerCase(), source); - ctx.set(EdgeModelAware.EDGE_SINK.toLowerCase(), sink); - 
ctx.set(EdgeModelAware.EDGE_TYPE.toLowerCase(), edgeType); - ctx.set(EdgeModelAware.EDGE_RELATIONSHIP.toLowerCase(), edgeRelationship); - ctx.set(EdgeModelAware.EDGE_ATTRIBUTE1.toLowerCase(), edgeAttribute1); - ctx.set(EdgeModelAware.EDGE_ATTRIBUTE2.toLowerCase(), edgeAttribute2); - ctx.set(EdgeModelAware.EDGE_ATTRIBUTE3.toLowerCase(), edgeAttribute3); - ctx.set(EdgeModelAware.DATE.toLowerCase(), edgeDate); + ctx.set(FieldKey.EDGE_SOURCE.name().toLowerCase(), source); + ctx.set(FieldKey.EDGE_SINK.name().toLowerCase(), sink); + ctx.set(FieldKey.EDGE_TYPE.name().toLowerCase(), edgeType); + ctx.set(FieldKey.EDGE_RELATIONSHIP.name().toLowerCase(), edgeRelationship); + ctx.set(FieldKey.EDGE_ATTRIBUTE1.name().toLowerCase(), edgeAttribute1); + ctx.set(FieldKey.EDGE_ATTRIBUTE2.name().toLowerCase(), edgeAttribute2); + ctx.set(FieldKey.EDGE_ATTRIBUTE3.name().toLowerCase(), edgeAttribute3); + ctx.set(FieldKey.DATE.name().toLowerCase(), edgeDate); } /** @@ -207,7 +205,7 @@ private boolean prefilter(Map keyComponents) { boolean retVal = true; if (preFilterValues != null) { for (Map.Entry entry : keyComponents.entrySet()) { - String fieldName = Fields.getInstance().getFieldName(entry.getKey()); + String fieldName = entry.getKey().name(); Set values = preFilterValues.get(fieldName); if (values == null || values.size() < 1) { // if we encountered a regex, we'll just let the jexl engine handle it, or filter it by a different field diff --git a/warehouse/query-core/src/main/java/datawave/query/iterator/ivarator/IvaratorCacheDirConfig.java b/warehouse/query-core/src/main/java/datawave/query/iterator/ivarator/IvaratorCacheDirConfig.java index 4131b0713b8..d861280cc4d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/iterator/ivarator/IvaratorCacheDirConfig.java +++ b/warehouse/query-core/src/main/java/datawave/query/iterator/ivarator/IvaratorCacheDirConfig.java @@ -1,5 +1,6 @@ package datawave.query.iterator.ivarator; +import java.io.Serializable; import 
java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -14,7 +15,7 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; -public class IvaratorCacheDirConfig { +public class IvaratorCacheDirConfig implements Serializable { private static final Logger log = Logger.getLogger(IvaratorCacheDir.class); public static final int DEFAULT_PRIORITY = Integer.MAX_VALUE; @@ -24,23 +25,23 @@ public class IvaratorCacheDirConfig { private static final ObjectMapper objectMapper = new ObjectMapper(); // the base path for caching ivarator output for this filesystem - final protected String basePathURI; + protected String basePathURI; // a number >= 0 used to determine the order in which ivarator cache dirs are used (ascending order) - final protected int priority; + protected int priority; // the minimum amount of available storage required to use this filesystem - final protected long minAvailableStorageMiB; + protected long minAvailableStorageMiB; // the minimum percent of available storage required to use this filesystem - final protected double minAvailableStoragePercent; + protected double minAvailableStoragePercent; static { objectMapper.configure(SerializationFeature.WRITE_SINGLE_ELEM_ARRAYS_UNWRAPPED, true); objectMapper.configure(DeserializationFeature.ACCEPT_SINGLE_VALUE_AS_ARRAY, true); } - protected IvaratorCacheDirConfig() { + private IvaratorCacheDirConfig() { this(null); } @@ -93,18 +94,34 @@ public String getBasePathURI() { return basePathURI; } + public void setBasePathURI(String basePathURI) { + this.basePathURI = basePathURI; + } + public int getPriority() { return priority; } + public void setPriority(int priority) { + this.priority = priority; + } + public long getMinAvailableStorageMiB() { return minAvailableStorageMiB; } + public void setMinAvailableStorageMiB(long minAvailableStorageMiB) { + this.minAvailableStorageMiB = minAvailableStorageMiB; + } + public double 
getMinAvailableStoragePercent() { return minAvailableStoragePercent; } + public void setMinAvailableStoragePercent(double minAvailableStoragePercent) { + this.minAvailableStoragePercent = minAvailableStoragePercent; + } + public static String toJson(IvaratorCacheDirConfig ivaratorCacheDirConfig) throws JsonProcessingException { return toJson(Collections.singletonList(ivaratorCacheDirConfig)); } diff --git a/warehouse/query-core/src/main/java/datawave/query/iterator/profile/EvaluationTrackingNestedIterator.java b/warehouse/query-core/src/main/java/datawave/query/iterator/profile/EvaluationTrackingNestedIterator.java index 9f5d5b71401..e6804440103 100644 --- a/warehouse/query-core/src/main/java/datawave/query/iterator/profile/EvaluationTrackingNestedIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/iterator/profile/EvaluationTrackingNestedIterator.java @@ -25,6 +25,7 @@ public EvaluationTrackingNestedIterator(QuerySpan.Stage stageName, QuerySpan myS @Override public T next() { + mySpan.next(); long start = System.currentTimeMillis(); T next = super.next(); mySpan.addStageTimer(stageName, System.currentTimeMillis() - start); @@ -33,6 +34,7 @@ public T next() { @Override public T move(T minimum) { + mySpan.seek(); long start = System.currentTimeMillis(); T next = super.move(minimum); mySpan.addStageTimer(stageName, System.currentTimeMillis() - start); @@ -41,6 +43,7 @@ public T move(T minimum) { @Override public void seek(Range range, Collection columnFamilies, boolean inclusive) throws IOException { + mySpan.seek(); long start = System.currentTimeMillis(); super.seek(range, columnFamilies, inclusive); mySpan.addStageTimer(stageName, System.currentTimeMillis() - start); diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/AsyncIndexLookup.java b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/AsyncIndexLookup.java index e6e261cd45d..137d21e5c3b 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/AsyncIndexLookup.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/AsyncIndexLookup.java @@ -9,9 +9,9 @@ import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.config.ShardQueryConfiguration; import datawave.query.tables.ScannerFactory; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Abstract index lookup which provides a framework for creating and populating the {@link IndexLookupMap} asynchronously in a separate thread. Async index diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/BoundedRangeIndexLookup.java b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/BoundedRangeIndexLookup.java index afdf773be2c..a3b352f2a98 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/BoundedRangeIndexLookup.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/BoundedRangeIndexLookup.java @@ -24,6 +24,7 @@ import com.google.common.base.Preconditions; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.core.iterators.ColumnQualifierRangeIterator; import datawave.core.iterators.CompositeSeekingIterator; import datawave.core.iterators.TimeoutExceptionIterator; @@ -36,7 +37,6 @@ import datawave.query.jexl.LiteralRange; import datawave.query.tables.ScannerFactory; import datawave.util.time.DateHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.NotFoundQueryException; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/FieldNameIndexLookup.java b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/FieldNameIndexLookup.java index 631e477e9ea..ec1e7a50ed6 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/FieldNameIndexLookup.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/FieldNameIndexLookup.java @@ -21,6 +21,7 @@ import com.google.common.collect.Iterators; import com.google.common.collect.Lists; +import datawave.core.query.configuration.Result; import datawave.query.Constants; import datawave.query.config.ShardQueryConfiguration; import datawave.query.tables.ScannerFactory; @@ -98,7 +99,7 @@ public void submit() { sessions.add(bs); - iter = Iterators.concat(iter, bs); + iter = Iterators.concat(iter, Result.keyValueIterator(bs)); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/RegexIndexLookup.java b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/RegexIndexLookup.java index 6276bed3b8c..96ec7dab0d9 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/RegexIndexLookup.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/RegexIndexLookup.java @@ -25,8 +25,10 @@ import com.google.common.collect.Lists; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.core.iterators.TimeoutExceptionIterator; import datawave.core.iterators.TimeoutIterator; +import datawave.core.query.configuration.Result; import datawave.query.Constants; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; @@ -37,7 +39,6 @@ import datawave.query.tables.ScannerSession; import datawave.query.tables.SessionOptions; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.PreConditionFailedQueryException; @@ -180,7 +181,7 @@ public synchronized void submit() { } forwardLookupData.getSessions().add(bs); - iter = Iterators.concat(iter, 
bs); + iter = Iterators.concat(iter, Result.keyValueIterator(bs)); } forwardLookupData.setTimedScanFuture(execService.submit(createTimedCallable(iter, fields, forwardLookupData, indexLookupMap))); @@ -211,7 +212,7 @@ public synchronized void submit() { } reverseLookupData.getSessions().add(bs); - iter = Iterators.concat(iter, bs); + iter = Iterators.concat(iter, Result.keyValueIterator(bs)); } reverseLookupData.setTimedScanFuture(execService.submit(createTimedCallable(iter, reverseFields, reverseLookupData, indexLookupMap))); diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/ShardIndexQueryTableStaticMethods.java b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/ShardIndexQueryTableStaticMethods.java index dc2db8df405..61a827704ed 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/ShardIndexQueryTableStaticMethods.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/lookups/ShardIndexQueryTableStaticMethods.java @@ -31,6 +31,7 @@ import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; +import com.google.common.collect.Lists; import com.google.common.collect.Sets; import datawave.core.iterators.ColumnQualifierRangeIterator; @@ -578,7 +579,13 @@ public static final void configureGlobalIndexTermMatchingIterator(ShardQueryConf } public static final void setExpansionFields(ShardQueryConfiguration config, ScannerBase bs, boolean reverseIndex, Collection expansionFields) { + for (String field : getColumnFamilies(config, reverseIndex, expansionFields)) { + bs.fetchColumnFamily(new Text(field)); + } + } + public static final List getColumnFamilies(ShardQueryConfiguration config, boolean reverseIndex, Collection expansionFields) { + List cfs = Lists.newLinkedList(); // Now restrict the fields returned to those that are specified and then only those that are indexed or reverse indexed if (expansionFields == null || expansionFields.isEmpty()) { expansionFields = (reverseIndex ? 
config.getReverseIndexedFields() : config.getIndexedFields()); @@ -587,16 +594,14 @@ public static final void setExpansionFields(ShardQueryConfiguration config, Scan expansionFields.retainAll(reverseIndex ? config.getReverseIndexedFields() : config.getIndexedFields()); } if (expansionFields.isEmpty()) { - bs.fetchColumnFamily(new Text(Constants.NO_FIELD)); + cfs.add(Constants.NO_FIELD); } else { - for (String field : expansionFields) { - bs.fetchColumnFamily(new Text(field)); - } + cfs.addAll(expansionFields); } - + return cfs; } - private static final IteratorSetting configureGlobalIndexTermMatchingIterator(ShardQueryConfiguration config, Collection literals, + public static final IteratorSetting configureGlobalIndexTermMatchingIterator(ShardQueryConfiguration config, Collection literals, Collection patterns, boolean reverseIndex, boolean limitToUniqueTerms) { if (CollectionUtils.isEmpty(literals) && CollectionUtils.isEmpty(patterns)) { return null; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/nodes/ExceededOr.java b/warehouse/query-core/src/main/java/datawave/query/jexl/nodes/ExceededOr.java index 5221519fde4..d1c3f09fd9a 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/nodes/ExceededOr.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/nodes/ExceededOr.java @@ -22,9 +22,9 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.JexlNodeFactory; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public class ExceededOr { private static final Logger log = ThreadConfigurableLogger.getLogger(ExceededOr.class); diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/BoundedRangeIndexExpansionVisitor.java 
b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/BoundedRangeIndexExpansionVisitor.java index a5c39038385..6c60e3cc8c8 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/BoundedRangeIndexExpansionVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/BoundedRangeIndexExpansionVisitor.java @@ -13,6 +13,7 @@ import org.apache.commons.jexl3.parser.JexlNode; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.IllegalRangeArgumentException; import datawave.query.jexl.JexlASTHelper; @@ -24,7 +25,6 @@ import datawave.query.jexl.nodes.QueryPropertyMarker; import datawave.query.tables.ScannerFactory; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Visits a Jexl tree, looks for bounded ranges, and replaces them with concrete values from the index diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitor.java index 3365160bef0..5ff8330563c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitor.java @@ -26,7 +26,7 @@ import com.google.common.collect.Sets; import datawave.data.type.Type; -import datawave.edge.model.EdgeModelAware; +import datawave.edge.model.EdgeModelFields; import datawave.edge.util.EdgeKeyUtil; import datawave.query.tables.edge.contexts.EdgeContext; import datawave.query.tables.edge.contexts.IdentityContext; @@ -91,7 +91,7 @@ * {@code (SOURCE == 's1' && SINK == 's2') || (SOURCE == 's2 && SINK == 's2) || ...}
    *
    */ -public class EdgeTableRangeBuildingVisitor extends BaseVisitor implements EdgeModelAware { +public class EdgeTableRangeBuildingVisitor extends BaseVisitor { private static final Logger log = Logger.getLogger(EdgeTableRangeBuildingVisitor.class); protected boolean includeStats; protected List> regexDataTypes; @@ -104,12 +104,14 @@ public class EdgeTableRangeBuildingVisitor extends BaseVisitor implements EdgeMo private long maxTerms = 10000; private boolean sawEquivalenceRegexSource = false; private boolean sawEquivalenceRegexSink = false; + private final EdgeModelFields fields; - public EdgeTableRangeBuildingVisitor(boolean stats, List> types, long maxTerms, List> rTypes) { + public EdgeTableRangeBuildingVisitor(boolean stats, List> types, long maxTerms, List> rTypes, EdgeModelFields fields) { this.includeStats = stats; this.dataTypes = types; this.maxTerms = maxTerms; regexDataTypes = rTypes; + this.fields = fields; } /* @@ -133,7 +135,7 @@ public Object visit(ASTJexlScript node, Object data) { if (context.get(0) instanceof IdentityContext) { // this can only happen if there is no AND node in the query // Build singleton list of QueryContexts then create VisitationContext - QueryContext qContext = new QueryContext(); + QueryContext qContext = new QueryContext(fields); qContext.packageIdentities((List) context); return computeVisitaionContext(Collections.singletonList(qContext)); @@ -186,7 +188,7 @@ public Object visit(ASTAndNode node, Object data) { List childContext = childContexts.remove(childContexts.size() - 1); if ((childContext.get(0) instanceof IdentityContext) && (mergedContext.get(0) instanceof IdentityContext)) { - QueryContext qContext = new QueryContext(); + QueryContext qContext = new QueryContext(fields); qContext.packageIdentities((List) childContext); qContext.packageIdentities((List) mergedContext); @@ -329,7 +331,7 @@ public Object visit(ASTOrNode node, Object data) { else if ((childContext.get(0) instanceof IdentityContext) && 
(mergedContext.get(0) instanceof QueryContext)) { checkNotExclusion((IdentityContext) childContext.get(0), "Can't OR exclusion expressions"); - QueryContext queryContext = new QueryContext(); + QueryContext queryContext = new QueryContext(fields); queryContext.packageIdentities((List) childContext, false); if (isSourceList((List) childContext)) { @@ -342,7 +344,7 @@ else if ((childContext.get(0) instanceof IdentityContext) && (mergedContext.get( } else if ((childContext.get(0) instanceof QueryContext) && (mergedContext.get(0) instanceof IdentityContext)) { checkNotExclusion((IdentityContext) mergedContext.get(0), "Can't OR exclusion expressions"); - QueryContext queryContext = new QueryContext(); + QueryContext queryContext = new QueryContext(fields); queryContext.packageIdentities((List) mergedContext, false); if (isSourceList((List) mergedContext)) { @@ -373,7 +375,7 @@ private void runCombine(List q1, List q2) { } private boolean isSourceList(List context) { - if (context.get(0).getIdentity().equals(EDGE_SOURCE)) { + if (context.get(0).getIdentity().equals(EdgeModelFields.FieldKey.EDGE_SOURCE)) { return true; } else { return false; @@ -401,7 +403,7 @@ private void checkNotExclusion(IdentityContext context, String msg) { @Override public Object visit(ASTEQNode node, Object data) { incrementTermCountAndCheck(); - return visitExpresionNode(node, EQUALS); + return visitExpresionNode(node, EdgeModelFields.EQUALS); } private void incrementTermCountAndCheck() { @@ -427,10 +429,10 @@ private void incrementTermCountAndCheck() { @Override public Object visit(ASTERNode node, Object data) { incrementTermCountAndCheck(); - List contexts = (List) visitExpresionNode(node, EQUALS_REGEX); - if (contexts.get(0).getIdentity().equals(EDGE_SOURCE)) { + List contexts = (List) visitExpresionNode(node, EdgeModelFields.EQUALS_REGEX); + if (contexts.get(0).getIdentity().equals(EdgeModelFields.FieldKey.EDGE_SOURCE)) { sawEquivalenceRegexSource = true; - } else if 
(contexts.get(0).getIdentity().equals(EDGE_SINK)) { + } else if (contexts.get(0).getIdentity().equals(EdgeModelFields.FieldKey.EDGE_SINK)) { sawEquivalenceRegexSink = true; } return contexts; @@ -439,13 +441,13 @@ public Object visit(ASTERNode node, Object data) { @Override public Object visit(ASTNRNode node, Object data) { incrementTermCountAndCheck(); - return visitExpresionNode(node, NOT_EQUALS_REGEX); + return visitExpresionNode(node, EdgeModelFields.NOT_EQUALS_REGEX); } @Override public Object visit(ASTNENode node, Object data) { incrementTermCountAndCheck(); - return visitExpresionNode(node, NOT_EQUALS); + return visitExpresionNode(node, EdgeModelFields.NOT_EQUALS); } private Object visitExpresionNode(SimpleNode node, String operator) { @@ -460,9 +462,10 @@ private Object visitExpresionNode(SimpleNode node, String operator) { String literal = JexlNodes.getIdentifierOrLiteralAsString(node.jjtGetChild(1)); List contexts = new ArrayList<>(); - if (identifier.equals(EDGE_SOURCE) || identifier.equals(EDGE_SINK) || identifier.equals(EDGE_ATTRIBUTE3) || identifier.equals(EDGE_ATTRIBUTE2)) { + if (identifier.equals(fields.getSourceFieldName()) || identifier.equals(fields.getSinkFieldName()) || identifier.equals(fields.getAttribute3FieldName()) + || identifier.equals(fields.getAttribute2FieldName())) { - if (operator.equals(EQUALS_REGEX) || operator.equals(NOT_EQUALS_REGEX)) { + if (operator.equals(EdgeModelFields.EQUALS_REGEX) || operator.equals(EdgeModelFields.NOT_EQUALS_REGEX)) { for (String normalizedLiteral : EdgeKeyUtil.normalizeRegexSource(literal, regexDataTypes, true)) { try { // verify that the normalized regex is valid here instead of letting it fail on tserver // TODO: right now the edge filter iterator calls toLowerCase on the query string by default @@ -470,7 +473,7 @@ private Object visitExpresionNode(SimpleNode node, String operator) { // so for right now throw out any regex's that would cause the edge filter iterator to fail but this should probably 
change in the // future Pattern.compile(normalizedLiteral.toLowerCase()); - IdentityContext iContext = new IdentityContext(identifier, normalizedLiteral, operator); + IdentityContext iContext = new IdentityContext(identifier, normalizedLiteral, operator, fields); contexts.add(iContext); } catch (PatternSyntaxException e) { continue; @@ -482,13 +485,13 @@ private Object visitExpresionNode(SimpleNode node, String operator) { } } else { for (String normalizedLiteral : EdgeKeyUtil.normalizeSource(literal, dataTypes, true)) { - IdentityContext iContext = new IdentityContext(identifier, normalizedLiteral, operator); + IdentityContext iContext = new IdentityContext(identifier, normalizedLiteral, operator, fields); contexts.add(iContext); } } } else { - IdentityContext iContext = new IdentityContext(identifier, literal, operator); + IdentityContext iContext = new IdentityContext(identifier, literal, operator, fields); contexts.add(iContext); } @@ -539,7 +542,8 @@ public Object visit(ASTFunctionNode node, Object data) { sb.append(")"); List contexts = new ArrayList<>(); - IdentityContext iContext = new IdentityContext(FUNCTION, sb.toString(), FUNCTION); + IdentityContext iContext = new IdentityContext(EdgeModelFields.FieldKey.FUNCTION.name(), sb.toString(), EdgeModelFields.FieldKey.FUNCTION.name(), + fields); contexts.add(iContext); return contexts; @@ -593,8 +597,7 @@ private VisitationContext computeVisitaionContext(List queryContex } } } - VisitationContext vContext = new VisitationContext(includeStats); - + VisitationContext vContext = new VisitationContext(fields, includeStats); vContext.setHasAllCompleteColumnFamilies(!includColumnFamilyTerms); for (QueryContext qContext : queryContexts) { diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandCompositeTerms.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandCompositeTerms.java index 510529f46cc..9f3d4ba0202 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandCompositeTerms.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandCompositeTerms.java @@ -39,6 +39,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.type.DiscreteIndexType; import datawave.data.type.NoOpType; import datawave.ingest.data.config.ingest.CompositeIngest; @@ -52,7 +53,6 @@ import datawave.query.jexl.JexlNodeFactory; import datawave.query.jexl.LiteralRange; import datawave.query.jexl.nodes.QueryPropertyMarker; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandMultiNormalizedTerms.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandMultiNormalizedTerms.java index 3e6330571cd..b4b5b56a511 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandMultiNormalizedTerms.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ExpandMultiNormalizedTerms.java @@ -39,6 +39,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.normalizer.IpAddressNormalizer; import datawave.data.type.IpAddressType; import datawave.data.type.OneToManyNormalizerType; @@ -52,7 +53,6 @@ import datawave.query.jexl.nodes.DroppedExpression; import datawave.query.jexl.nodes.QueryPropertyMarker; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git 
a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoFeatureVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoFeatureVisitor.java index d951567c6e5..d0d2beba8b2 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoFeatureVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoFeatureVisitor.java @@ -11,11 +11,11 @@ import org.geotools.geojson.geom.GeometryJSON; import org.locationtech.jts.io.WKTReader; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.jexl.functions.GeoFunctionsDescriptor; import datawave.query.jexl.functions.GeoWaveFunctionsDescriptor; import datawave.query.jexl.functions.JexlFunctionArgumentDescriptorFactory; import datawave.query.jexl.functions.arguments.JexlArgumentDescriptor; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.map.QueryGeometry; /** diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoWavePruningVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoWavePruningVisitor.java index 6103c77e0fe..cb2cb55367c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoWavePruningVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/GeoWavePruningVisitor.java @@ -18,6 +18,7 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.normalizer.GeometryNormalizer; import datawave.data.type.AbstractGeometryType; import datawave.data.type.GeoType; @@ -29,7 +30,6 @@ import datawave.query.util.GeoUtils; import datawave.query.util.GeoWaveUtils; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * This visitor should be run after bounded ranges have been expanded in order to 
check for expanded GeoWave terms which do not intersect with the original diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/IsNotNullPruningVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/IsNotNullPruningVisitor.java index 783c2a1aede..4f6a8c6fb2c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/IsNotNullPruningVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/IsNotNullPruningVisitor.java @@ -60,9 +60,9 @@ import org.apache.commons.jexl3.parser.JexlNodes; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.functions.FunctionJexlNodeVisitor; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * This visitor prunes unnecessary 'is not null' functions from the query tree. diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/JexlFormattedStringBuildingVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/JexlFormattedStringBuildingVisitor.java index 093d9bc082e..10391d0dd6b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/JexlFormattedStringBuildingVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/JexlFormattedStringBuildingVisitor.java @@ -16,11 +16,11 @@ import org.apache.commons.jexl3.parser.JexlNode; import org.apache.commons.jexl3.parser.ParseException; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.nodes.QueryPropertyMarker; -import datawave.webservice.query.QueryImpl.Parameter; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git 
a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushFunctionsIntoExceededValueRanges.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushFunctionsIntoExceededValueRanges.java index aae18477308..3dfb979dd89 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushFunctionsIntoExceededValueRanges.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushFunctionsIntoExceededValueRanges.java @@ -27,12 +27,12 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.functions.JexlFunctionArgumentDescriptorFactory; import datawave.query.jexl.functions.arguments.JexlArgumentDescriptor; import datawave.query.jexl.nodes.QueryPropertyMarker; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Visits an JexlNode tree, pushing functions into exceeded value ranges. This is to enable use of the filtering ivarator instead of simply the range ivarator. 
diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushdownLargeFieldedListsVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushdownLargeFieldedListsVisitor.java index 339d2d9ede8..3e113e0958d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushdownLargeFieldedListsVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/PushdownLargeFieldedListsVisitor.java @@ -42,6 +42,7 @@ import com.google.common.collect.LinkedListMultimap; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.core.iterators.DatawaveFieldIndexListIteratorJexl; import datawave.query.Constants; import datawave.query.config.ShardQueryConfiguration; @@ -50,7 +51,6 @@ import datawave.query.jexl.LiteralRange; import datawave.query.jexl.nodes.ExceededOr; import datawave.query.jexl.nodes.QueryPropertyMarker; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/QueryModelVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/QueryModelVisitor.java index 0fb2589ec9b..dd4f46a3cfa 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/QueryModelVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/QueryModelVisitor.java @@ -41,6 +41,7 @@ import com.google.common.collect.SetMultimap; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.Constants; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.jexl.JexlASTHelper; @@ -48,7 +49,6 @@ import datawave.query.jexl.LiteralRange; import datawave.query.jexl.nodes.QueryPropertyMarker; import 
datawave.query.model.QueryModel; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexFunctionVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexFunctionVisitor.java index e7e43ff3f42..f4bf39807ca 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexFunctionVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexFunctionVisitor.java @@ -13,6 +13,7 @@ import org.apache.commons.jexl3.parser.JexlNodes; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.jexl.JexlASTHelper; @@ -21,7 +22,6 @@ import datawave.query.parser.JavaRegexAnalyzer; import datawave.query.parser.JavaRegexAnalyzer.JavaRegexParseException; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexIndexExpansionVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexIndexExpansionVisitor.java index fb1e08d1dc7..43ab6893045 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexIndexExpansionVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/RegexIndexExpansionVisitor.java @@ -34,6 +34,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Maps; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.Constants; import 
datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; @@ -49,7 +50,6 @@ import datawave.query.planner.pushdown.Cost; import datawave.query.tables.ScannerFactory; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Visits a Jexl tree, looks for regex terms, and replaces them with concrete values from the index @@ -84,7 +84,7 @@ protected RegexIndexExpansionVisitor(ShardQueryConfiguration config, ScannerFact this.expandUnfieldedNegations = config.isExpandUnfieldedNegations(); - if (config.isExpansionLimitedToModelContents()) { + if (config.isLimitTermExpansionToModel()) { try { QueryModel queryModel = helper.getQueryModel(config.getModelTableName(), config.getModelName()); this.onlyUseThese = queryModel.getForwardQueryMapping().values(); diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/TermCountingVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/TermCountingVisitor.java index 7e6de3858cd..5b63f209165 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/TermCountingVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/TermCountingVisitor.java @@ -16,8 +16,8 @@ import org.apache.commons.lang.mutable.MutableInt; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.jexl.nodes.QueryPropertyMarker; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * Count the number of terms where bounded ranges count as 1 term diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/UnfieldedIndexExpansionVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/UnfieldedIndexExpansionVisitor.java index 58e2ab5f50c..ab61593e4a1 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/UnfieldedIndexExpansionVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/UnfieldedIndexExpansionVisitor.java @@ -20,6 +20,7 @@ import org.apache.commons.jexl3.parser.JexlNode; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.data.type.Type; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; @@ -30,7 +31,6 @@ import datawave.query.jexl.nodes.QueryPropertyMarker; import datawave.query.tables.ScannerFactory; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.NotFoundQueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ValidateFilterFunctionVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ValidateFilterFunctionVisitor.java index f4fd67f26f7..1a3b4fe385b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ValidateFilterFunctionVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/ValidateFilterFunctionVisitor.java @@ -37,10 +37,12 @@ import org.apache.commons.jexl3.parser.JexlNode; import org.apache.log4j.Logger; +import com.google.common.collect.Sets; + +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.functions.FunctionJexlNodeVisitor; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; diff --git 
a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/whindex/WhindexVisitor.java b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/whindex/WhindexVisitor.java index 7640a9cb267..b54a72fc143 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/whindex/WhindexVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/visitors/whindex/WhindexVisitor.java @@ -40,6 +40,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Multimap; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.config.ShardQueryConfiguration; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.JexlNodeFactory; @@ -53,7 +54,6 @@ import datawave.query.jexl.visitors.RebuildingVisitor; import datawave.query.jexl.visitors.TreeFlatteningRebuildingVisitor; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; /** * The 'WhindexVisitor' is used to replace wide-scoped geowave fields with value-specific, narrow-scoped geowave fields where appropriate. 
diff --git a/warehouse/query-core/src/main/java/datawave/query/metrics/AccumuloRecordWriter.java b/warehouse/query-core/src/main/java/datawave/query/metrics/AccumuloRecordWriter.java index b170c403d27..48fa184b8e5 100644 --- a/warehouse/query-core/src/main/java/datawave/query/metrics/AccumuloRecordWriter.java +++ b/warehouse/query-core/src/main/java/datawave/query/metrics/AccumuloRecordWriter.java @@ -17,6 +17,7 @@ import org.apache.accumulo.core.client.MutationsRejectedException; import org.apache.accumulo.core.client.TableExistsException; import org.apache.accumulo.core.client.TableNotFoundException; +import org.apache.accumulo.core.client.ZooKeeperInstance; import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.data.ColumnUpdate; import org.apache.accumulo.core.data.Mutation; @@ -33,9 +34,9 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; import datawave.common.util.ArgumentChecker; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.util.EnvProvider; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.common.util.EnvProvider; public class AccumuloRecordWriter extends RecordWriter { private MultiTableBatchWriter mtbw = null; @@ -100,7 +101,7 @@ public AccumuloRecordWriter(AccumuloConnectionFactory connectionFactory, Configu } else { this.connFactory = connectionFactory; Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - this.client = connectionFactory.getClient(Priority.ADMIN, trackingMap); + this.client = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); } mtbw = client.createMultiTableBatchWriter(new 
BatchWriterConfig().setMaxMemory(getMaxMutationBufferSize(conf)) .setMaxLatency(getMaxLatency(conf), TimeUnit.MILLISECONDS).setMaxWriteThreads(getMaxWriteThreads(conf))); diff --git a/warehouse/query-core/src/main/java/datawave/query/metrics/BaseQueryMetricHandler.java b/warehouse/query-core/src/main/java/datawave/query/metrics/BaseQueryMetricHandler.java index 916365e8d6a..039a197da47 100644 --- a/warehouse/query-core/src/main/java/datawave/query/metrics/BaseQueryMetricHandler.java +++ b/warehouse/query-core/src/main/java/datawave/query/metrics/BaseQueryMetricHandler.java @@ -12,6 +12,7 @@ import org.apache.commons.lang.time.DateUtils; import org.apache.log4j.Logger; +import datawave.core.query.metric.QueryMetricHandler; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.Lifecycle; import datawave.microservice.querymetric.QueryMetricSummary; @@ -20,7 +21,6 @@ import datawave.query.jexl.visitors.TreeFlatteningRebuildingVisitor; import datawave.query.language.parser.jexl.LuceneToJexlQueryParser; import datawave.query.language.tree.QueryNode; -import datawave.webservice.query.metric.QueryMetricHandler; /** * diff --git a/warehouse/query-core/src/main/java/datawave/query/metrics/ContentQueryMetricsIngestHelper.java b/warehouse/query-core/src/main/java/datawave/query/metrics/ContentQueryMetricsIngestHelper.java index 5aa146c6353..d8cf0f8ff58 100644 --- a/warehouse/query-core/src/main/java/datawave/query/metrics/ContentQueryMetricsIngestHelper.java +++ b/warehouse/query-core/src/main/java/datawave/query/metrics/ContentQueryMetricsIngestHelper.java @@ -7,27 +7,21 @@ import java.util.Map; import java.util.Set; -import org.apache.commons.jexl3.parser.ASTEQNode; -import org.apache.commons.jexl3.parser.ASTJexlScript; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; +import 
datawave.core.query.util.QueryUtil; import datawave.ingest.data.config.NormalizedContentInterface; import datawave.ingest.data.config.NormalizedFieldAndValue; import datawave.ingest.data.config.ingest.CSVIngestHelper; import datawave.ingest.data.config.ingest.TermFrequencyIngestHelperInterface; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.PageMetric; import datawave.microservice.querymetric.BaseQueryMetric.Prediction; -import datawave.query.jexl.JexlASTHelper; -import datawave.query.jexl.visitors.TreeFlatteningRebuildingVisitor; -import datawave.query.language.parser.jexl.LuceneToJexlQueryParser; -import datawave.query.language.tree.QueryNode; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.util.QueryUtil; public class ContentQueryMetricsIngestHelper extends CSVIngestHelper implements TermFrequencyIngestHelperInterface { diff --git a/warehouse/query-core/src/main/java/datawave/query/metrics/QueryMetricQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/metrics/QueryMetricQueryLogic.java index 6e8c6eba576..de449497dc3 100644 --- a/warehouse/query-core/src/main/java/datawave/query/metrics/QueryMetricQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/metrics/QueryMetricQueryLogic.java @@ -1,20 +1,14 @@ package datawave.query.metrics; -import java.util.ArrayList; -import java.util.Collection; import java.util.Set; -import javax.inject.Inject; - import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.microservice.query.Query; import datawave.query.language.parser.ParseException; import datawave.query.tables.ShardQueryLogic; -import datawave.security.authorization.DatawavePrincipal; -import 
datawave.security.system.CallerPrincipal; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Extension to the query logic that enforces the current user is equal to the USER field in the QueryMetrics
    @@ -44,15 +38,7 @@ */ public class QueryMetricQueryLogic extends ShardQueryLogic { - @Inject - @CallerPrincipal - private DatawavePrincipal callerPrincipal; - - private Collection roles = null; - - public void setRolesSets(Collection roleSets) { - this.roles = roleSets; - } + private static final String METRICS_ADMIN_ROLE = "MetricsAdministrator"; public QueryMetricQueryLogic() { super(); @@ -60,11 +46,6 @@ public QueryMetricQueryLogic() { public QueryMetricQueryLogic(QueryMetricQueryLogic other) { super(other); - callerPrincipal = other.callerPrincipal; - if (other.roles != null) { - roles = new ArrayList<>(); - roles.addAll(other.roles); - } } @Override @@ -79,13 +60,8 @@ public final GenericQueryConfiguration initialize(AccumuloClient client, Query s @Override public final String getJexlQueryString(Query settings) throws ParseException { - - if (null == this.roles) { - this.roles = callerPrincipal.getPrimaryUser().getRoles(); - } - String query = super.getJexlQueryString(settings); - if (this.roles.contains("MetricsAdministrator")) { + if (this.getCurrentUser().getPrimaryUser().getRoles().contains(METRICS_ADMIN_ROLE)) { return query; } diff --git a/warehouse/query-core/src/main/java/datawave/query/model/edge/EdgeQueryModel.java b/warehouse/query-core/src/main/java/datawave/query/model/edge/EdgeQueryModel.java index bbe67a4444a..98ea410b2c8 100644 --- a/warehouse/query-core/src/main/java/datawave/query/model/edge/EdgeQueryModel.java +++ b/warehouse/query-core/src/main/java/datawave/query/model/edge/EdgeQueryModel.java @@ -2,11 +2,12 @@ import java.util.Collection; import java.util.Map.Entry; -import java.util.Set; -import datawave.edge.model.EdgeModelAware; +import datawave.edge.model.DefaultEdgeModelFieldsFactory; +import datawave.edge.model.EdgeModelFields; +import datawave.edge.model.EdgeModelFieldsFactory; import datawave.query.model.QueryModel; -import datawave.query.model.util.LoadModelFromXml; +import datawave.query.model.util.LoadModel; /** * This 
class defines a typical QueryModel, allowing the query syntax for edge queries to be easily customized for an external client's needs/preferences. @@ -16,7 +17,7 @@ *
    * (1) Unlike event-based query models, edge field names don't exist on disk in the way that internal event attributes do. The edge data model is relatively * simple and static with respect to the set of all possible field names. And because we're not constrained by the physical representation of field names on - * disk, we have the flexibility to choose an internal naming scheme to suit the targeted deployment environment. See {@link EdgeModelAware}. For example, with + * disk, we have the flexibility to choose an internal naming scheme to suit the targeted deployment environment. See {@link EdgeModelFields}. For example, with * respect to the superclass method {@code addTermToModel(String alias, String nameOnDisk)}, 'nameOnDisk' can be whatever we want and can be managed with * configuration as needed.
    *
    @@ -27,7 +28,9 @@ *
    * (3) Additionally, index-only/unevaluated fields are ignored, as this concept is not applicable to edges. */ -public class EdgeQueryModel extends QueryModel implements EdgeModelAware { +public class EdgeQueryModel extends QueryModel { + + private final EdgeModelFields fields; /** * This constructor allows the class to be used in conjunction with existing QueryModel loaders. @@ -37,14 +40,15 @@ public class EdgeQueryModel extends QueryModel implements EdgeModelAware { * @throws InvalidModelException * if the model is invalid */ - public EdgeQueryModel(QueryModel other) throws InvalidModelException { + public EdgeQueryModel(QueryModel other, EdgeModelFields fields) throws InvalidModelException { super(other); + this.fields = fields; validateModel(this); } - /** This constructor should never be used */ - @SuppressWarnings("unused") - private EdgeQueryModel() {} + public EdgeModelFields getFields() { + return fields; + } /** * Simple factory method to load a query model from the specified classpath resource. @@ -55,8 +59,26 @@ private EdgeQueryModel() {} * @throws Exception * if there is an issue */ + public static EdgeQueryModel loadModel(String queryModelXml, EdgeModelFields fields) throws Exception { + return new EdgeQueryModel(LoadModel.loadModelFromXml(queryModelXml), fields); + } + + /** + * Simple factory method to load a query model from the specified classpath resource. + * + * @return EdgeQueryModel instance + */ + public static EdgeQueryModel loadModel(String queryModelXml, EdgeModelFieldsFactory fieldsFactory) throws Exception { + return loadModel(queryModelXml, fieldsFactory.createFields()); + } + + /** + * Simple factory method to load a query model from the specified classpath resource. 
+ * + * @return EdgeQueryModel instance + */ public static EdgeQueryModel loadModel(String queryModelXml) throws Exception { - return new EdgeQueryModel(LoadModelFromXml.loadModel(queryModelXml)); + return loadModel(queryModelXml, new DefaultEdgeModelFieldsFactory()); } /** @@ -119,6 +141,6 @@ public static void validateModel(EdgeQueryModel model) throws InvalidModelExcept } public Collection getAllInternalFieldNames() { - return Fields.getInstance().getBaseFieldNames(); + return fields.getBaseFieldNames(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/model/util/LoadModelFromXml.java b/warehouse/query-core/src/main/java/datawave/query/model/util/LoadModel.java similarity index 84% rename from warehouse/query-core/src/main/java/datawave/query/model/util/LoadModelFromXml.java rename to warehouse/query-core/src/main/java/datawave/query/model/util/LoadModel.java index ee8ea7602a4..4b546cdd4d9 100644 --- a/warehouse/query-core/src/main/java/datawave/query/model/util/LoadModelFromXml.java +++ b/warehouse/query-core/src/main/java/datawave/query/model/util/LoadModel.java @@ -1,6 +1,7 @@ package datawave.query.model.util; import java.io.InputStream; +import java.util.Collection; import javax.xml.bind.JAXBContext; import javax.xml.bind.Unmarshaller; @@ -16,11 +17,11 @@ import datawave.webservice.model.Model; /** - * Utility class to load a model from XML using jaxb objects generated in web service + * Utility class to load a model */ -public class LoadModelFromXml { +public class LoadModel { - private static final Logger log = Logger.getLogger(LoadModelFromXml.class); + private static final Logger log = Logger.getLogger(LoadModel.class); public static QueryModel loadModelFromXml(InputStream stream) throws Exception { @@ -43,8 +44,12 @@ public static QueryModel loadModelFromXml(InputStream stream) throws Exception { } } + return loadModelFromFieldMappings(xmlModel.getFields()); + } + + public static QueryModel loadModelFromFieldMappings(Collection 
fieldMappings) { QueryModel model = new QueryModel(); - for (FieldMapping mapping : xmlModel.getFields()) { + for (FieldMapping mapping : fieldMappings) { if (mapping.isFieldMapping()) { switch (mapping.getDirection()) { case FORWARD: @@ -78,12 +83,10 @@ public static QueryModel loadModelFromXml(InputStream stream) throws Exception { * @throws Exception * if there are issues */ - public static QueryModel loadModel(String queryModelXml) throws Exception { - QueryModel model = null; - try (InputStream modelStream = LoadModelFromXml.class.getResourceAsStream(queryModelXml)) { + public static QueryModel loadModelFromXml(String queryModelXml) throws Exception { + QueryModel model; + try (InputStream modelStream = LoadModel.class.getResourceAsStream(queryModelXml)) { model = loadModelFromXml(modelStream); - } catch (Throwable t) { - throw t; } return model; } diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/BooleanChunkingQueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/BooleanChunkingQueryPlanner.java index 681c297c9e9..d71a27d1461 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/BooleanChunkingQueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/BooleanChunkingQueryPlanner.java @@ -10,6 +10,8 @@ import com.google.common.collect.Lists; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.CloseableIterable; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; @@ -24,8 +26,6 @@ import datawave.query.util.MetadataHelper; import datawave.query.util.QueryStopwatch; import datawave.util.time.TraceStopwatch; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; public class BooleanChunkingQueryPlanner extends DefaultQueryPlanner { private static final Logger log = 
Logger.getLogger(BooleanChunkingQueryPlanner.class); diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java index 8cfad16d723..30821b87c92 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java @@ -60,10 +60,15 @@ import com.google.common.collect.Multimaps; import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.core.iterators.querylock.QueryLock; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; import datawave.data.type.AbstractGeometryType; import datawave.data.type.Type; import datawave.ingest.mapreduce.handler.dateindex.DateIndexUtil; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.CloseableIterable; import datawave.query.Constants; import datawave.query.QueryParameters; @@ -167,11 +172,6 @@ import datawave.query.util.Tuple2; import datawave.query.util.TypeMetadata; import datawave.util.time.TraceStopwatch; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.configuration.QueryData; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.NotFoundQueryException; @@ -2583,6 +2583,7 @@ protected CloseableIterable getFullScanRange(ShardQueryConfiguration // @formatter:off QueryPlan queryPlan = new QueryPlan() + .withTableName(config.getShardTableName()) 
.withQueryTree(queryTree) .withRanges(Collections.singleton(range)); // @formatter:on diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/FacetedQueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/FacetedQueryPlanner.java index ab8ac318848..008cfae747a 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/FacetedQueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/FacetedQueryPlanner.java @@ -7,6 +7,8 @@ import com.google.common.base.Joiner; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.CloseableIterable; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveQueryException; @@ -24,8 +26,6 @@ import datawave.query.util.DateIndexHelper; import datawave.query.util.MetadataHelper; import datawave.query.util.Tuple2; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/IndexQueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/IndexQueryPlanner.java index 513f1cd9a7f..9ead8720514 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/IndexQueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/IndexQueryPlanner.java @@ -5,6 +5,8 @@ import org.apache.accumulo.core.client.IteratorSetting; import org.apache.commons.jexl3.parser.ASTJexlScript; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveQueryException; import datawave.query.exceptions.EmptyUnfieldedTermExpansionException; @@ -15,8 +17,6 @@ import 
datawave.query.tables.ScannerFactory; import datawave.query.util.DateIndexHelper; import datawave.query.util.MetadataHelper; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/MetadataHelperQueryModelProvider.java b/warehouse/query-core/src/main/java/datawave/query/planner/MetadataHelperQueryModelProvider.java index bcf4e31ef04..82dee244c7e 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/MetadataHelperQueryModelProvider.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/MetadataHelperQueryModelProvider.java @@ -3,11 +3,11 @@ import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.model.QueryModel; import datawave.query.util.MetadataHelper; -import datawave.webservice.common.logging.ThreadConfigurableLogger; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java index 7cbf920760c..0c1fbc151a9 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java @@ -10,6 +10,7 @@ import com.google.common.collect.Sets; +import datawave.core.common.logging.ThreadConfigurableLogger; import datawave.query.Constants; import datawave.query.QueryParameters; import datawave.query.attributes.ExcerptFields; @@ 
-17,7 +18,6 @@ import datawave.query.common.grouping.GroupFields; import datawave.query.config.ShardQueryConfiguration; import datawave.util.StringUtils; -import datawave.webservice.common.logging.ThreadConfigurableLogger; public class QueryOptionsSwitch { diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlan.java b/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlan.java index 35bb1776c68..176575769b1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlan.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlan.java @@ -16,21 +16,26 @@ import org.apache.commons.jexl3.parser.ParseException; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.HashCodeBuilder; +import org.apache.log4j.Logger; import com.google.common.base.Preconditions; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.QueryData; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.visitors.JexlStringBuildingVisitor; import datawave.query.util.count.CountMap; -import datawave.webservice.query.configuration.QueryData; /** * Base representation of a query plan */ public class QueryPlan { + private static final Logger log = ThreadConfigurableLogger.getLogger(QueryPlan.class); + + protected String tableName = null; protected JexlNode queryTree = null; protected String queryTreeString = null; protected Collection ranges = null; @@ -49,6 +54,8 @@ public QueryPlan() { /** * Preferred full constructor * + * @param tableName + * the table name * @param queryTree * the query tree * @param ranges @@ -58,7 +65,8 @@ public QueryPlan() { * @param settings * a list of IteratorSetting */ - public QueryPlan(JexlNode queryTree, Collection ranges, Collection columnFamilies, List settings) { + public QueryPlan(String tableName, JexlNode 
queryTree, Collection ranges, Collection columnFamilies, List settings) { + this.tableName = tableName; this.queryTree = queryTree; this.queryTreeString = JexlStringBuildingVisitor.buildQueryWithoutParse(queryTree); this.ranges = ranges; @@ -70,6 +78,8 @@ public QueryPlan(JexlNode queryTree, Collection ranges, Collection ranges, Collection ranges, Collection columnFamilies, List settings) { + public QueryPlan(String tableName, String queryString, Collection ranges, Collection columnFamilies, List settings) { + this.tableName = tableName; this.queryTree = null; this.queryTreeString = queryString; this.ranges = ranges; @@ -95,6 +106,7 @@ public QueryPlan(String queryString, Collection ranges, Collection(other.ranges); @@ -109,6 +121,8 @@ public QueryPlan(QueryPlan other) { /** * Partial constructor, missing IteratorSetting * + * @param tableName + * the table name * @param queryTreeString * the query string * @param queryTree @@ -118,13 +132,14 @@ public QueryPlan(QueryPlan other) { * @deprecated */ @Deprecated(since = "6.9.0", forRemoval = true) - public QueryPlan(String queryTreeString, JexlNode queryTree, Iterable ranges) { - this(queryTreeString, queryTree, ranges, null); + public QueryPlan(String tableName, String queryTreeString, JexlNode queryTree, Iterable ranges) { + this(tableName, queryTreeString, queryTree, ranges, null); } @Deprecated(since = "6.9.0", forRemoval = true) - public QueryPlan(String queryTreeString, JexlNode queryTree, Iterable ranges, List settings) { + public QueryPlan(String tableName, String queryTreeString, JexlNode queryTree, Iterable ranges, List settings) { Preconditions.checkNotNull(queryTree); + this.tableName = tableName; this.queryTree = queryTree; this.queryTreeString = queryTreeString; this.ranges = Lists.newArrayList(ranges); @@ -135,8 +150,9 @@ public QueryPlan(String queryTreeString, JexlNode queryTree, Iterable ran } @Deprecated(since = "6.9.0", forRemoval = true) - public QueryPlan(JexlNode queryTree, Iterable ranges, 
Collection columnFamilies) { + public QueryPlan(String tableName, JexlNode queryTree, Iterable ranges, Collection columnFamilies) { Preconditions.checkNotNull(queryTree); + this.tableName = tableName; this.queryTree = queryTree; this.ranges = Lists.newArrayList(ranges); this.columnFamilies = Lists.newArrayList(columnFamilies); @@ -144,8 +160,9 @@ public QueryPlan(JexlNode queryTree, Iterable ranges, Collection } @Deprecated(since = "6.9.0", forRemoval = true) - public QueryPlan(JexlNode queryTree, Range range) { + public QueryPlan(String tableName, JexlNode queryTree, Range range) { Preconditions.checkNotNull(queryTree); + this.tableName = tableName; this.queryTree = queryTree; this.ranges = Lists.newArrayList(range); resetHashCode(); @@ -153,6 +170,7 @@ public QueryPlan(JexlNode queryTree, Range range) { @Deprecated(since = "6.9.0", forRemoval = true) public QueryPlan(QueryData currentQueryData) throws ParseException { + this.tableName = currentQueryData.getTableName(); this.queryTreeString = currentQueryData.getQuery(); this.ranges = Lists.newArrayList(currentQueryData.getRanges()); this.settings.addAll(currentQueryData.getSettings()); @@ -160,8 +178,16 @@ public QueryPlan(QueryData currentQueryData) throws ParseException { resetHashCode(); } + /** + * @param tableName + * @param queryTree + * @param rangeIter + * @param settings + * @param columnFamilies + */ @Deprecated(since = "6.9.0", forRemoval = true) - public QueryPlan(JexlNode queryTree, Iterable rangeIter, List settings, Collection columnFamilies) { + public QueryPlan(String tableName, JexlNode queryTree, Iterable rangeIter, List settings, Collection columnFamilies) { + this.tableName = tableName; this.queryTree = queryTree; this.ranges = Lists.newArrayList(rangeIter); for (IteratorSetting setting : settings) { @@ -180,9 +206,15 @@ public QueryPlan(JexlNode queryTree, Iterable rangeIter, List rangeIter, List settings) { - this(queryTree, rangeIter, settings, null); + public QueryPlan(String tableName, 
JexlNode queryTree, Iterable rangeIter, List settings) { + this(tableName, queryTree, rangeIter, settings, null); } public QueryPlan(JexlNode queryTree, Collection ranges) { @@ -195,6 +227,19 @@ public QueryPlan(JexlNode queryTree, Collection ranges) { // builder methods + /** + * Builder style method for setting the table name + * + * @param tableName + * the table name + * @return this QueryPlan + */ + public QueryPlan withTableName(String tableName) { + this.tableName = tableName; + resetHashCode(); + return this; + } + /** * Builder style method for setting the query tree * @@ -318,10 +363,6 @@ public String getQueryString() { return queryTreeString; } - public Iterable getRanges() { - return ranges; - } - public void setRanges(Collection ranges) { this.ranges.clear(); this.ranges.addAll(ranges); @@ -344,11 +385,26 @@ public Collection getColumnFamilies() { return columnFamilies; } + public Collection getRanges() { + return ranges; + } + public void addColumnFamily(String cf) { columnFamilies.add(cf); resetHashCode(); } + public String getTableName() { + return tableName; + } + + public void setTableName(String tableName) { + this.tableName = tableName; + } + + /** + * @return + */ public List getSettings() { return settings; } @@ -372,6 +428,7 @@ public boolean equals(Object obj) { // @formatter:off return new EqualsBuilder() + .append(tableName, other.tableName) .append(queryTree, other.queryTree) .append(queryTreeString, other.queryTreeString) .append(ranges, other.ranges) @@ -388,6 +445,7 @@ public int hashCode() { if (rebuildHashCode) { // @formatter:off hashCode = new HashCodeBuilder() + .append(tableName) .append(queryTree) .append(queryTreeString) .append(ranges) @@ -409,6 +467,6 @@ private void resetHashCode() { @Override public String toString() { - return (ranges + getQueryString() + columnFamilies).intern(); + return (tableName + ranges + getQueryString() + columnFamilies).intern(); } } diff --git 
a/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlanner.java index 2d8e07df40f..797d6cf7264 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/QueryPlanner.java @@ -4,6 +4,9 @@ import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.SortedKeyValueIterator; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.CloseableIterable; import datawave.query.exceptions.DatawaveQueryException; import datawave.query.index.lookup.CreateUidsIterator; @@ -11,9 +14,6 @@ import datawave.query.index.lookup.UidIntersector; import datawave.query.planner.pushdown.PushDownPlanner; import datawave.query.tables.ScannerFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.configuration.QueryData; public abstract class QueryPlanner implements PushDownPlanner { diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundler.java b/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundler.java index 7d5010dc2f7..e2549c4a838 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundler.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundler.java @@ -7,9 +7,9 @@ import org.apache.commons.jexl3.parser.ASTJexlScript; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.CloseableIterable; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; public class ThreadedRangeBundler implements CloseableIterable { diff --git 
a/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundlerIterator.java b/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundlerIterator.java index 82d68f16b59..3252ae5afba 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundlerIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/ThreadedRangeBundlerIterator.java @@ -23,14 +23,13 @@ import datawave.common.util.MultiComparator; import datawave.common.util.concurrent.BoundedBlockingQueue; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.CloseableIterable; -import datawave.query.iterator.QueryIterator; import datawave.query.iterator.QueryOptions; import datawave.query.tld.TLDQueryIterator; import datawave.query.util.count.CountMapSerDe; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; public class ThreadedRangeBundlerIterator implements Iterator, Closeable { private static final Logger log = ThreadConfigurableLogger.getLogger(ThreadedRangeBundlerIterator.class); @@ -295,6 +294,7 @@ private QueryData createNewQueryData(QueryPlan plan) { // @formatter:off return new QueryData() + .withTableName(plan.getTableName()) .withQuery(queryString) .withRanges(Lists.newArrayList(plan.getRanges())) .withColumnFamilies(plan.getColumnFamilies()) @@ -391,7 +391,10 @@ public void run() { } } catch (Exception e) { - throw new RuntimeException(e); + // only propogate the exception if we weren't being shutdown. 
+ if (running) { + throw new RuntimeException(e); + } } finally { rangeConsumer.stop(); } diff --git a/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownFunction.java b/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownFunction.java index 6d74daa80b5..9f168e63e32 100644 --- a/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownFunction.java +++ b/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownFunction.java @@ -30,6 +30,8 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.QueryData; import datawave.query.config.ShardQueryConfiguration; import datawave.query.iterator.QueryIterator; import datawave.query.iterator.QueryOptions; @@ -37,8 +39,6 @@ import datawave.query.planner.QueryPlan; import datawave.query.tables.SessionOptions; import datawave.query.tables.async.ScannerChunk; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.configuration.QueryData; public class PushdownFunction implements Function> { @@ -120,8 +120,7 @@ public List apply(QueryData qd) { options.setQueryConfig(this.config); - chunks.add(new ScannerChunk(options, Lists.newArrayList(plan.getRanges()), server)); - + chunks.add(new ScannerChunk(options, plan.getRanges(), qd, server)); } catch (Exception e) { log.error(e); throw new AccumuloException(e); @@ -137,7 +136,6 @@ public List apply(QueryData qd) { protected void redistributeQueries(Multimap serverPlan, TabletLocator tl, QueryPlan currentPlan) throws AccumuloException, AccumuloSecurityException, TableNotFoundException { - List ranges = Lists.newArrayList(currentPlan.getRanges()); if (!ranges.isEmpty()) { Map>> binnedRanges = binRanges(tl, config.getClient(), ranges); @@ -171,6 +169,7 @@ protected void redistributeQueries(Multimap serverPlan, Tablet // @formatter:off QueryPlan 
queryPlan = new QueryPlan() + .withTableName(currentPlan.getTableName()) .withQueryTree(currentPlan.getQueryTree()) .withRanges(allRanges) .withSettings(newSettings) diff --git a/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownScheduler.java b/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownScheduler.java index c6a1b025355..024e2198257 100644 --- a/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownScheduler.java +++ b/warehouse/query-core/src/main/java/datawave/query/scheduler/PushdownScheduler.java @@ -1,6 +1,7 @@ package datawave.query.scheduler; -import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map.Entry; @@ -29,6 +30,12 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.impl.InMemoryTabletLocator; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.configuration.Result; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; import datawave.mr.bulk.RfileResource; import datawave.query.config.ShardQueryConfiguration; import datawave.query.tables.BatchScannerSession; @@ -39,9 +46,6 @@ import datawave.query.tables.stats.ScanSessionStats; import datawave.query.util.MetadataHelper; import datawave.query.util.MetadataHelperFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.configuration.QueryData; /** * Purpose: Pushes down individual queries to the Tservers. 
Is aware that each server may have a different query, thus bins ranges per tserver and keeps the @@ -95,13 +99,30 @@ public void addSetting(IteratorSetting customSetting) { settings.add(customSetting); } + @Override + public List checkpoint(QueryKey queryKey) { + // if we were not actually started, then simple return the query data checkpoints + if (session == null) { + Iterator queries = getQueryDataIterator(); + List checkpoints = new ArrayList<>(); + while (queries.hasNext()) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(queries.next()))); + } + return checkpoints; + } else { + List checkpoints = session.checkpoint(queryKey); + close(); + return checkpoints; + } + } + /* * (non-Javadoc) * * @see java.lang.Iterable#iterator() */ @Override - public Iterator> iterator() { + public Iterator iterator() { if (null == this.config) { throw new IllegalArgumentException("Null configuration provided"); } @@ -115,8 +136,15 @@ public Iterator> iterator() { } - protected Iterator> concatIterators() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, ParseException { - + /** + * @return + * @throws ParseException + * @throws TableNotFoundException + * @throws AccumuloSecurityException + * @throws AccumuloException + */ + protected Iterator concatIterators() throws AccumuloException, AccumuloSecurityException, TableNotFoundException, ParseException { + boolean hasNext = config.getQueriesIter().hasNext(); String tableName = config.getShardTableName(); Set auths = config.getAuthorizations(); @@ -135,7 +163,7 @@ protected Iterator> concatIterators() throws AccumuloException, Iterator> chunkIter = Iterators.transform(getQueryDataIterator(), new PushdownFunction(tl, config, settings, tableId)); try { - session = scannerFactory.newQueryScanner(tableName, auths, config.getQuery()); + session = scannerFactory.newQueryScanner(tableName, auths, config.getQuery()).setConfig(config); if (config.getBypassAccumulo()) { 
session.setDelegatedInitializer(RfileResource.class); @@ -166,7 +194,11 @@ protected Iterator> concatIterators() throws AccumuloException, } protected Iterator getQueryDataIterator() { - return config.getQueries(); + if (config.isCheckpointable()) { + return new SingleRangeQueryDataIterator(config.getQueriesIter()); + } else { + return config.getQueriesIter(); + } } /* @@ -175,7 +207,7 @@ protected Iterator getQueryDataIterator() { * @see java.io.Closeable#close() */ @Override - public void close() throws IOException { + public void close() { if (session != null) scannerFactory.close(session); diff --git a/warehouse/query-core/src/main/java/datawave/query/scheduler/Scheduler.java b/warehouse/query-core/src/main/java/datawave/query/scheduler/Scheduler.java index bc9aea1419d..68db69dc8a4 100644 --- a/warehouse/query-core/src/main/java/datawave/query/scheduler/Scheduler.java +++ b/warehouse/query-core/src/main/java/datawave/query/scheduler/Scheduler.java @@ -1,24 +1,24 @@ package datawave.query.scheduler; import java.util.Collection; -import java.util.Map.Entry; +import java.util.List; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; import com.google.common.collect.Lists; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.configuration.Result; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; import datawave.query.CloseableIterable; import datawave.query.config.ShardQueryConfiguration; import datawave.query.tables.ScannerFactory; import datawave.query.tables.stats.ScanSessionStats; -import datawave.webservice.query.configuration.QueryData; - -public abstract class Scheduler implements CloseableIterable> { +public abstract class Scheduler implements CloseableIterable { protected 
Collection settings = Lists.newArrayList(); public abstract BatchScanner createBatchScanner(ShardQueryConfiguration config, ScannerFactory scannerFactory, QueryData qd) throws TableNotFoundException; @@ -34,4 +34,5 @@ public void addSetting(IteratorSetting customSetting) { settings.add(customSetting); } + public abstract List checkpoint(QueryKey queryKey); } diff --git a/warehouse/query-core/src/main/java/datawave/query/scheduler/SequentialScheduler.java b/warehouse/query-core/src/main/java/datawave/query/scheduler/SequentialScheduler.java index 03bb4c080b6..6b5f6841e5b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/scheduler/SequentialScheduler.java +++ b/warehouse/query-core/src/main/java/datawave/query/scheduler/SequentialScheduler.java @@ -1,22 +1,27 @@ package datawave.query.scheduler; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; import java.util.Iterator; -import java.util.Map.Entry; +import java.util.List; import java.util.concurrent.atomic.AtomicInteger; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; import org.apache.log4j.Logger; +import com.google.common.collect.Lists; + +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.configuration.Result; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; import datawave.query.config.ShardQueryConfiguration; import datawave.query.tables.ScannerFactory; import datawave.query.tables.ShardQueryLogic; import datawave.query.tables.stats.ScanSessionStats; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.configuration.QueryData; /** * @@ -46,7 +51,7 @@ public SequentialScheduler(ShardQueryConfiguration config, ScannerFactory 
scanne * @see java.lang.Iterable#iterator() */ @Override - public Iterator> iterator() { + public Iterator iterator() { if (null == this.config) { throw new IllegalArgumentException("Null configuration provided"); } @@ -80,22 +85,26 @@ public BatchScanner createBatchScanner(ShardQueryConfiguration config, ScannerFa return ShardQueryLogic.createBatchScanner(config, scannerFactory, qd); } - public class SequentialSchedulerIterator implements Iterator> { - + public class SequentialSchedulerIterator implements Iterator { protected final ShardQueryConfiguration config; protected final ScannerFactory scannerFactory; protected Iterator queries = null; - protected Entry currentEntry = null; + protected QueryData currentQuery = null; + protected Result currentEntry = null; + protected Result lastEntry = null; protected BatchScanner currentBS = null; - protected Iterator> currentIter = null; + protected Iterator currentIter = null; protected volatile boolean closed = false; public SequentialSchedulerIterator(ShardQueryConfiguration config, ScannerFactory scannerFactory) { this.config = config; this.scannerFactory = scannerFactory; - this.queries = config.getQueries(); + this.queries = config.getQueriesIter(); + if (this.config.isCheckpointable()) { + this.queries = new SingleRangeQueryDataIterator(this.queries); + } } /* @@ -105,24 +114,26 @@ public SequentialSchedulerIterator(ShardQueryConfiguration config, ScannerFactor */ @Override public boolean hasNext() { - if (closed) { - return false; - } - - if (null != this.currentEntry) { - return true; - } else if (null != this.currentBS && null != this.currentIter) { - if (this.currentIter.hasNext()) { - this.currentEntry = this.currentIter.next(); + while (true) { + if (closed) { + return false; + } - return hasNext(); - } else { - this.currentBS.close(); + if (null != this.currentEntry) { + return true; + } else if (null != this.currentBS && null != this.currentIter) { + if (this.currentIter.hasNext()) { + this.currentEntry = 
this.currentIter.next(); + continue; + } else { + this.currentBS.close(); + this.currentBS = null; + this.currentIter = null; + } } - } - QueryData newQueryData = null; - while (true) { + lastEntry = null; + currentQuery = null; if (this.queries.hasNext()) { // Keep track of how many QueryData's we make QueryData qd = this.queries.next(); @@ -130,30 +141,22 @@ public boolean hasNext() { rangesSeen += qd.getRanges().size(); } count.incrementAndGet(); - if (null == newQueryData) - newQueryData = new QueryData(qd); - else { - newQueryData.getRanges().addAll(qd.getRanges()); - } + currentQuery = qd; + } - } else - break; - } + if (null != currentQuery) { - if (null != newQueryData) { + try { + this.currentBS = createBatchScanner(this.config, this.scannerFactory, currentQuery); + } catch (TableNotFoundException e) { + throw new RuntimeException(e); + } - try { - this.currentBS = createBatchScanner(this.config, this.scannerFactory, newQueryData); - } catch (TableNotFoundException e) { - throw new RuntimeException(e); + this.currentIter = Result.resultIterator(currentQuery, this.currentBS.iterator()); + } else { + return false; } - - this.currentIter = this.currentBS.iterator(); - - return hasNext(); } - - return false; } /* @@ -162,20 +165,35 @@ public boolean hasNext() { * @see java.util.Iterator#next() */ @Override - public Entry next() { + public Result next() { if (closed) { return null; } if (hasNext()) { - Entry cur = this.currentEntry; + this.lastEntry = this.currentEntry; this.currentEntry = null; - return cur; + return this.lastEntry; } return null; } + public List checkpoint(QueryKey queryKey) { + close(); + List checkpoints = new ArrayList<>(); + if (currentQuery != null) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(currentQuery))); + currentQuery = null; + } + while (queries.hasNext()) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(queries.next()))); + } + config.setQueries(null); + 
config.setQueriesIter(null); + return checkpoints; + } + /* * (non-Javadoc) * @@ -196,6 +214,21 @@ public void close() { } } + @Override + public List checkpoint(QueryKey queryKey) { + if (null == this.config) { + throw new IllegalArgumentException("Null configuration provided"); + } + if (!config.isCheckpointable()) { + throw new UnsupportedOperationException("Cannot checkpoint a scheduler which is not checkpointable"); + } + if (this.iterator != null) { + return this.iterator.checkpoint(queryKey); + } else { + return Lists.newArrayList(new QueryCheckpoint(queryKey, config.getQueries())); + } + } + @Override public ScanSessionStats getSchedulerStats() { return null; diff --git a/warehouse/query-core/src/main/java/datawave/query/scheduler/SingleRangeQueryDataIterator.java b/warehouse/query-core/src/main/java/datawave/query/scheduler/SingleRangeQueryDataIterator.java new file mode 100644 index 00000000000..14c6a97b858 --- /dev/null +++ b/warehouse/query-core/src/main/java/datawave/query/scheduler/SingleRangeQueryDataIterator.java @@ -0,0 +1,59 @@ +package datawave.query.scheduler; + +import java.util.Collections; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.NoSuchElementException; +import java.util.Queue; + +import org.apache.accumulo.core.data.Range; + +import datawave.core.query.configuration.QueryData; + +public class SingleRangeQueryDataIterator implements Iterator { + private final Iterator delegate; + private Queue pending = new LinkedList(); + + public SingleRangeQueryDataIterator(Iterator queries) { + this.delegate = queries; + } + + /** + * Returns {@code true} if the iteration has more elements. (In other words, returns {@code true} if {@link #next} would return an element rather than + * throwing an exception.) 
+ * + * @return {@code true} if the iteration has more elements + */ + @Override + public boolean hasNext() { + if (pending.isEmpty()) { + return delegate.hasNext(); + } else { + return true; + } + } + + /** + * Returns the next element in the iteration. + * + * @return the next element in the iteration + * @throws NoSuchElementException + * if the iteration has no more elements + */ + @Override + public QueryData next() { + if (pending.isEmpty()) { + QueryData next = delegate.next(); + if (next.getRanges().size() == 1) { + pending.add(next); + } else { + for (Range range : next.getRanges()) { + QueryData qd = new QueryData(next); + qd.setRanges(Collections.singleton(range)); + pending.add(qd); + } + } + } + return pending.remove(); + } +} diff --git a/warehouse/query-core/src/main/java/datawave/query/table/parser/ContentKeyValueFactory.java b/warehouse/query-core/src/main/java/datawave/query/table/parser/ContentKeyValueFactory.java index e229f146f74..558d7ef2e4a 100644 --- a/warehouse/query-core/src/main/java/datawave/query/table/parser/ContentKeyValueFactory.java +++ b/warehouse/query-core/src/main/java/datawave/query/table/parser/ContentKeyValueFactory.java @@ -1,15 +1,15 @@ package datawave.query.table.parser; import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.util.Base64; import java.util.zip.GZIPInputStream; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; +import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; -import org.infinispan.commons.util.Base64; import datawave.marking.MarkingFunctions; import datawave.query.Constants; @@ -44,34 +44,21 @@ public static ContentKeyValue parse(Key key, Value value, Authorizations auths, /* * We are storing 'documents' in this column gzip'd and base64 encoded. Base64.decode detects and handles compression. 
*/ + byte[] contents = value.get(); try { - c.setContents(Base64.decode(new String(value.get()))); - } catch (IllegalStateException e) { + contents = decompress(Base64.getMimeDecoder().decode(contents)); + } catch (IOException e) { + log.error("Error decompressing Base64 encoded GZIPInputStream", e); + } catch (Exception e) { // Thrown when data is not Base64 encoded. Try GZIP - ByteArrayInputStream bais = new ByteArrayInputStream(value.get()); - GZIPInputStream gzip = null; - ByteArrayOutputStream baos = new ByteArrayOutputStream(); try { - gzip = new GZIPInputStream(bais); - byte[] buf = new byte[4096]; - int length = 0; - while ((length = gzip.read(buf)) >= 0) { - baos.write(buf, 0, length); - } - c.setContents(baos.toByteArray()); + contents = decompress(contents); } catch (IOException ioe) { - // Not GZIP, now what? - c.setContents(value.get()); - } finally { - if (null != gzip) { - try { - gzip.close(); - } catch (IOException e1) { - log.error("Error closing GZIPInputStream", e1); - } - } + log.error("Error decompressing GZIPInputStream", e); } } + + c.setContents(contents); } EventKeyValueFactory.parseColumnVisibility(c, key, auths, markingFunctions); @@ -79,6 +66,20 @@ public static ContentKeyValue parse(Key key, Value value, Authorizations auths, return c; } + private static boolean isCompressed(byte[] compressed) { + return (compressed[0] == (byte) (GZIPInputStream.GZIP_MAGIC)) && (compressed[1] == (byte) (GZIPInputStream.GZIP_MAGIC >> 8)); + } + + private static byte[] decompress(byte[] compressed) throws IOException { + byte[] decompressed = compressed; + if (isCompressed(compressed)) { + try (GZIPInputStream gzis = new GZIPInputStream(new ByteArrayInputStream(compressed))) { + decompressed = IOUtils.toByteArray(gzis); + } + } + return decompressed; + } + public static class ContentKeyValue extends EventKeyValue { protected String viewName = null; diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/AnyFieldScanner.java 
b/warehouse/query-core/src/main/java/datawave/query/tables/AnyFieldScanner.java index e2c1e6f3acd..540f7336ec6 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/AnyFieldScanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/AnyFieldScanner.java @@ -9,7 +9,7 @@ import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; /** diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/BaseRemoteQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/BaseRemoteQueryLogic.java index 37b0ab0002c..e667396f588 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/BaseRemoteQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/BaseRemoteQueryLogic.java @@ -9,18 +9,19 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.remote.RemoteQueryService; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.config.RemoteQueryConfiguration; import datawave.query.tables.remote.RemoteQueryLogic; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.common.remote.RemoteQueryService; -import datawave.webservice.query.Query; -import 
datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.result.GenericResponse; @@ -96,10 +97,6 @@ public void setRemoteQueryLogic(String remoteQueryLogic) { getConfig().setRemoteQueryLogic(remoteQueryLogic); } - public Object getCallerObject() { - return getPrincipal(); - } - @Override public GenericQueryConfiguration initialize(AccumuloClient connection, Query settings, Set auths) throws Exception { Map> parms = settings.toMap(); @@ -107,7 +104,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set if (!parms.containsKey(QUERY_ID) && settings.getId() != null) { parms.put(QUERY_ID, Collections.singletonList(settings.getId().toString())); } - GenericResponse createResponse = remoteQueryService.createQuery(getRemoteQueryLogic(), parms, getCallerObject()); + GenericResponse createResponse = remoteQueryService.createQuery(getRemoteQueryLogic(), parms, getCurrentUser()); setRemoteId(createResponse.getResult()); log.info("Local query " + settings.getId() + " maps to remote query " + getRemoteId()); RemoteQueryConfiguration config = getConfig(); @@ -117,7 +114,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set @Override public String getPlan(AccumuloClient connection, Query settings, Set auths, boolean expandFields, boolean expandValues) throws Exception { - GenericResponse planResponse = remoteQueryService.planQuery(getRemoteQueryLogic(), settings.toMap(), getCallerObject()); + GenericResponse planResponse = remoteQueryService.planQuery(getRemoteQueryLogic(), settings.toMap(), getCurrentUser()); return planResponse.getResult(); } @@ -143,7 +140,7 @@ public void close() { if (getRemoteId() != null) { try { - 
remoteQueryService.close(getRemoteId(), getCallerObject()); + remoteQueryService.close(getRemoteId(), getCurrentUser()); } catch (Exception e) { log.error("Failed to close remote query", e); } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/BatchScannerSession.java b/warehouse/query-core/src/main/java/datawave/query/tables/BatchScannerSession.java index fd3b2aff8e1..b38883be416 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/BatchScannerSession.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/BatchScannerSession.java @@ -2,12 +2,13 @@ import java.io.InterruptedIOException; import java.lang.Thread.UncaughtExceptionHandler; +import java.util.ArrayList; import java.util.Collection; import java.util.Collections; +import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; -import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.ExecutorService; @@ -23,7 +24,6 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.PartialKey; import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.log4j.Logger; @@ -40,18 +40,29 @@ import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.Service; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.configuration.Result; +import datawave.core.query.configuration.ResultContext; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.microservice.query.Query; import datawave.query.tables.async.Scan; import datawave.query.tables.async.ScannerChunk; import datawave.query.tables.async.SessionArbiter; 
import datawave.query.tables.async.SpeculativeScan; -import datawave.webservice.query.Query; -public class BatchScannerSession extends ScannerSession implements Iterator>, FutureCallback, SessionArbiter, UncaughtExceptionHandler { +public class BatchScannerSession extends ScannerSession implements Iterator, FutureCallback, SessionArbiter, UncaughtExceptionHandler { private static final double RANGE_MULTIPLIER = 5; private static final double QUEUE_MULTIPLIER = 25; + /** + * The configuration used for checkpoints + */ + private GenericQueryConfiguration config; + /** * Delegates scanners to us, blocking if none are available or used by other sources. */ @@ -67,10 +78,22 @@ public class BatchScannerSession extends ScannerSession implements Iterator localAuths; + /** + * This is the iterator of scanner chunks. Basically the work queue. + */ protected Iterator> scannerBatches; + /** + * This is the current batch of chunks pending submission + */ protected BlockingQueue currentBatch; + // set when we need operations to stop gracefully enough to checkpoint + protected volatile boolean needToCheckpoint = false; + + // set when the processing is at a place where we can checkpoint + protected volatile boolean readyToCheckpoint = false; + protected ExecutorService service = null; protected StringBuilder threadId = new StringBuilder(); @@ -92,12 +115,38 @@ public class BatchScannerSession extends ScannerSession implements Iterator runningQueries = Collections.synchronizedSet(new HashSet<>()); + protected boolean backoffEnabled = false; protected boolean speculativeScanning = false; protected int threadCount = 5; + public List checkpoint(QueryKey queryKey) { + needToCheckpoint = true; + while (!readyToCheckpoint && isRunning()) { + try { + Thread.sleep(100); + } catch (InterruptedException e) {} + } + List checkpoints = new ArrayList<>(); + for (ResultContext context : runningQueries) { + if (!context.isFinished()) { + checkpoints.add(new QueryCheckpoint(queryKey, 
Collections.singletonList((QueryData) context))); + } + + } + // now add all of the remaining chunks + for (Iterator> it = scannerBatches; it.hasNext();) { + List chunks = it.next(); + for (ScannerChunk chunk : chunks) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList((QueryData) chunk.getContext()))); + } + } + return checkpoints; + } + private class BatchReaderThreadFactory implements ThreadFactory { private ThreadFactory dtf = Executors.defaultThreadFactory(); @@ -105,9 +154,7 @@ private class BatchReaderThreadFactory implements ThreadFactory { private StringBuilder threadIdentifier; private UncaughtExceptionHandler uncaughtHandler = null; - public BatchReaderThreadFactory(StringBuilder threadName, UncaughtExceptionHandler handler) - - { + public BatchReaderThreadFactory(StringBuilder threadName, UncaughtExceptionHandler handler) { uncaughtHandler = handler; this.threadIdentifier = threadName; } @@ -191,6 +238,11 @@ public BatchScannerSession setThreads(int threads) { return this; } + public BatchScannerSession setConfig(GenericQueryConfiguration config) { + this.config = config; + return this; + } + public BatchScannerSession updateIdentifier(String threadId) { this.threadId.append(threadId); return this; @@ -277,9 +329,7 @@ protected void run() throws Exception { return; } - while (scannerBatches.hasNext()) - - { + while (scannerBatches.hasNext() && !needToCheckpoint) { if (runnableCount.get() < (threadCount * RANGE_MULTIPLIER)) { if (currentBatch.isEmpty()) { List chunks = scannerBatches.next(); @@ -310,7 +360,11 @@ protected void run() throws Exception { if (log.isTraceEnabled()) log.trace("waiting " + runnableCount.get()); submitTasks(); - while (runnableCount.get() > 0) { + + // notify those that are wondering + readyToCheckpoint = true; + + while (runnableCount.get() > 0 && !needToCheckpoint) { Thread.sleep(1); // if a failure did not occur, let's check the interrupted status if (isRunning()) { @@ -331,6 +385,9 @@ protected void 
run() throws Exception { } catch (Exception e) { uncaughtExceptionHandler.uncaughtException(Thread.currentThread().currentThread(), e); Throwables.propagate(e); + } finally { + // make sure nobody is hung up on this flag.... + readyToCheckpoint = true; } } @@ -451,8 +508,10 @@ protected void submitTasks() { protected void submitScan(Scan scan, boolean increment) { ListenableFuture future = (ListenableFuture) service.submit(scan); - if (increment) + if (increment) { runnableCount.incrementAndGet(); + runningQueries.add((scan.getScannerChunk().getContext())); + } Futures.addCallback(future, this, MoreExecutors.newDirectExecutorService()); } @@ -504,6 +563,12 @@ public void onSuccess(Scan finishedScan) { if (finishedScan.finished()) { runnableCount.decrementAndGet(); + // if we have pulled all of the results of the front end for this query, then and only then can we remove it. + // otherwise we still need it for checkpointing + if (finishedScan.getScannerChunk().getContext().isFinished()) { + runningQueries.remove(finishedScan.getScannerChunk().getContext()); + } + finishedScan.close(); if (null != stats && null != finishedScan.getStats()) { @@ -657,6 +722,11 @@ protected void shutdownServices() { @Override public void close() { stopAsync(); + try { + awaitTerminated(); + } catch (Exception e) { + + } service.shutdownNow(); } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/CountingShardQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/CountingShardQueryLogic.java index a35b444af0c..33095d03c7e 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/CountingShardQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/CountingShardQueryLogic.java @@ -1,17 +1,24 @@ package datawave.query.tables; +import java.util.Set; + +import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.IteratorSetting; +import org.apache.accumulo.core.security.Authorizations; 
import org.apache.commons.collections4.iterators.TransformIterator; import org.apache.log4j.Logger; import datawave.core.iterators.ResultCountingIterator; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.ResultPostprocessor; +import datawave.microservice.query.Query; import datawave.query.config.ShardQueryConfiguration; import datawave.query.scheduler.PushdownScheduler; import datawave.query.scheduler.Scheduler; import datawave.query.tables.shard.CountAggregatingIterator; +import datawave.query.tables.shard.CountResultPostprocessor; import datawave.query.transformer.ShardQueryCountTableTransformer; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * A simple extension of the basic ShardQueryTable which applies a counting iterator on top of the "normal" iterator stack. @@ -30,6 +37,13 @@ public CountingShardQueryLogic(CountingShardQueryLogic other) { super(other); } + @Override + public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set runtimeQueryAuthorizations) throws Exception { + GenericQueryConfiguration config = super.initialize(client, settings, runtimeQueryAuthorizations); + config.setReduceResults(true); + return config; + } + @Override public CountingShardQueryLogic clone() { return new CountingShardQueryLogic(this); @@ -42,7 +56,12 @@ public QueryLogicTransformer getTransformer(Query settings) { @Override public TransformIterator getTransformIterator(Query settings) { - return new CountAggregatingIterator(this.iterator(), getTransformer(settings)); + return new CountAggregatingIterator(this.iterator(), getTransformer(settings), this.markingFunctions); + } + + @Override + public ResultPostprocessor getResultPostprocessor(GenericQueryConfiguration config) { + return new CountResultPostprocessor(markingFunctions); } @Override diff --git 
a/warehouse/query-core/src/main/java/datawave/query/tables/DedupingIterator.java b/warehouse/query-core/src/main/java/datawave/query/tables/DedupingIterator.java index 8ffd4716044..2bbc6ea5cb3 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/DedupingIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/DedupingIterator.java @@ -2,32 +2,49 @@ import java.io.Serializable; import java.util.Iterator; -import java.util.Map.Entry; import org.apache.accumulo.core.data.ByteSequence; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Value; import com.google.common.hash.BloomFilter; import com.google.common.hash.Funnel; import com.google.common.hash.PrimitiveSink; -class DedupingIterator implements Iterator> { +import datawave.core.query.configuration.Result; +import datawave.query.function.deserializer.KryoDocumentDeserializer; +import datawave.query.iterator.profile.FinalDocumentTrackingIterator; + +class DedupingIterator implements Iterator { static final int BLOOM_EXPECTED_DEFAULT = 500000; static final double BLOOM_FPP_DEFAULT = 1e-15; - private Iterator> delegate; - private Entry next; + private Iterator delegate; + private Result next; + private byte[] nextBytes; private BloomFilter bloom = null; - public DedupingIterator(Iterator> iterator, int bloomFilterExpected, double bloomFilterFpp) { + public DedupingIterator(Iterator iterator) { + this(iterator, BLOOM_EXPECTED_DEFAULT, BLOOM_FPP_DEFAULT); + } + + public DedupingIterator(Iterator iterator, BloomFilter bloom) { + this(iterator, bloom, BLOOM_EXPECTED_DEFAULT, BLOOM_FPP_DEFAULT); + } + + public DedupingIterator(Iterator iterator, int bloomFilterExpected, double bloomFilterFpp) { + this(iterator, null, bloomFilterExpected, bloomFilterFpp); + } + + public DedupingIterator(Iterator iterator, BloomFilter bloom, int bloomFilterExpected, double bloomFilterFpp) { this.delegate = iterator; - this.bloom = BloomFilter.create(new ByteFunnel(), 
bloomFilterExpected, bloomFilterFpp); + if (bloom == null) { + bloom = BloomFilter.create(new ByteFunnel(), bloomFilterExpected, bloomFilterFpp); + } + this.bloom = bloom; getNext(); } - public DedupingIterator(Iterator> iterator) { - this(iterator, BLOOM_EXPECTED_DEFAULT, BLOOM_FPP_DEFAULT); + public BloomFilter getBloom() { + return bloom; } private void getNext() { @@ -40,7 +57,7 @@ private void getNext() { } } - private byte[] getBytes(Entry entry) { + private byte[] getBytes(Result entry) { ByteSequence row = entry.getKey().getRowData(); ByteSequence cf = entry.getKey().getColumnFamilyData(); @@ -72,9 +89,14 @@ public boolean hasNext() { } @Override - public Entry next() { - Entry nextReturn = next; + public Result next() { + Result nextReturn = next; if (next != null) { + if (nextBytes != null) { + // now that we are actually returning this result, update the bloom filter + bloom.put(nextBytes); + nextBytes = null; + } getNext(); } return nextReturn; @@ -85,15 +107,25 @@ public void remove() { throw new UnsupportedOperationException("Remove not supported on DedupingIterator"); } - private boolean isDuplicate(Entry entry) { - byte[] bytes = getBytes(entry); - if (bloom.mightContain(bytes)) { + private boolean isDuplicate(Result entry) { + // allow empty results to go through (required to track completion of ranges) + if (entry.getKey() == null) { + return false; + } + // allow all final documents through + if (FinalDocumentTrackingIterator.isFinalDocumentKey(entry.getKey())) { + return false; + } + nextBytes = getBytes(entry); + if (bloom.mightContain(nextBytes)) { + nextBytes = null; return true; } - bloom.put(bytes); return false; } + KryoDocumentDeserializer deserializer = new KryoDocumentDeserializer(); + public static class ByteFunnel implements Funnel, Serializable { private static final long serialVersionUID = -2126172579955897986L; diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ParentQueryLogic.java 
b/warehouse/query-core/src/main/java/datawave/query/tables/ParentQueryLogic.java index dc891f01c09..14d0fb672cf 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ParentQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ParentQueryLogic.java @@ -8,13 +8,13 @@ import com.google.common.collect.Iterators; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.iterator.ParentQueryIterator; import datawave.query.planner.QueryPlanner; import datawave.query.tld.DedupeColumnFamilies; import datawave.query.transformer.DocumentTransformer; import datawave.query.transformer.ParentDocumentTransformer; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.QueryLogicTransformer; public class ParentQueryLogic extends ShardQueryLogic { public ParentQueryLogic() {} diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/PartitionedQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/PartitionedQueryLogic.java index 7a3176bfa01..3038d7c5a43 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/PartitionedQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/PartitionedQueryLogic.java @@ -11,11 +11,11 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.QueryParameters; import datawave.query.tables.chunk.Chunker; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public class PartitionedQueryLogic extends ShardQueryLogic { protected static final Logger log = Logger.getLogger(PartitionedQueryLogic.class); @@ -64,7 +64,7 @@ 
public GenericQueryConfiguration initialize(AccumuloClient client, Query setting if (chunker.preInitializeQueryLogic()) { GenericQueryConfiguration config = super.initialize(this.client, this.settings, this.auths); - if (!config.getQueries().hasNext()) { + if (!config.getQueriesIter().hasNext()) { return config; } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/RangeStreamScanner.java b/warehouse/query-core/src/main/java/datawave/query/tables/RangeStreamScanner.java index a35949efa6d..f6cb3d74f1c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/RangeStreamScanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/RangeStreamScanner.java @@ -5,7 +5,6 @@ import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; -import java.util.AbstractMap; import java.util.ArrayDeque; import java.util.Collection; import java.util.Collections; @@ -21,6 +20,8 @@ import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantReadWriteLock; +import javax.annotation.Nullable; + import org.apache.accumulo.core.client.IteratorSetting; import org.apache.accumulo.core.client.Scanner; import org.apache.accumulo.core.client.ScannerBase; @@ -36,18 +37,20 @@ import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; +import com.google.common.base.Function; import com.google.common.base.Throwables; -import com.google.common.collect.Maps; +import com.google.common.collect.Iterators; import com.google.common.collect.Queues; import com.google.common.util.concurrent.MoreExecutors; +import datawave.core.query.configuration.Result; +import datawave.microservice.query.Query; import datawave.mr.bulk.RfileScanner; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.index.lookup.IndexInfo; import datawave.query.index.lookup.IndexMatch; import datawave.query.index.lookup.ShardEquality; import datawave.query.tables.stats.ScanSessionStats.TIMERS; -import 
datawave.webservice.query.Query; /** * Purpose: Extends Scanner session so that we can modify how we build our subsequent ranges. Breaking this out cleans up the code. May require implementation @@ -75,9 +78,9 @@ public class RangeStreamScanner extends ScannerSession implements Callable> currentQueue; + protected ArrayDeque currentQueue; - protected Entry prevDay = null; + protected Result prevDay = null; protected ReentrantReadWriteLock queueLock = new ReentrantReadWriteLock(true); @@ -292,8 +295,8 @@ public String advanceQueues(String seekShard) { * shard to seek to * @return the matched shard, the next highest shard, or null */ - public String advanceQueueToShard(Queue> queue, String shard) { - Entry top; + public String advanceQueueToShard(Queue queue, String shard) { + Result top; String topShard = null; boolean advancing = true; @@ -428,12 +431,12 @@ protected void run() { } } - protected int scannerInvariant(final Iterator> iter) { - PeekingIterator> kvIter = new PeekingIterator<>(iter); + protected int scannerInvariant(final Iterator iter) { + PeekingIterator kvIter = new PeekingIterator<>(iter); int retrievalCount = 0; - Entry myEntry; + Result myEntry; String currentDay = null; @@ -455,7 +458,7 @@ protected int scannerInvariant(final Iterator> iter) { writeLock.lock(); try { while (kvIter.hasNext()) { - Entry currentKeyValue = kvIter.peek(); + Result currentKeyValue = kvIter.peek(); // become a pass-through if we've seen an unexpected key. 
if (seenUnexpectedKey) { @@ -521,7 +524,7 @@ protected int scannerInvariant(final Iterator> iter) { if (currentQueue.size() >= shardsPerDayThreshold && stats.getPercentile(50) > MAX_MEDIAN) { - Entry top = currentQueue.poll(); + Result top = currentQueue.poll(); Key topKey = top.getKey(); if (log.isTraceEnabled()) @@ -530,7 +533,7 @@ protected int scannerInvariant(final Iterator> iter) { Value newValue = writeInfoToValue(); - myEntry = Maps.immutableEntry(newKey, newValue); + myEntry = new Result(top.getContext(), newKey, newValue); lastSeenKey = newKey; try { @@ -598,10 +601,11 @@ private int dequeue() { private int dequeue(boolean forceAll) { int count = 0; - Queue> kvIter = Queues.newArrayDeque(currentQueue); + Queue kvIter = Queues.newArrayDeque(currentQueue); + currentQueue.clear(); boolean result = true; - for (Entry top : kvIter) { + for (Result top : kvIter) { if (result) { do { @@ -791,8 +795,15 @@ else if (baseScanner instanceof RfileScanner) if (baseScanner instanceof Scanner) ((Scanner) baseScanner).setRange(currentRange); - Iterator> iter = baseScanner.iterator(); - + Iterator iter = Iterators.transform(baseScanner.iterator(), new Function,Result>() { + @Override + public Result apply(@Nullable Entry input) { + if (input == null) { + return null; + } + return new Result(input.getKey(), input.getValue()); + } + }); // do not continue if we've reached the end of the corpus if (!iter.hasNext()) { @@ -867,9 +878,9 @@ private boolean isBeyondRange(Key lastSeenKey, Key endKey) { } // Overloaded - public static Entry trimTrailingUnderscore(Entry entry) { + public static Result trimTrailingUnderscore(Result entry) { Key nextKey = trimTrailingUnderscore(entry.getKey()); - return new AbstractMap.SimpleEntry<>(nextKey, entry.getValue()); + return new Result(entry.getContext(), nextKey, entry.getValue()); } /** diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEdgeQueryLogic.java 
b/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEdgeQueryLogic.java index 9152ef54348..f49bad80217 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEdgeQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEdgeQueryLogic.java @@ -7,15 +7,17 @@ import org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.edge.model.EdgeModelFields; +import datawave.edge.model.EdgeModelFieldsFactory; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.tables.edge.EdgeQueryLogic; import datawave.query.tables.remote.RemoteQueryLogic; import datawave.query.transformer.EdgeQueryTransformerSupport; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.result.EdgeQueryResponseBase; import datawave.webservice.query.result.edge.EdgeBase; import datawave.webservice.query.result.event.ResponseObjectFactory; @@ -28,6 +30,8 @@ public class RemoteEdgeQueryLogic extends BaseRemoteQueryLogic impleme protected static final Logger log = ThreadConfigurableLogger.getLogger(RemoteEdgeQueryLogic.class); + protected EdgeModelFields edgeFields; + /** * Basic constructor */ @@ -56,7 +60,7 @@ public void setupQuery(GenericQueryConfiguration genericConfig) throws Exception @Override public QueryLogicTransformer createTransformer(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory) { - return new 
EdgeBaseTransformer(settings, markingFunctions, responseObjectFactory); + return new EdgeBaseTransformer(settings, markingFunctions, responseObjectFactory, edgeFields); } @Override @@ -87,7 +91,7 @@ private class RemoteQueryLogicIterator implements Iterator { public boolean hasNext() { if (data.isEmpty() && !complete) { try { - EdgeQueryResponseBase response = (EdgeQueryResponseBase) remoteQueryService.next(getRemoteId(), getCallerObject()); + EdgeQueryResponseBase response = (EdgeQueryResponseBase) remoteQueryService.next(getRemoteId(), getCurrentUser()); if (response != null) { if (response.getTotalResults() == 0) { if (!response.isPartialResults()) { @@ -116,10 +120,22 @@ public EdgeBase next() { } } + public void setEdgeModelFieldsFactory(EdgeModelFieldsFactory edgeModelFieldsFactory) { + this.edgeFields = edgeModelFieldsFactory.createFields(); + } + + public void setEdgeFields(EdgeModelFields edgeFields) { + this.edgeFields = edgeFields; + } + + public EdgeModelFields getEdgeFields() { + return edgeFields; + } + private class EdgeBaseTransformer extends EdgeQueryTransformerSupport { - public EdgeBaseTransformer(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory) { - super(settings, markingFunctions, responseObjectFactory); + public EdgeBaseTransformer(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory, EdgeModelFields fields) { + super(settings, markingFunctions, responseObjectFactory, fields); } @Override diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEventQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEventQueryLogic.java index 5612244c59f..b9e0ce5a9c2 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEventQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/RemoteEventQueryLogic.java @@ -10,15 +10,14 @@ import org.apache.accumulo.core.data.Value; import 
org.apache.log4j.Logger; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.marking.MarkingFunctions; -import datawave.query.tables.remote.RemoteQueryLogic; +import datawave.microservice.query.Query; import datawave.query.transformer.EventQueryTransformerSupport; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.result.EventQueryResponseBase; @@ -27,7 +26,7 @@ *

    Overview

    This is a query logic implementation that can handle delegating to a remote event query logic (i.e. one that returns an extension of * EventQueryResponseBase). */ -public class RemoteEventQueryLogic extends BaseRemoteQueryLogic implements RemoteQueryLogic { +public class RemoteEventQueryLogic extends BaseRemoteQueryLogic { protected static final Logger log = ThreadConfigurableLogger.getLogger(RemoteEventQueryLogic.class); @@ -90,7 +89,7 @@ private class RemoteQueryLogicIterator implements Iterator { public boolean hasNext() { if (data.isEmpty() && !complete) { try { - EventQueryResponseBase response = (EventQueryResponseBase) remoteQueryService.next(getRemoteId(), getCallerObject()); + EventQueryResponseBase response = (EventQueryResponseBase) remoteQueryService.next(getRemoteId(), currentUser); if (response != null) { if (response.getReturnedEvents() == 0) { if (response.isPartialResults()) { diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ScannerFactory.java b/warehouse/query-core/src/main/java/datawave/query/tables/ScannerFactory.java index cd2f204d9c7..6145ba40116 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ScannerFactory.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ScannerFactory.java @@ -19,7 +19,9 @@ import com.google.common.base.Preconditions; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.ingest.data.config.ingest.AccumuloHelper; +import datawave.microservice.query.Query; import datawave.mr.bulk.BulkInputFormat; import datawave.mr.bulk.MultiRfileInputformat; import datawave.mr.bulk.RfileScanner; @@ -27,8 +29,6 @@ import datawave.query.tables.stats.ScanSessionStats; import datawave.query.util.QueryScannerHelper; import datawave.webservice.common.connection.WrappedConnector; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public class ScannerFactory { diff --git 
a/warehouse/query-core/src/main/java/datawave/query/tables/ScannerSession.java b/warehouse/query-core/src/main/java/datawave/query/tables/ScannerSession.java index 7741a43d1d5..6cbccce6089 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ScannerSession.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ScannerSession.java @@ -4,7 +4,6 @@ import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ConcurrentLinkedQueue; @@ -16,7 +15,6 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.PartialKey; import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.log4j.Logger; @@ -28,11 +26,12 @@ import com.google.common.util.concurrent.AbstractExecutionThreadService; import com.google.common.util.concurrent.MoreExecutors; +import datawave.core.query.configuration.Result; +import datawave.microservice.query.Query; import datawave.query.tables.AccumuloResource.ResourceFactory; import datawave.query.tables.stats.ScanSessionStats; import datawave.query.tables.stats.ScanSessionStats.TIMERS; import datawave.query.tables.stats.StatsListener; -import datawave.webservice.query.Query; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; /** @@ -40,8 +39,7 @@ * result queue is polled in the actual next() and hasNext() calls. Note that the uncaughtExceptionHandler from the Query is used to pass exceptions up which * will also fail the overall query if something happens. If this is not desired then a local handler should be set. 
*/ -public class ScannerSession extends AbstractExecutionThreadService implements Iterator> { - +public class ScannerSession extends AbstractExecutionThreadService implements Iterator { /** * last seen key, used for moving across the sliding window of ranges. */ @@ -55,13 +53,11 @@ public class ScannerSession extends AbstractExecutionThreadService implements It /** * Result queue, providing us objects */ - protected ArrayBlockingQueue> resultQueue; - + protected ArrayBlockingQueue resultQueue; /** * Current entry to return. this will be popped from the result queue. */ - protected Entry currentEntry; - + protected Result currentEntry; /** * Delegates scanners to us, blocking if none are available or used by other sources. */ @@ -286,18 +282,26 @@ public boolean hasNext() { // if we are new, let's start and wait if (state() == State.NEW) { - // we have just started, so let's start and wait - // until we've completed the start process - if (null != stats) - initializeTimers(); - - // these two guava methods replaced behavior of startAndWait() from version 15 but - // will now throw an exception if another thread closes the session so catch and ignore - startAsync(); - try { - awaitRunning(); - } catch (IllegalStateException e) { - log.debug("Session was closed while waiting to start up."); + // make sure this is not done multiple time concurrently + synchronized (this) { + if (state() == State.NEW) { + if (null != stats) { + initializeTimers(); + } + startAsync(); + try { + // we have just started, so let's start and wait + // until we've completed the start process + awaitRunning(); + } catch (IllegalStateException e) { + // This is thrown if the state is anything other than RUNNING + // STOPPING, and TERMINATED are valid as they indicate successful execution + // FAILED is not ok, and should be thrown + if (state() == State.FAILED) { + throw e; + } + } + } } } @@ -376,9 +380,9 @@ protected void initializeTimers() { * Note that this method needs to check the 
uncaught exception handler and propogate any set throwables. */ @Override - public Entry next() { + public Result next() { try { - Entry retVal = currentEntry; + Result retVal = currentEntry; currentEntry = null; return retVal; } finally { @@ -470,7 +474,7 @@ protected void findTop() throws Exception { delegatedResource = ResourceFactory.initializeResource(delegatedResourceInitializer, delegatedResource, tableName, auths, currentRange) .setOptions(options); - Iterator> iter = delegatedResource.iterator(); + Iterator iter = Result.resultIterator(null, delegatedResource.iterator()); // do not continue if we've reached the end of the corpus @@ -534,10 +538,10 @@ protected void findTop() throws Exception { } } - protected int scannerInvariant(final Iterator> iter) { + protected int scannerInvariant(final Iterator iter) { int retrievalCount = 0; - Entry myEntry = null; + Result myEntry = null; Key highest = null; while (iter.hasNext()) { myEntry = iter.next(); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ShardIndexQueryTable.java b/warehouse/query-core/src/main/java/datawave/query/tables/ShardIndexQueryTable.java index 7d10274bcf4..d10c7d978de 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ShardIndexQueryTable.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ShardIndexQueryTable.java @@ -4,6 +4,7 @@ import static com.google.common.collect.Iterators.transform; import static datawave.query.config.ShardQueryConfiguration.PARAM_VALUE_SEP_STR; +import java.io.Closeable; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; @@ -19,14 +20,12 @@ import java.util.Set; import java.util.TreeSet; import java.util.concurrent.ExecutionException; -import java.util.concurrent.TimeUnit; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.BatchScanner; import org.apache.accumulo.core.client.IteratorSetting; import 
org.apache.accumulo.core.client.ScannerBase; import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.sample.SamplerConfiguration; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.Value; @@ -48,7 +47,17 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Multimaps; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.data.type.Type; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryParameters; import datawave.query.config.ShardIndexQueryConfiguration; import datawave.query.config.ShardQueryConfiguration; @@ -70,46 +79,33 @@ import datawave.query.util.MetadataHelper; import datawave.query.util.MetadataHelperFactory; import datawave.util.TableName; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * Query Table implementation that accepts a single term and returns information from the global index for that term. The response includes the number of * occurrences of the term by type by day. 
*/ -public class ShardIndexQueryTable extends BaseQueryLogic { +public class ShardIndexQueryTable extends BaseQueryLogic implements CheckpointableQueryLogic { private static final Logger log = Logger.getLogger(ShardIndexQueryTable.class); - private String indexTableName; - private String reverseIndexTableName; - private boolean fullTableScanEnabled = true; - private boolean allowLeadingWildcard = true; - private List realmSuffixExclusionPatterns = null; - protected String modelName = "DATAWAVE"; - protected String modelTableName = "DatawaveMetadata"; - protected MetadataHelperFactory metadataHelperFactory; protected ScannerFactory scannerFactory; - protected QueryModel queryModel; + private ShardIndexQueryConfiguration config; + private MetadataHelperFactory metadataHelperFactory; public ShardIndexQueryTable() {} public ShardIndexQueryTable(ShardIndexQueryTable other) { super(other); - this.indexTableName = other.getIndexTableName(); - this.reverseIndexTableName = other.getReverseIndexTableName(); - this.fullTableScanEnabled = other.isFullTableScanEnabled(); - this.allowLeadingWildcard = other.isAllowLeadingWildcard(); - this.scannerFactory = other.scannerFactory; - this.queryModel = other.getQueryModel(); - this.modelName = other.getModelName(); - this.modelTableName = other.getModelTableName(); - this.metadataHelperFactory = other.getMetadataHelperFactory(); - this.setRealmSuffixExclusionPatterns(other.getRealmSuffixExclusionPatterns()); + this.config = ShardIndexQueryConfiguration.create(other); + } + + @Override + public ShardIndexQueryConfiguration getConfig() { + if (this.config == null) { + this.config = ShardIndexQueryConfiguration.create(); + } + + return this.config; } @Override @@ -161,27 +157,35 @@ public void setMetadataHelperFactory(MetadataHelperFactory metadataHelperFactory this.metadataHelperFactory = metadataHelperFactory; } + public void setMetadataTableName(String tableName) { + getConfig().setMetadataTableName(tableName); + } + + public String 
getMetadataTableName() { + return getConfig().getMetadataTableName(); + } + public String getIndexTableName() { - return indexTableName; + return getConfig().getIndexTableName(); } public void setIndexTableName(String indexTableName) { - this.indexTableName = indexTableName; + getConfig().setIndexTableName(indexTableName); } public String getReverseIndexTableName() { - return reverseIndexTableName; + return getConfig().getReverseIndexTableName(); } public void setReverseIndexTableName(String reverseIndexTableName) { - this.reverseIndexTableName = reverseIndexTableName; + getConfig().setReverseIndexTableName(reverseIndexTableName); } @Override public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { - ShardIndexQueryConfiguration config = new ShardIndexQueryConfiguration(this, settings); + this.config = new ShardIndexQueryConfiguration(this, settings); this.scannerFactory = new ScannerFactory(client); - MetadataHelper metadataHelper = initializeMetadataHelper(client, config.getMetadataTableName(), auths); + MetadataHelper metadataHelper = initializeMetadataHelper(client, getConfig().getMetadataTableName(), auths); if (StringUtils.isEmpty(settings.getQuery())) { throw new IllegalArgumentException("Query cannot be null"); @@ -193,48 +197,40 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting String tModelName = getTrimmedOrNull(settings, QueryParameters.PARAMETER_MODEL_NAME); if (tModelName != null) { - modelName = tModelName; + getConfig().setModelName(tModelName); } String tModelTableName = getTrimmedOrNull(settings, QueryParameters.PARAMETER_MODEL_TABLE_NAME); if (tModelTableName != null) { - modelTableName = tModelTableName; + getConfig().setModelTableName(tModelTableName); } - queryModel = metadataHelper.getQueryModel(modelTableName, modelName, null); + getConfig().setQueryModel(metadataHelper.getQueryModel(getConfig().getModelTableName(), getConfig().getModelName(), null)); 
String datatypeFilterString = getTrimmedOrNull(settings, QueryParameters.DATATYPE_FILTER_SET); if (datatypeFilterString != null) { - config.setDatatypeFilter(new HashSet<>(Arrays.asList(datatypeFilterString.split(PARAM_VALUE_SEP_STR)))); + getConfig().setDatatypeFilter(new HashSet<>(Arrays.asList(datatypeFilterString.split(PARAM_VALUE_SEP_STR)))); if (log.isDebugEnabled()) { - log.debug("Data type filter set to " + config.getDatatypeFilterAsString()); + log.debug("Data type filter set to " + getConfig().getDatatypeFilterAsString()); } } - config.setClient(client); - config.setAuthorizations(auths); - - if (indexTableName != null) { - config.setIndexTableName(indexTableName); - } - - if (reverseIndexTableName != null) { - config.setReverseIndexTableName(reverseIndexTableName); - } + getConfig().setClient(client); + getConfig().setAuthorizations(auths); if (settings.getBeginDate() != null) { - config.setBeginDate(settings.getBeginDate()); + getConfig().setBeginDate(settings.getBeginDate()); } else { - config.setBeginDate(new Date(0)); + getConfig().setBeginDate(new Date(0)); if (log.isDebugEnabled()) { log.debug("No begin date supplied in settings."); } } if (settings.getEndDate() != null) { - config.setEndDate(settings.getEndDate()); + getConfig().setEndDate(settings.getEndDate()); } else { - config.setEndDate(new Date(Long.MAX_VALUE)); + getConfig().setEndDate(new Date(Long.MAX_VALUE)); if (log.isDebugEnabled()) { log.debug("No end date supplied in settings."); } @@ -245,45 +241,43 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting parser.setAllowLeadingWildCard(this.isAllowLeadingWildcard()); QueryNode node = parser.parse(settings.getQuery().trim()); // TODO: Validate that this is a simple list of terms type of query - config.setQueryString(node.getOriginalQuery()); + getConfig().setQueryString(node.getOriginalQuery()); if (log.isDebugEnabled()) { log.debug("Original Query = " + settings.getQuery().trim()); log.debug("JEXL Query = 
" + node.getOriginalQuery()); } // Parse & flatten the query. - ASTJexlScript origScript = JexlASTHelper.parseAndFlattenJexlQuery(config.getQueryString()); - + ASTJexlScript origScript = JexlASTHelper.parseAndFlattenJexlQuery(getConfig().getQueryString()); ASTJexlScript script; try { - script = UnfieldedIndexExpansionVisitor.expandUnfielded(config, this.scannerFactory, metadataHelper, origScript); + script = UnfieldedIndexExpansionVisitor.expandUnfielded(getConfig(), this.scannerFactory, metadataHelper, origScript); } catch (EmptyUnfieldedTermExpansionException e) { Multimap emptyMap = Multimaps.unmodifiableMultimap(HashMultimap.create()); - config.setNormalizedTerms(emptyMap); - config.setNormalizedPatterns(emptyMap); - return config; + getConfig().setNormalizedTerms(emptyMap); + getConfig().setNormalizedPatterns(emptyMap); + return getConfig(); } - Set dataTypes = config.getDatatypeFilter(); + Set dataTypes = getConfig().getDatatypeFilter(); Set allFields = metadataHelper.getAllFields(dataTypes); - script = QueryModelVisitor.applyModel(script, queryModel, allFields); - + script = QueryModelVisitor.applyModel(script, getConfig().getQueryModel(), allFields); if (log.isTraceEnabled()) { log.trace("fetching dataTypes from FetchDataTypesVisitor"); } - Multimap> fieldToDataTypeMap = FetchDataTypesVisitor.fetchDataTypes(metadataHelper, config.getDatatypeFilter(), script); - config.setDataTypes(fieldToDataTypeMap); - config.setQueryFieldsDatatypes(fieldToDataTypeMap); + Multimap> fieldToDataTypeMap = FetchDataTypesVisitor.fetchDataTypes(metadataHelper, getConfig().getDatatypeFilter(), script); + getConfig().setDataTypes(fieldToDataTypeMap); + getConfig().setQueryFieldsDatatypes(fieldToDataTypeMap); final Set indexedFields = metadataHelper.getIndexedFields(dataTypes); - config.setIndexedFields(indexedFields); + getConfig().setIndexedFields(indexedFields); final Set reverseIndexedFields = metadataHelper.getReverseIndexedFields(dataTypes); - 
config.setReverseIndexedFields(reverseIndexedFields); + getConfig().setReverseIndexedFields(reverseIndexedFields); final Multimap> normalizedFields = metadataHelper.getFieldsToDatatypes(dataTypes); - config.setNormalizedFieldsDatatypes(normalizedFields); + getConfig().setNormalizedFieldsDatatypes(normalizedFields); if (log.isTraceEnabled()) { log.trace("Normalizers:"); @@ -292,15 +286,15 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting } } - script = ExpandMultiNormalizedTerms.expandTerms(config, metadataHelper, script); + script = ExpandMultiNormalizedTerms.expandTerms(getConfig(), metadataHelper, script); Multimap literals = LiteralNodeVisitor.getLiterals(script); Multimap patterns = PatternNodeVisitor.getPatterns(script); Map,Range> rangesForTerms = Maps.newHashMap(); Map,Entry> rangesForPatterns = Maps.newHashMap(); - config.setNormalizedTerms(literals); - config.setNormalizedPatterns(patterns); + getConfig().setNormalizedTerms(literals); + getConfig().setNormalizedPatterns(patterns); if (log.isDebugEnabled()) { log.debug("Normalized Literals = " + literals); @@ -313,15 +307,42 @@ public GenericQueryConfiguration initialize(AccumuloClient client, Query setting } for (Entry entry : patterns.entries()) { ShardIndexQueryTableStaticMethods.RefactoredRangeDescription r = ShardIndexQueryTableStaticMethods.getRegexRange(entry, isFullTableScanEnabled(), - metadataHelper, config); + metadataHelper, getConfig()); rangesForPatterns.put(entry, Maps.immutableEntry(r.range, r.isForReverseIndex)); } - config.setRangesForTerms(rangesForTerms); - config.setRangesForPatterns(rangesForPatterns); + getConfig().setRangesForTerms(rangesForTerms); + getConfig().setRangesForPatterns(rangesForPatterns); - return config; + getConfig().setQueries(createQueries(getConfig())); + + return getConfig(); + } + + public List createQueries(ShardIndexQueryConfiguration config) throws QueryException, TableNotFoundException, IOException, ExecutionException { + 
final List queries = Lists.newLinkedList(); + + for (Entry termEntry : getConfig().getNormalizedTerms().entries()) { + String query = termEntry.getKey(); + Range range = getConfig().getRangesForTerms().get(termEntry); + List settings = getIteratorSettingsForDiscovery(getConfig(), Collections.singleton(termEntry.getValue()), Collections.emptySet(), + getConfig().getTableName().equals(getConfig().getReverseIndexTableName()), false); + List cfs = ShardIndexQueryTableStaticMethods.getColumnFamilies(getConfig(), false, Collections.singleton(termEntry.getKey())); + queries.add(new QueryData(config.getIndexTableName(), query, Collections.singleton(range), cfs, settings)); + } + + for (Entry patternEntry : getConfig().getNormalizedPatterns().entries()) { + String query = patternEntry.getKey(); + Entry rangeEntry = getConfig().getRangesForPatterns().get(patternEntry); + String tName = rangeEntry.getValue() ? TableName.SHARD_RINDEX : TableName.SHARD_INDEX; + List settings = getIteratorSettingsForDiscovery(getConfig(), Collections.emptySet(), + Collections.singleton(patternEntry.getValue()), rangeEntry.getValue(), false); + List cfs = ShardIndexQueryTableStaticMethods.getColumnFamilies(getConfig(), rangeEntry.getValue(), + Collections.singleton(patternEntry.getKey())); + queries.add(new QueryData(tName, query, Collections.singleton(rangeEntry.getKey()), cfs, settings)); + } + return queries; } private String getTrimmedOrNull(Query settings, String value) { @@ -344,32 +365,26 @@ public void setupQuery(GenericQueryConfiguration genericConfig) throws QueryExce throw new QueryException("Did not receive a ShardIndexQueryConfiguration instance!!"); } - ShardIndexQueryConfiguration config = (ShardIndexQueryConfiguration) genericConfig; - final List> batchscanners = Lists.newLinkedList(); + this.config = (ShardIndexQueryConfiguration) genericConfig; + final List> batchscanners = Lists.newLinkedList(); - for (Entry termEntry : config.getNormalizedTerms().entries()) { + for (QueryData 
qd : config.getQueries()) { // scan the table - BatchScanner bs = configureBatchScannerForDiscovery(config, this.scannerFactory, TableName.SHARD_INDEX, - Collections.singleton(config.getRangesForTerms().get(termEntry)), Collections.singleton(termEntry.getValue()), - Collections.emptySet(), config.getTableName().equals(config.getReverseIndexTableName()), false, - Collections.singleton(termEntry.getKey())); - - batchscanners.add(Maps.immutableEntry(bs, false)); - } + BatchScanner bs = scannerFactory.newScanner(qd.getTableName(), config.getAuthorizations(), config.getNumQueryThreads(), config.getQuery()); - for (Entry patternEntry : config.getNormalizedPatterns().entries()) { - Entry rangeEntry = config.getRangesForPatterns().get(patternEntry); - String tName = rangeEntry.getValue() ? TableName.SHARD_RINDEX : TableName.SHARD_INDEX; + bs.setRanges(qd.getRanges()); + for (IteratorSetting setting : qd.getSettings()) { + bs.addScanIterator(setting); + } - // scan the table - BatchScanner bs = configureBatchScannerForDiscovery(config, this.scannerFactory, tName, Collections.singleton(rangeEntry.getKey()), - Collections.emptySet(), Collections.singleton(patternEntry.getValue()), rangeEntry.getValue(), false, - Collections.singleton(patternEntry.getKey())); + for (String cf : qd.getColumnFamilies()) { + bs.fetchColumnFamily(new Text(cf)); + } - batchscanners.add(Maps.immutableEntry(bs, rangeEntry.getValue())); + batchscanners.add(Maps.immutableEntry(bs, qd)); } - final Iterator> batchScannerIterator = batchscanners.iterator(); + final Iterator> batchScannerIterator = batchscanners.iterator(); this.iterator = concat(transform(new CloseableIterator(batchScannerIterator), new Function,Iterator>() { DataInputBuffer in = new DataInputBuffer(); @@ -391,178 +406,78 @@ public Iterator apply(Entry from) { return thangs.iterator(); } })); + } - this.scanner = new ScannerBase() { - - @Override - public void addScanIterator(IteratorSetting cfg) {} - - @Override - public void 
clearColumns() {} - - @Override - public void clearScanIterators() {} - - @Override - public void close() {} - - @Override - public Authorizations getAuthorizations() { - return null; - } - - @Override - public void setSamplerConfiguration(SamplerConfiguration samplerConfiguration) { - - } - - @Override - public SamplerConfiguration getSamplerConfiguration() { - return null; - } - - @Override - public void clearSamplerConfiguration() { - - } - - @Override - public void setBatchTimeout(long l, TimeUnit timeUnit) { - - } - - @Override - public long getBatchTimeout(TimeUnit timeUnit) { - return 0; - } - - @Override - public void setClassLoaderContext(String s) { - - } - - @Override - public void clearClassLoaderContext() { - - } - - @Override - public String getClassLoaderContext() { - return null; - } - - @Override - public ConsistencyLevel getConsistencyLevel() { - return null; - } - - @Override - public void setConsistencyLevel(ConsistencyLevel consistencyLevel) { - - } - - @Override - public void fetchColumn(Text colFam, Text colQual) {} - - @Override - public void fetchColumn(IteratorSetting.Column column) { - - } - - @Override - public void fetchColumnFamily(Text col) {} - - @Override - public long getTimeout(TimeUnit timeUnit) { - return 0; - } - - @Override - public Iterator> iterator() { - return null; - } + /** + * Implementations use the configuration to setup execution of a portion of their query. getTransformIterator should be used to get the partial results if + * any. 
+ * + * @param client + * The accumulo client + * @param baseConfig + * The shard query configuration + * @param checkpoint + */ + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration baseConfig, QueryCheckpoint checkpoint) throws Exception { + ShardIndexQueryConfiguration config = (ShardIndexQueryConfiguration) baseConfig; + baseConfig.setQueries(checkpoint.getQueries()); + config.setClient(client); - @Override - public void removeScanIterator(String iteratorName) {} + scannerFactory = new ScannerFactory(client); + MetadataHelper metadataHelper = initializeMetadataHelper(client, config.getMetadataTableName(), config.getAuthorizations()); + config.setQueryModel(metadataHelper.getQueryModel(config.getModelTableName(), config.getModelName(), null)); - @Override - public void setTimeout(long timeOut, TimeUnit timeUnit) {} + setupQuery(config); + } - @Override - public void updateScanIteratorOption(String iteratorName, String key, String value) {} + @Override + public boolean isCheckpointable() { + return getConfig().isCheckpointable(); + } - }; + @Override + public void setCheckpointable(boolean checkpointable) { + getConfig().setCheckpointable(checkpointable); } /** - * scan a global index (shardIndex or shardReverseIndex) for the specified ranges and create a set of fieldname/TermInformation values. The Key/Values - * scanned are trimmed based on a set of terms to match, and a set of data types (found in the config) + * This can be called at any point to get a checkpoint such that this query logic instance can be torn down to be rebuilt later. At a minimum this should be + * called after the getTransformIterator is depleted of results. 
* - * @param config - * the shard config - * @param scannerFactory - * the scanner factory - * @param tableName - * the table name - * @param ranges - * a set of ranges - * @param literals - * the list of literals - * @param patterns - * the list of patterns - * @param reverseIndex - * the reverse index flag - * @param expansionFields - * the expansion fields - * @return the batch scanner - * @throws TableNotFoundException - * if the table is not found + * @param queryKey + * The query key to include in the checkpoint + * @return The query checkpoint */ - public static BatchScanner configureBatchScanner(ShardQueryConfiguration config, ScannerFactory scannerFactory, String tableName, Collection ranges, - Collection literals, Collection patterns, boolean reverseIndex, Collection expansionFields) - throws TableNotFoundException { - - // if we have no ranges, then nothing to scan - if (ranges.isEmpty()) { - return null; + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot checkpoint a query that is not checkpointable. Try calling setCheckpointable(true) first."); } - if (log.isTraceEnabled()) { - log.trace("Scanning " + tableName + " against " + ranges + " with auths " + config.getAuthorizations()); + // if we have started returning results, then capture the state of the query data objects + if (this.iterator != null) { + List checkpoints = Lists.newLinkedList(); + for (QueryData qd : getConfig().getQueries()) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(qd))); + } + return checkpoints; } - - BatchScanner bs = scannerFactory.newScanner(tableName, config.getAuthorizations(), config.getNumQueryThreads(), config.getQuery()); - - bs.setRanges(ranges); - - // The begin date from the query may be down to the second, for doing lookups in the index we want to use the day because - // the times in the index table have been truncated to the day. 
- Date begin = DateUtils.truncate(config.getBeginDate(), Calendar.DAY_OF_MONTH); - // we don't need to bump up the end date any more because it's not apart of the range set on the scanner - Date end = config.getEndDate(); - - LongRange dateRange = new LongRange(begin.getTime(), end.getTime()); - - ShardIndexQueryTableStaticMethods.configureGlobalIndexDateRangeFilter(config, bs, dateRange); - ShardIndexQueryTableStaticMethods.configureGlobalIndexDataTypeFilter(config, bs, config.getDatatypeFilter()); - - ShardIndexQueryTableStaticMethods.configureGlobalIndexTermMatchingIterator(config, bs, literals, patterns, reverseIndex, true, expansionFields); - - return bs; - } - - public static BatchScanner configureBatchScannerForDiscovery(ShardQueryConfiguration config, ScannerFactory scannerFactory, String tableName, - Collection ranges, Collection literals, Collection patterns, boolean reverseIndex, boolean uniqueTermsOnly, - Collection expansionFields) throws TableNotFoundException { - - // if we have no ranges, then nothing to scan - if (ranges.isEmpty()) { - return null; + // otherwise we still need to plan or there are no results + else { + return Lists.newArrayList(new QueryCheckpoint(queryKey)); } + } - BatchScanner bs = scannerFactory.newScanner(tableName, config.getAuthorizations(), config.getNumQueryThreads(), config.getQuery()); + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + // for the shard index query logic, the query data objects automatically get update with + // the last result returned, so the checkpoint should already be updated! 
+ return checkpoint; + } - bs.setRanges(ranges); + public static List getIteratorSettingsForDiscovery(ShardQueryConfiguration config, Collection literals, + Collection patterns, boolean reverseIndex, boolean uniqueTermsOnly) { // The begin date from the query may be down to the second, for doing lookups in the index we want to use the day because // the times in the index table have been truncated to the day. @@ -572,31 +487,30 @@ public static BatchScanner configureBatchScannerForDiscovery(ShardQueryConfigura LongRange dateRange = new LongRange(begin.getTime(), end.getTime()); - ShardIndexQueryTableStaticMethods.configureGlobalIndexDateRangeFilter(config, bs, dateRange); - ShardIndexQueryTableStaticMethods.configureGlobalIndexDataTypeFilter(config, bs, config.getDatatypeFilter()); - - ShardIndexQueryTableStaticMethods.configureGlobalIndexTermMatchingIterator(config, bs, literals, patterns, reverseIndex, uniqueTermsOnly, - expansionFields); + List settings = Lists.newLinkedList(); + settings.add(ShardIndexQueryTableStaticMethods.configureGlobalIndexDateRangeFilter(config, dateRange)); + settings.add(ShardIndexQueryTableStaticMethods.configureGlobalIndexDataTypeFilter(config, config.getDatatypeFilter())); - bs.addScanIterator(new IteratorSetting(config.getBaseIteratorPriority() + 50, DiscoveryIterator.class)); + settings.add(ShardIndexQueryTableStaticMethods.configureGlobalIndexTermMatchingIterator(config, literals, patterns, reverseIndex, uniqueTermsOnly)); - return bs; + settings.add(new IteratorSetting(config.getBaseIteratorPriority() + 50, DiscoveryIterator.class)); + return settings; } public boolean isFullTableScanEnabled() { - return fullTableScanEnabled; + return getConfig().getFullTableScanEnabled(); } public void setFullTableScanEnabled(boolean fullTableScanEnabled) { - this.fullTableScanEnabled = fullTableScanEnabled; + this.getConfig().setFullTableScanEnabled(fullTableScanEnabled); } public boolean isAllowLeadingWildcard() { - return 
allowLeadingWildcard; + return getConfig().isAllowLeadingWildcard(); } public void setAllowLeadingWildcard(boolean allowLeadingWildcard) { - this.allowLeadingWildcard = allowLeadingWildcard; + getConfig().setAllowLeadingWildcard(allowLeadingWildcard); } @Override @@ -606,7 +520,7 @@ public AccumuloConnectionFactory.Priority getConnectionPriority() { @Override public QueryLogicTransformer getTransformer(Query settings) { - return new DiscoveryTransformer(this, settings, this.queryModel); + return new DiscoveryTransformer(this, settings, getConfig().getQueryModel()); } /** @@ -615,41 +529,42 @@ public QueryLogicTransformer getTransformer(Query settings) { * @return the model name */ public String getModelName() { - return modelName; + return getConfig().getModelName(); } public void setModelName(String modelName) { - this.modelName = modelName; + getConfig().setModelName(modelName); } public String getModelTableName() { - return modelTableName; + return getConfig().getModelTableName(); } public void setModelTableName(String modelTableName) { - this.modelTableName = modelTableName; + getConfig().setModelTableName(modelTableName); } public QueryModel getQueryModel() { - return this.queryModel; + return getConfig().getQueryModel(); } public void setQueryModel(QueryModel model) { - this.queryModel = model; + getConfig().setQueryModel(model); } - public class CloseableIterator implements Iterator> { + public class CloseableIterator implements Iterator>, Closeable { - private final Iterator> batchScannerIterator; + private final Iterator> batchScannerIterator; protected Boolean reverseIndex = false; protected Entry currentEntry = null; protected BatchScanner currentBS = null; protected Iterator> currentIter = null; + protected QueryData queryData = null; protected volatile boolean closed = false; - public CloseableIterator(Iterator> batchScannerIterator) { + public CloseableIterator(Iterator> batchScannerIterator) { this.batchScannerIterator = batchScannerIterator; } @@ 
-677,9 +592,10 @@ public boolean hasNext() { } if (batchScannerIterator.hasNext()) { - Entry entry = batchScannerIterator.next(); + Entry entry = batchScannerIterator.next(); this.currentBS = entry.getKey(); - this.reverseIndex = entry.getValue(); + this.queryData = entry.getValue(); + this.reverseIndex = entry.getValue().getTableName().equals(getConfig().getReverseIndexTableName()); this.currentIter = this.currentBS.iterator(); return hasNext(); @@ -703,6 +619,8 @@ public Entry next() { Entry cur = this.currentEntry; this.currentEntry = null; + queryData.setLastResult(cur.getKey()); + if (this.reverseIndex) { Text term = new Text((new StringBuilder(cur.getKey().getRow().toString())).reverse().toString()); cur = Maps.immutableEntry(new Key(term, cur.getKey().getColumnFamily(), cur.getKey().getColumnQualifier(), @@ -725,6 +643,7 @@ public void remove() { throw new UnsupportedOperationException(); } + @Override public void close() { if (!closed) { closed = true; @@ -742,8 +661,8 @@ public Set getOptionalQueryParameters() { optionalParams.add(QueryParameters.PARAMETER_MODEL_NAME); optionalParams.add(QueryParameters.PARAMETER_MODEL_TABLE_NAME); optionalParams.add(QueryParameters.DATATYPE_FILTER_SET); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_BEGIN); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_END); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_BEGIN); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_END); return optionalParams; } @@ -758,11 +677,11 @@ public Set getExampleQueries() { } public List getRealmSuffixExclusionPatterns() { - return realmSuffixExclusionPatterns; + return getConfig().getRealmSuffixExclusionPatterns(); } public void setRealmSuffixExclusionPatterns(List realmSuffixExclusionPatterns) { - this.realmSuffixExclusionPatterns = realmSuffixExclusionPatterns; + getConfig().setRealmSuffixExclusionPatterns(realmSuffixExclusionPatterns); } } diff --git 
a/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java index 62eba5f42c6..6854c34bd2c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java @@ -39,8 +39,21 @@ import com.google.common.util.concurrent.ListeningExecutorService; import com.google.common.util.concurrent.MoreExecutors; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.configuration.Result; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.WritesQueryMetrics; import datawave.data.type.Type; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.CloseableIterable; import datawave.query.Constants; import datawave.query.DocumentSerialization; @@ -85,16 +98,7 @@ import datawave.query.util.QueryStopwatch; import datawave.query.util.sortedset.FileSortedSet; import datawave.util.time.TraceStopwatch; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.configuration.QueryData; import 
datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.WritesQueryMetrics; import datawave.webservice.query.result.event.ResponseObjectFactory; /** @@ -162,7 +166,7 @@ * * @see datawave.query.enrich */ -public class ShardQueryLogic extends BaseQueryLogic> { +public class ShardQueryLogic extends BaseQueryLogic> implements CheckpointableQueryLogic { public static final String NULL_BYTE = "\0"; public static final Class tableConfigurationType = ShardQueryConfiguration.class; @@ -398,37 +402,7 @@ public void initialize(ShardQueryConfiguration config, AccumuloClient client, Qu config.setEndDate(endDate); } - MetadataHelper metadataHelper = prepareMetadataHelper(client, this.getMetadataTableName(), auths, config.isRawTypes()); - - DateIndexHelper dateIndexHelper = prepareDateIndexHelper(client, this.getDateIndexTableName(), auths); - if (config.isDateIndexTimeTravel()) { - dateIndexHelper.setTimeTravel(config.isDateIndexTimeTravel()); - } - - QueryPlanner queryPlanner = getQueryPlanner(); - if (queryPlanner instanceof DefaultQueryPlanner) { - DefaultQueryPlanner currentQueryPlanner = (DefaultQueryPlanner) queryPlanner; - - currentQueryPlanner.setMetadataHelper(metadataHelper); - currentQueryPlanner.setDateIndexHelper(dateIndexHelper); - - QueryModelProvider queryModelProvider = currentQueryPlanner.getQueryModelProviderFactory().createQueryModelProvider(); - if (queryModelProvider instanceof MetadataHelperQueryModelProvider) { - ((MetadataHelperQueryModelProvider) queryModelProvider).setMetadataHelper(metadataHelper); - ((MetadataHelperQueryModelProvider) queryModelProvider).setConfig(config); - } - - if (null != queryModelProvider.getQueryModel()) { - queryModel = queryModelProvider.getQueryModel(); - - } - } - - if (this.queryModel == null) - loadQueryModel(metadataHelper, config); - - 
getQueryPlanner().setCreateUidsIteratorClass(createUidsIteratorClass); - getQueryPlanner().setUidIntersector(uidIntersector); + setupQueryPlanner(config); validateConfiguration(config); @@ -448,18 +422,18 @@ public void initialize(ShardQueryConfiguration config, AccumuloClient client, Qu config.setProjectFields(getCardinalityConfiguration().getRevisedProjectFields(queryModel, originalProjectFields)); } - this.queries = getQueryPlanner().process(config, jexlQueryString, settings, this.getScannerFactory()); + setQueries(getQueryPlanner().process(config, jexlQueryString, settings, this.getScannerFactory())); config.setDisallowlistedFields(originalDisallowlistedFields); config.setProjectFields(originalProjectFields); } else { - this.queries = getQueryPlanner().process(config, jexlQueryString, settings, this.getScannerFactory()); + setQueries(getQueryPlanner().process(config, jexlQueryString, settings, this.getScannerFactory())); } TraceStopwatch stopwatch = config.getTimers().newStartedStopwatch("ShardQueryLogic - Get iterator of queries"); if (this.queries != null) { - config.setQueries(this.queries.iterator()); + config.setQueriesIter(this.queries.iterator()); } config.setQueryString(getQueryPlanner().getPlannedScript()); @@ -467,6 +441,42 @@ public void initialize(ShardQueryConfiguration config, AccumuloClient client, Qu stopwatch.stop(); } + private void setupQueryPlanner(ShardQueryConfiguration config) + throws TableNotFoundException, ExecutionException, InstantiationException, IllegalAccessException { + MetadataHelper metadataHelper = prepareMetadataHelper(config.getClient(), this.getMetadataTableName(), config.getAuthorizations(), config.isRawTypes()); + + DateIndexHelper dateIndexHelper = prepareDateIndexHelper(config.getClient(), this.getDateIndexTableName(), config.getAuthorizations()); + if (config.isDateIndexTimeTravel()) { + dateIndexHelper.setTimeTravel(config.isDateIndexTimeTravel()); + } + + QueryPlanner queryPlanner = getQueryPlanner(); + if 
(queryPlanner instanceof DefaultQueryPlanner) { + DefaultQueryPlanner currentQueryPlanner = (DefaultQueryPlanner) queryPlanner; + + currentQueryPlanner.setMetadataHelper(metadataHelper); + currentQueryPlanner.setDateIndexHelper(dateIndexHelper); + + QueryModelProvider queryModelProvider = currentQueryPlanner.getQueryModelProviderFactory().createQueryModelProvider(); + if (queryModelProvider instanceof MetadataHelperQueryModelProvider) { + ((MetadataHelperQueryModelProvider) queryModelProvider).setMetadataHelper(metadataHelper); + ((MetadataHelperQueryModelProvider) queryModelProvider).setConfig(config); + } + + if (null != queryModelProvider.getQueryModel()) { + queryModel = queryModelProvider.getQueryModel(); + + } + } + + if (this.queryModel == null) + loadQueryModel(metadataHelper, config); + + getQueryPlanner().setCreateUidsIteratorClass(createUidsIteratorClass); + getQueryPlanner().setUidIntersector(uidIntersector); + + } + /** * Validate that the configuration is in a consistent state * @@ -524,7 +534,7 @@ public void setupQuery(GenericQueryConfiguration genericConfig) throws Exception throw new QueryException("Did not receive a ShardQueryConfiguration instance!!"); } - ShardQueryConfiguration config = (ShardQueryConfiguration) genericConfig; + config = (ShardQueryConfiguration) genericConfig; final QueryStopwatch timers = config.getTimers(); TraceStopwatch stopwatch = timers.newStartedStopwatch("ShardQueryLogic - Setup Query"); @@ -568,12 +578,16 @@ public void remove() { this.scheduler = getScheduler(config, scannerFactory); this.scanner = null; - this.iterator = this.scheduler.iterator(); + Iterator resultIterator = this.scheduler.iterator(); if (!config.isSortedUIDs()) { - this.iterator = new DedupingIterator(this.iterator); + DedupingIterator dedupIterator = new DedupingIterator(resultIterator, config.getBloom()); + config.setBloom(dedupIterator.getBloom()); + resultIterator = dedupIterator; } + this.iterator = Result.keyValueIterator(resultIterator); + 
stopwatch.stop(); log.info(getStopwatchHeader(config)); @@ -611,7 +625,9 @@ public QueryLogicTransformer getTransformer(Query settings) { transformer.setEventQueryDataDecoratorTransformer(eventQueryDataDecoratorTransformer); transformer.setContentFieldNames(getConfig().getContentFieldNames()); transformer.setLogTimingDetails(this.getLogTimingDetails()); - transformer.setCardinalityConfiguration(cardinalityConfiguration); + if (cardinalityConfiguration != null && cardinalityConfiguration.isEnabled()) { + transformer.setCardinalityConfiguration(cardinalityConfiguration); + } transformer.setPrimaryToSecondaryFieldMap(primaryToSecondaryFieldMap); transformer.setQm(queryModel); this.transformerInstance = transformer; @@ -1877,10 +1893,6 @@ public Set getUnevaluatedFields() { return getConfig().getUnevaluatedFields(); } - public void setUnevaluatedFields(String unevaluatedFieldList) { - getConfig().setUnevaluatedFields(unevaluatedFieldList); - } - public void setUnevaluatedFields(Collection unevaluatedFields) { getConfig().setUnevaluatedFields(unevaluatedFields); } @@ -2247,10 +2259,10 @@ public Set getOptionalQueryParameters() { optionalParams.add(QueryParameters.GROUP_FIELDS); optionalParams.add(QueryParameters.UNIQUE_FIELDS); optionalParams.add(QueryOptions.LOG_TIMING_DETAILS); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_PAGESIZE); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_PAGETIMEOUT); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_EXPIRATION); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_MAX_RESULTS_OVERRIDE); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_PAGESIZE); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_PAGETIMEOUT); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_EXPIRATION); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_MAX_RESULTS_OVERRIDE); 
optionalParams.add(QueryParameters.SUM_FIELDS); optionalParams.add(QueryParameters.MAX_FIELDS); optionalParams.add(QueryParameters.MIN_FIELDS); @@ -2262,12 +2274,12 @@ public Set getOptionalQueryParameters() { @Override public Set getRequiredQueryParameters() { Set requiredParams = new TreeSet<>(); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_STRING); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_NAME); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_BEGIN); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_END); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_STRING); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_NAME); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_BEGIN); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_END); return requiredParams; } @@ -2301,7 +2313,7 @@ public void setAccumuloPassword(String password) { } public boolean isExpansionLimitedToModelContents() { - return getConfig().isExpansionLimitedToModelContents(); + return getConfig().isLimitTermExpansionToModel(); } public void setLimitTermExpansionToModel(boolean shouldLimitTermExpansionToModel) { @@ -2724,6 +2736,72 @@ public Set getEvaluationOnlyFields() { return getConfig().getEvaluationOnlyFields(); } + /** + * Implementations use the configuration to setup execution of a portion of their query. getTransformIterator should be used to get the partial results if + * any. 
+ * + * @param client + * The accumulo client + * @param baseConfig + * The shard query configuration + * @param checkpoint + */ + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration baseConfig, QueryCheckpoint checkpoint) throws Exception { + ShardQueryConfiguration config = (ShardQueryConfiguration) baseConfig; + config.setQueries(checkpoint.getQueries()); + config.setClient(client); + setScannerFactory(new ScannerFactory(config)); + + setupQuery(config); + } + + @Override + public boolean isCheckpointable() { + return getConfig().isCheckpointable(); + } + + @Override + public void setCheckpointable(boolean checkpointable) { + getConfig().setCheckpointable(checkpointable); + } + + /** + * This can be called at any point to get a checkpoint such that this query logic instance can be torn down to be rebuilt later. At a minimum this should be + * called after the getTransformIterator is depleted of results. + * + * @param queryKey + * The query key to include in the checkpoint + * @return The query checkpoint + */ + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot checkpoint a query that is not checkpointable. 
Try calling setCheckpointable(true) first."); + } + + // if we have started returning results, then capture the state of the query scheduler + if (this.scheduler != null) { + return this.scheduler.checkpoint(queryKey); + } + // otherwise we create a checkpoint per query data + else { + Iterator queries = getConfig().getQueriesIter(); + List checkpoints = new ArrayList<>(); + while (queries.hasNext()) { + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(queries.next()))); + } + return checkpoints; + } + } + + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + // for the shard query logic, the query data objects automatically get update with + // the last result returned, so the checkpoint should already be updated! + return checkpoint; + } + public Set getDisallowedRegexPatterns() { return getConfig().getDisallowedRegexPatterns(); } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/async/Scan.java b/warehouse/query-core/src/main/java/datawave/query/tables/async/Scan.java index 0577e23ddb8..2ff0b11e2b1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/async/Scan.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/async/Scan.java @@ -3,7 +3,6 @@ import java.io.InterruptedIOException; import java.util.Iterator; import java.util.List; -import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Callable; @@ -16,13 +15,13 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.PartialKey; import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; import com.google.common.base.Function; import com.google.common.eventbus.Subscribe; +import datawave.core.query.configuration.Result; import datawave.mr.bulk.RfileResource; import 
datawave.query.tables.AccumuloResource; import datawave.query.tables.AccumuloResource.ResourceFactory; @@ -52,7 +51,7 @@ public class Scan implements Callable { private ResourceQueue delegatorReference; - protected BlockingQueue> results; + protected BlockingQueue results; private String localTableName; @@ -75,7 +74,7 @@ public class Scan implements Callable { private AccumuloResource delegatedResource = null; public Scan(String localTableName, Set localAuths, ScannerChunk chunk, ResourceQueue delegatorReference, - Class delegatedResourceInitializer, BlockingQueue> results, ExecutorService callingService) { + Class delegatedResourceInitializer, BlockingQueue results, ExecutorService callingService) { myScan = chunk; if (log.isTraceEnabled()) log.trace("Size of ranges: " + myScan.getRanges().size()); @@ -242,7 +241,7 @@ public Scan call() throws Exception { delegatedResource = ResourceFactory.initializeResource(initializer, delegatedResource, localTableName, localAuths, currentRange) .setOptions(myScan.getOptions()); - Iterator> iter = delegatedResource.iterator(); + Iterator iter = Result.resultIterator(myScan.getContext(), delegatedResource.iterator()); if (null != myStats) myStats.getTimer(TIMERS.SCANNER_START).suspend(); @@ -255,7 +254,7 @@ public Scan call() throws Exception { lastSeenKey = null; } - Entry myEntry = null; + Result myEntry = null; if (null != myStats) myStats.getTimer(TIMERS.SCANNER_ITERATE).resume(); while (iter.hasNext()) { @@ -345,6 +344,10 @@ public void setSessionArbiter(SessionArbiter arbiter) { this.arbiter = arbiter; } + public ScannerChunk getScannerChunk() { + return myScan; + } + public String getScanLocation() { return myScan.getLastKnownLocation(); } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/async/ScannerChunk.java b/warehouse/query-core/src/main/java/datawave/query/tables/async/ScannerChunk.java index f644dbbf3a0..a0ff91507a2 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/tables/async/ScannerChunk.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/async/ScannerChunk.java @@ -12,6 +12,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; +import datawave.core.query.configuration.ResultContext; import datawave.query.tables.SessionOptions; /** @@ -19,6 +20,7 @@ */ public class ScannerChunk { + protected ResultContext context; protected SessionOptions options; protected ConcurrentLinkedQueue ranges; protected Range lastRange; @@ -28,13 +30,20 @@ public class ScannerChunk { /* * Constructor used for testing + * + * @param options + * + * @param ranges + * + * @param context */ - public ScannerChunk(SessionOptions options, Collection ranges) { - this(options, ranges, "localhost"); + public ScannerChunk(SessionOptions options, Collection ranges, ResultContext context) { + this(options, ranges, context, "localhost"); } - public ScannerChunk(SessionOptions options, Collection ranges, String server) { + public ScannerChunk(SessionOptions options, Collection ranges, ResultContext context, String server) { Preconditions.checkNotNull(ranges); + this.context = context; this.options = options; this.ranges = new ConcurrentLinkedQueue<>(); this.lastKnownLocation = server; @@ -55,7 +64,8 @@ public ScannerChunk(SessionOptions options, Collection ranges, String ser */ public ScannerChunk(ScannerChunk chunk) { Preconditions.checkNotNull(chunk); - options = chunk.options; + this.options = chunk.options; + this.context = chunk.context; this.ranges = new ConcurrentLinkedQueue<>(); setRanges(chunk.ranges); this.lastKnownLocation = chunk.lastKnownLocation; @@ -87,6 +97,10 @@ public int hashCode() { } + public ResultContext getContext() { + return context; + } + public Range getLastRange() { return lastRange; } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/async/SpeculativeScan.java 
b/warehouse/query-core/src/main/java/datawave/query/tables/async/SpeculativeScan.java index 4e3720576e8..69ddd40b23d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/async/SpeculativeScan.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/async/SpeculativeScan.java @@ -2,7 +2,6 @@ import java.lang.Thread.UncaughtExceptionHandler; import java.util.List; -import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.ExecutorService; @@ -17,7 +16,6 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; @@ -28,6 +26,7 @@ import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.MoreExecutors; +import datawave.core.query.configuration.Result; import datawave.query.tables.AccumuloResource; import datawave.query.tables.ResourceQueue; import datawave.query.tables.stats.ScanSessionStats; @@ -51,8 +50,7 @@ public class SpeculativeScan extends Scan implements FutureCallback, Uncau protected ExecutorService service = null; - protected LinkedBlockingDeque> myResultQueue; - + protected LinkedBlockingDeque myResultQueue; protected ReentrantLock writeControl = new ReentrantLock(); protected Throwable failure = null; @@ -82,8 +80,7 @@ public Thread newThread(Runnable r) { } public SpeculativeScan(String localTableName, Set localAuths, ScannerChunk chunk, ResourceQueue delegatorReference, - Class delegatedResourceInitializer, ArrayBlockingQueue> results, - ExecutorService callingService) { + Class delegatedResourceInitializer, ArrayBlockingQueue results, ExecutorService callingService) { super(localTableName, localAuths, chunk, delegatorReference, delegatedResourceInitializer, results, callingService); scans = Lists.newArrayList(); scanFutures = Lists.newArrayList(); @@ -231,7 
+228,7 @@ public void onFailure(Throwable t) { } - public LinkedBlockingDeque> getQueue() { + public LinkedBlockingDeque getQueue() { return myResultQueue; } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/async/event/VisitorFunction.java b/warehouse/query-core/src/main/java/datawave/query/tables/async/event/VisitorFunction.java index 51b4c7484d6..43f5b58b734 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/async/event/VisitorFunction.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/async/event/VisitorFunction.java @@ -39,6 +39,7 @@ import com.google.common.collect.Sets; import datawave.core.iterators.filesystem.FileSystemCache; +import datawave.microservice.query.Query; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.exceptions.InvalidQueryException; @@ -64,7 +65,6 @@ import datawave.query.util.TypeMetadata; import datawave.util.StringUtils; import datawave.util.time.DateHelper; -import datawave.webservice.query.Query; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.PreConditionFailedQueryException; @@ -159,7 +159,7 @@ public ScannerChunk apply(@Nullable ScannerChunk input) { SessionOptions options = input.getOptions(); - ScannerChunk newSettings = new ScannerChunk(null, input.getRanges(), input.getLastKnownLocation()); + ScannerChunk newSettings = new ScannerChunk(null, input.getRanges(), input.getContext(), input.getLastKnownLocation()); SessionOptions newOptions = new SessionOptions(options); @@ -624,7 +624,7 @@ protected ASTJexlScript pushdownLargeFieldedLists(ShardQueryConfiguration config } protected URI getFstHdfsQueryCacheUri(ShardQueryConfiguration config, Query settings) { - if (config.getIvaratorFstHdfsBaseURIs() != null) { + if (config.getIvaratorFstHdfsBaseURIs() != null && 
!config.getIvaratorFstHdfsBaseURIs().isEmpty()) { String[] choices = StringUtils.split(config.getIvaratorFstHdfsBaseURIs(), ','); int index = random.nextInt(choices.length); Path path = new Path(choices[index], settings.getId().toString()); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryConfiguration.java index 9ed8bdb7a94..0fe9dd84f11 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryConfiguration.java @@ -1,18 +1,19 @@ package datawave.query.tables.chained; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.microservice.query.Query; public class ChainedQueryConfiguration extends GenericQueryConfiguration { private static final long serialVersionUID = 444695916607959066L; - private Query query = null; + // for backward compatability public void setQueryImpl(Query query) { - this.query = query; + setQuery(query); } + // for backward capatability public Query getQueryImpl() { - return this.query; + return getQuery(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryTable.java b/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryTable.java index ff6ffb16084..4e88c9e4f73 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryTable.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chained/ChainedQueryTable.java @@ -7,12 +7,12 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; +import datawave.core.common.connection.AccumuloConnectionFactory; +import 
datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; import datawave.query.tables.chained.strategy.ChainStrategy; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; /** *

    diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chained/iterators/ChainedQueryIterator.java b/warehouse/query-core/src/main/java/datawave/query/tables/chained/iterators/ChainedQueryIterator.java index 2c30afdbbde..1ecc7335e03 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chained/iterators/ChainedQueryIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chained/iterators/ChainedQueryIterator.java @@ -6,8 +6,8 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; /** * Abstract class that encompasses the members necessary to run a ChainedQuery. Any implementation should need two {@link QueryLogic}'s, the original query the diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/ChainStrategy.java b/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/ChainStrategy.java index 0db66bf3c7c..235e83b9878 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/ChainStrategy.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/ChainStrategy.java @@ -6,8 +6,8 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; /** * The approach to take when converting query results into another query diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/FullChainStrategy.java b/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/FullChainStrategy.java index 
2f552936166..a112199d16b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/FullChainStrategy.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chained/strategy/FullChainStrategy.java @@ -7,9 +7,9 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; /** * Defines the logic to collect all of the results from the former query logic and issue one query against the latter query logic @@ -17,9 +17,9 @@ * * * @param - * Type of former {@link datawave.webservice.query.logic.QueryLogic} + * Type of former {@link QueryLogic} * @param - * Type of latter {@link datawave.webservice.query.logic.QueryLogic} + * Type of latter {@link QueryLogic} */ public abstract class FullChainStrategy implements ChainStrategy { protected final Logger log = Logger.getLogger(FullChainStrategy.class); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/chunk/Chunker.java b/warehouse/query-core/src/main/java/datawave/query/tables/chunk/Chunker.java index cd2402983df..01d6e20fb0c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/chunk/Chunker.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/chunk/Chunker.java @@ -2,8 +2,8 @@ import java.util.Iterator; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.microservice.query.Query; /** * A java.util.Iterator interface for splitting a query into smaller chunks to be executed as separate queries. 
diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryCheckpoint.java b/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryCheckpoint.java new file mode 100644 index 00000000000..9adf4e85c44 --- /dev/null +++ b/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryCheckpoint.java @@ -0,0 +1,71 @@ +package datawave.query.tables.content; + +import java.io.IOException; +import java.io.Serializable; +import java.util.ArrayList; +import java.util.Collection; + +import org.apache.accumulo.core.data.Range; +import org.apache.commons.lang3.builder.EqualsBuilder; +import org.apache.commons.lang3.builder.HashCodeBuilder; + +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; + +public class ContentQueryCheckpoint extends QueryCheckpoint implements Serializable { + + private transient Collection ranges; + + public ContentQueryCheckpoint(QueryKey queryKey, Collection ranges) { + super(queryKey, null); + this.ranges = ranges; + } + + public Collection getRanges() { + return ranges; + } + + @Override + public String toString() { + return getQueryKey() + ": " + getRanges(); + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + + if (!(o instanceof ContentQueryCheckpoint)) + return false; + + ContentQueryCheckpoint that = (ContentQueryCheckpoint) o; + + return new EqualsBuilder().appendSuper(super.equals(o)).append(ranges, that.ranges).isEquals(); + } + + @Override + public int hashCode() { + return new HashCodeBuilder(17, 37).appendSuper(super.hashCode()).append(ranges).toHashCode(); + } + + private void writeObject(java.io.ObjectOutputStream out) throws IOException { + out.defaultWriteObject(); + out.writeInt(ranges != null ? 
ranges.size() : 0); + for (Range range : ranges) { + range.write(out); + } + } + + private void readObject(java.io.ObjectInputStream in) throws IOException, ClassNotFoundException { + in.defaultReadObject(); + int numRanges = in.readInt(); + if (numRanges > 0) { + ranges = new ArrayList<>(); + while (numRanges-- > 0) { + Range range = new Range(); + range.readFields(in); + ranges.add(range); + } + } + } +} diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryTable.java b/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryLogic.java similarity index 75% rename from warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryTable.java rename to warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryLogic.java index 2d6864dcbd7..797e240d061 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryTable.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/content/ContentQueryLogic.java @@ -2,6 +2,7 @@ import java.util.Collection; import java.util.Collections; +import java.util.List; import java.util.Map.Entry; import java.util.Set; import java.util.TreeSet; @@ -19,19 +20,24 @@ import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; +import com.google.common.collect.Lists; + +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.ingest.mapreduce.handler.ExtendedDataTypeHandler; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.Constants; import datawave.query.QueryParameters; 
import datawave.query.config.ContentQueryConfiguration; import datawave.query.tables.ScannerFactory; import datawave.query.transformer.ContentQueryTransformer; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * This query table implementation returns a QueryResults object that contains documents from the Shard table. The query will contain the shard id, datatype, @@ -47,9 +53,9 @@ * The optional parameter content.view.name can be used to retrieve an alternate view of the document, assuming one is stored with that name. The optional * parameter content.view.all can be used to retrieve all documents for the parent and children Both optional parameters can be used together */ -public class ContentQueryTable extends BaseQueryLogic> { +public class ContentQueryLogic extends BaseQueryLogic> implements CheckpointableQueryLogic { - private static final Logger log = Logger.getLogger(ContentQueryTable.class); + private static final Logger log = Logger.getLogger(ContentQueryLogic.class); private static final String PARENT_ONLY = "\1"; private static final String ALL = "\u10FFFF"; @@ -58,15 +64,17 @@ public class ContentQueryTable extends BaseQueryLogic> { ScannerFactory scannerFactory; String viewName = null; - public ContentQueryTable() { + private ContentQueryConfiguration config; + + public ContentQueryLogic() { super(); } - public ContentQueryTable(final ContentQueryTable contentQueryTable) { - super(contentQueryTable); - this.queryThreads = contentQueryTable.queryThreads; - this.scannerFactory = contentQueryTable.scannerFactory; - this.viewName = contentQueryTable.viewName; + public 
ContentQueryLogic(final ContentQueryLogic contentQueryLogic) { + super(contentQueryLogic); + this.queryThreads = contentQueryLogic.queryThreads; + this.scannerFactory = contentQueryLogic.scannerFactory; + this.viewName = contentQueryLogic.viewName; } /** @@ -93,7 +101,7 @@ public void close() { @Override public GenericQueryConfiguration initialize(final AccumuloClient client, final Query settings, final Set auths) throws Exception { // Initialize the config and scanner factory - final ContentQueryConfiguration config = new ContentQueryConfiguration(this, settings); + config = new ContentQueryConfiguration(this, settings); this.scannerFactory = new ScannerFactory(client); config.setClient(client); config.setAuthorizations(auths); @@ -244,7 +252,7 @@ public QueryLogicTransformer getTransformer(Query settings) { @Override public Object clone() throws CloneNotSupportedException { - return new ContentQueryTable(this); + return new ContentQueryLogic(this); } public int getQueryThreads() { @@ -268,4 +276,61 @@ public Set getRequiredQueryParameters() { public Set getExampleQueries() { return Collections.emptySet(); } + + @Override + public ContentQueryConfiguration getConfig() { + if (this.config == null) { + this.config = ContentQueryConfiguration.create(); + } + + return this.config; + } + + @Override + public boolean isCheckpointable() { + return getConfig().isCheckpointable(); + } + + @Override + public void setCheckpointable(boolean checkpointable) { + getConfig().setCheckpointable(checkpointable); + } + + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot checkpoint a query that is not checkpointable. 
Try calling setCheckpointable(true) first."); + } + + // if we have started returning results, then capture the state of the query data objects + if (this.iterator != null) { + List checkpoints = Lists.newLinkedList(); + for (Range range : ((ContentQueryConfiguration) getConfig()).getRanges()) { + checkpoints.add(new ContentQueryCheckpoint(queryKey, Collections.singletonList(range))); + } + return checkpoints; + } + // otherwise we still need to plan or there are no results + else { + return Lists.newArrayList(new QueryCheckpoint(queryKey)); + } + } + + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + // for the content query logic, the query data objects automatically get updated with + // the last result returned, so the checkpoint should already be updated! + return checkpoint; + } + + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration config, QueryCheckpoint checkpoint) throws Exception { + ContentQueryConfiguration contentQueryConfig = (ContentQueryConfiguration) config; + contentQueryConfig.setRanges(((ContentQueryCheckpoint) checkpoint).getRanges()); + contentQueryConfig.setClient(client); + + scannerFactory = new ScannerFactory(client); + + setupQuery(contentQueryConfig); + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogic.java index d53b252a421..d2c76c80814 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogic.java @@ -3,13 +3,16 @@ import java.util.HashSet; import java.util.Set; -import javax.inject.Inject; - import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; import org.apache.commons.jexl3.parser.ASTJexlScript; import 
org.apache.log4j.Logger; +import datawave.core.common.edgedictionary.EdgeDictionaryProvider; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.edge.model.EdgeModelFields; +import datawave.edge.model.EdgeModelFieldsFactory; +import datawave.microservice.query.Query; import datawave.query.QueryParameters; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.visitors.JexlStringBuildingVisitor; @@ -18,9 +21,6 @@ import datawave.query.tables.ShardQueryLogic; import datawave.webservice.dictionary.edge.EdgeDictionaryBase; import datawave.webservice.dictionary.edge.MetadataBase; -import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * This Logic highjacks the Query string, and transforms it into a ShardQueryLogic query The query string is of the form: @@ -37,8 +37,9 @@ public class DefaultEdgeEventQueryLogic extends ShardQueryLogic { protected EdgeDictionaryBase> dict; - @Inject - protected RemoteEdgeDictionary remoteEdgeDictionary; + protected EdgeDictionaryProvider edgeDictionaryProvider; + + protected EdgeModelFields edgeFields; public DefaultEdgeEventQueryLogic() {} @@ -50,7 +51,7 @@ public DefaultEdgeEventQueryLogic(DefaultEdgeEventQueryLogic other) { this.dict = other.dict; this.edgeModelName = other.edgeModelName; this.edgeQueryModel = other.edgeQueryModel; - this.remoteEdgeDictionary = other.remoteEdgeDictionary; + this.edgeDictionaryProvider = other.edgeDictionaryProvider; } @Override @@ -59,18 +60,18 @@ public DefaultEdgeEventQueryLogic clone() { } @SuppressWarnings("unchecked") - protected EdgeDictionaryBase> getEdgeDictionary(String queryAuths) { - return remoteEdgeDictionary.getEdgeDictionary(getMetadataTableName(), queryAuths); + protected EdgeDictionaryBase> getEdgeDictionary(Query settings) { + return edgeDictionaryProvider.getEdgeDictionary(settings, getMetadataTableName()); } 
protected DefaultEventQueryBuilder getEventQueryBuilder() { - return new DefaultEventQueryBuilder(dict); + return new DefaultEventQueryBuilder(dict, getEdgeFields()); } @Override public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { - setEdgeDictionary(getEdgeDictionary(settings.getQueryAuthorizations())); // TODO grab threads from somewhere + setEdgeDictionary(getEdgeDictionary(settings)); // TODO grab threads from somewhere // Load and apply the configured edge query model loadEdgeQueryModel(client, auths); @@ -102,7 +103,7 @@ protected void loadEdgeQueryModel(AccumuloClient client, Set aut if (null == getEdgeQueryModel() && (!model.isEmpty() && !modelTable.isEmpty())) { try { setEdgeQueryModel(new EdgeQueryModel(getMetadataHelperFactory().createMetadataHelper(client, getConfig().getMetadataTableName(), auths) - .getQueryModel(getConfig().getModelTableName(), getConfig().getModelName()))); + .getQueryModel(getConfig().getModelTableName(), getConfig().getModelName()), getEdgeFields())); } catch (Throwable t) { log.error("Unable to load edgeQueryModel from metadata table", t); } @@ -156,4 +157,23 @@ protected String getEventQuery(Query settings) throws Exception { return getEventQueryBuilder().getEventQuery(getJexlQueryString(settings)); } + public EdgeDictionaryProvider getEdgeDictionaryProvider() { + return edgeDictionaryProvider; + } + + public void setEdgeDictionaryProvider(EdgeDictionaryProvider edgeDictionaryProvider) { + this.edgeDictionaryProvider = edgeDictionaryProvider; + } + + public void setEdgeModelFieldsFactory(EdgeModelFieldsFactory edgeModelFieldsFactory) { + this.edgeFields = edgeModelFieldsFactory.createFields(); + } + + public EdgeModelFields getEdgeFields() { + return edgeFields; + } + + public void setEdgeFields(EdgeModelFields edgeFields) { + this.edgeFields = edgeFields; + } } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEventQueryBuilder.java 
b/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEventQueryBuilder.java index 36074a21923..7aaecf6b8bd 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEventQueryBuilder.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/DefaultEventQueryBuilder.java @@ -11,8 +11,8 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import datawave.edge.model.EdgeModelAware; -import datawave.edge.model.EdgeModelAware.Fields.FieldKey; +import datawave.edge.model.EdgeModelFields; +import datawave.edge.model.EdgeModelFields.FieldKey; import datawave.query.jexl.JexlASTHelper; import datawave.query.jexl.visitors.TreeFlatteningRebuildingVisitor; import datawave.webservice.dictionary.edge.EdgeDictionaryBase; @@ -37,10 +37,13 @@ public class DefaultEventQueryBuilder { protected Set attribute2; protected Set attribute3; + protected final EdgeModelFields fields; + protected EdgeDictionaryBase> dict; - public DefaultEventQueryBuilder(EdgeDictionaryBase> dict) { + public DefaultEventQueryBuilder(EdgeDictionaryBase> dict, EdgeModelFields fields) { this.dict = dict; + this.fields = fields; } public String getEventQuery(String jexlQueryString) throws Exception { @@ -240,7 +243,7 @@ protected String cleanseFieldName(String fieldName) { } protected DefaultEventQueryBuilder parseAndAdd(String fieldName, String fieldValue) { - FieldKey fieldKey = FieldKey.parse(fieldName); + FieldKey fieldKey = fields.parse(fieldName); if (null != fieldKey) { switch (fieldKey) { case EDGE_SOURCE: @@ -274,7 +277,7 @@ protected DefaultEventQueryBuilder parseAndAdd(String fieldName, String fieldVal * event, fields used when searching for the definition should not be or'ed together */ public boolean orAbleField(String fieldName) { - FieldKey fieldKey = FieldKey.parse(fieldName); + FieldKey fieldKey = fields.parse(fieldName); if (null != fieldKey) { // attributes 2 and 3 on the edge should be field names in the event @@ -323,14 +326,13 
@@ public void setSink(String sink) { protected void checkMandatoryFieldsSet() { String helpfullMsg = "If you believe this field has been set ensure proper placement of parenthesis to make sure the query is being evaluated in the order you would expect."; if (StringUtils.isBlank(this.sink)) { - throw new IllegalArgumentException("Mandatory Field not set: " + EdgeModelAware.Fields.getInstance().getSinkFieldName() + ". " + helpfullMsg); + throw new IllegalArgumentException("Mandatory Field not set: " + fields.getSinkFieldName() + ". " + helpfullMsg); } else if (StringUtils.isBlank(this.source)) { - throw new IllegalArgumentException("Mandatory Field not set: " + EdgeModelAware.Fields.getInstance().getSourceFieldName() + ". " + helpfullMsg); + throw new IllegalArgumentException("Mandatory Field not set: " + fields.getSourceFieldName() + ". " + helpfullMsg); } else if (StringUtils.isBlank(this.edgeType)) { - throw new IllegalArgumentException("Mandatory Field not set: " + EdgeModelAware.Fields.getInstance().getTypeFieldName() + ". " + helpfullMsg); + throw new IllegalArgumentException("Mandatory Field not set: " + fields.getTypeFieldName() + ". " + helpfullMsg); } else if (StringUtils.isBlank(this.relationship)) { - throw new IllegalArgumentException( - "Mandatory Field not set: " + EdgeModelAware.Fields.getInstance().getRelationshipFieldName() + ". " + helpfullMsg); + throw new IllegalArgumentException("Mandatory Field not set: " + fields.getRelationshipFieldName() + ". 
" + helpfullMsg); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/EdgeQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/edge/EdgeQueryLogic.java index 00fc84a7b91..f161e326574 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/EdgeQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/EdgeQueryLogic.java @@ -1,16 +1,18 @@ package datawave.query.tables.edge; +import static com.google.common.collect.Iterators.concat; +import static com.google.common.collect.Iterators.transform; import static datawave.query.jexl.JexlASTHelper.jexlFeatures; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectOutputStream; import java.io.StringReader; -import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; +import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -33,6 +35,7 @@ import org.apache.commons.jexl3.parser.ParseException; import org.apache.commons.jexl3.parser.Parser; import org.apache.commons.jexl3.parser.StringProvider; +import org.apache.hadoop.io.DataInputBuffer; import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; @@ -40,9 +43,20 @@ import com.google.common.collect.HashMultimap; import com.google.common.collect.Lists; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; import datawave.core.iterators.ColumnQualifierRangeIterator; import datawave.core.iterators.ColumnRangeIterator; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicTransformer; import 
datawave.data.type.Type; +import datawave.edge.model.EdgeModelFields; +import datawave.edge.model.EdgeModelFieldsFactory; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.QueryParameters; import datawave.query.config.EdgeQueryConfiguration; @@ -58,61 +72,30 @@ import datawave.query.language.parser.QueryParser; import datawave.query.language.tree.QueryNode; import datawave.query.model.edge.EdgeQueryModel; +import datawave.query.scheduler.SingleRangeQueryDataIterator; import datawave.query.tables.ScannerFactory; import datawave.query.tables.edge.contexts.VisitationContext; import datawave.query.transformer.EdgeQueryTransformer; import datawave.query.util.MetadataHelper; import datawave.query.util.MetadataHelperFactory; import datawave.util.time.DateHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.configuration.QueryData; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; - -public class EdgeQueryLogic extends BaseQueryLogic> { +public class EdgeQueryLogic extends BaseQueryLogic> implements CheckpointableQueryLogic { public static final String PRE_FILTER_DISABLE_KEYWORD = "__DISABLE_PREFILTER__"; - private static final int DEFAULT_SKIP_LIMIT = 10; - private static final long DEFAULT_SCAN_LIMIT = Long.MAX_VALUE; private static final Logger log = Logger.getLogger(EdgeQueryLogic.class); - protected boolean protobufEdgeFormat = true; protected EdgeQueryConfiguration config; - protected Map iteratorDiscriptors = new HashMap<>(); protected int currentIteratorPriority; protected ScannerFactory scannerFactory; - protected Set> blockedNormalizers = new HashSet<>(); - - protected List> dataTypes = null; - protected List> regexDataTypes = null; - - protected int queryThreads = 
8; - - protected int dateFilterSkipLimit = DEFAULT_SKIP_LIMIT; - - protected long dateFilterScanLimit = DEFAULT_SCAN_LIMIT; - - private Collection ranges; - - protected HashMultimap prefilterValues = null; - - private long maxQueryTerms = 10000; - private long maxPrefilterValues = 100000; - - private String modelName = null; - private String modelTableName = null; - - private EdgeQueryModel edgeQueryModel = null; - - private VisitationContext visitationContext; + protected HashMultimap prefilterValues = null; + protected VisitationContext visitationContext; protected MetadataHelperFactory metadataHelperFactory = null; + protected EdgeModelFields edgeFields; private Map querySyntaxParsers = new HashMap<>(); protected Function queryMacroFunction; private Set mandatoryQuerySyntax = null; @@ -124,24 +107,15 @@ public EdgeQueryLogic() { public EdgeQueryLogic(EdgeQueryLogic other) { super(other); - this.protobufEdgeFormat = other.protobufEdgeFormat; - this.config = other.config; - this.iteratorDiscriptors = other.iteratorDiscriptors; + if (log.isTraceEnabled()) + log.trace("Creating Cloned ShardQueryLogic: " + System.identityHashCode(this) + " from " + System.identityHashCode(other)); + + // Set EdgeQueryConfiguration variables + this.config = EdgeQueryConfiguration.create(other); + this.currentIteratorPriority = other.currentIteratorPriority; this.scannerFactory = other.scannerFactory; - this.blockedNormalizers = other.blockedNormalizers; - this.dataTypes = other.dataTypes; - this.regexDataTypes = other.regexDataTypes; - this.queryThreads = other.queryThreads; - this.dateFilterSkipLimit = other.dateFilterSkipLimit; - this.dateFilterScanLimit = other.dateFilterScanLimit; - this.ranges = other.ranges; this.prefilterValues = other.prefilterValues; - this.maxQueryTerms = other.maxQueryTerms; - this.maxPrefilterValues = other.maxPrefilterValues; - this.modelName = other.modelName; - this.modelTableName = other.modelTableName; - this.edgeQueryModel = other.edgeQueryModel; 
this.visitationContext = other.visitationContext; this.metadataHelperFactory = other.metadataHelperFactory; this.querySyntaxParsers = other.querySyntaxParsers; @@ -150,29 +124,108 @@ public EdgeQueryLogic(EdgeQueryLogic other) { this.parser = other.parser; } + @Override + public EdgeQueryConfiguration getConfig() { + if (config == null) { + config = new EdgeQueryConfiguration(); + } + return config; + } + @Override public GenericQueryConfiguration initialize(AccumuloClient client, Query settings, Set auths) throws Exception { currentIteratorPriority = super.getBaseIteratorPriority() + 30; - EdgeQueryConfiguration cfg = setUpConfig(settings); + EdgeQueryConfiguration config = getConfig().parseParameters(settings); - cfg.setClient(client); - cfg.setAuthorizations(auths); + config.setClient(client); + config.setAuthorizations(auths); - String jexlQueryString = getJexlQueryString(settings); + String queryString = getJexlQueryString(settings); - if (null == jexlQueryString) { + if (null == queryString) { throw new IllegalArgumentException("Query cannot be null"); } else { - cfg.setQueryString(jexlQueryString); + config.setQueryString(queryString); } - cfg.setBeginDate(settings.getBeginDate()); - cfg.setEndDate(settings.getEndDate()); - + config.setBeginDate(settings.getBeginDate()); + config.setEndDate(settings.getEndDate()); scannerFactory = new ScannerFactory(client); - return cfg; + prefilterValues = null; + EdgeQueryConfiguration.dateType dateFilterType = config.getDateRangeType(); + + log.debug("Performing edge table query: " + config.getQueryString()); + + boolean includeStats = config.includeStats(); + + MetadataHelper metadataHelper = prepareMetadataHelper(config.getClient(), config.getMetadataTableName(), config.getAuthorizations()); + + loadQueryModel(metadataHelper, config); + + String normalizedQuery = null; + String statsNormalizedQuery = null; + + config.setQueryString(queryString = fixQueryString(queryString)); + Set ranges = configureRanges(queryString); 
+ + VisitationContext context = null; + try { + context = normalizeJexlQuery(queryString, false); + normalizedQuery = context.getNormalizedQuery().toString(); + statsNormalizedQuery = context.getNormalizedStatsQuery().toString(); + log.debug("Jexl after normalizing SOURCE and SINK: " + normalizedQuery); + } catch (JexlException ex) { + log.error("Error parsing user query.", ex); + } + + if ((null == normalizedQuery || normalizedQuery.equals("")) && ranges.size() < 1) { + throw new IllegalStateException("Query string is empty after initial processing, no ranges or filters can be generated to execute."); + } + + QueryData qData = new QueryData(); + qData.setTableName(config.getTableName()); + qData.setRanges(ranges); + + addIterators(qData, getDateBasedIterators(config.getBeginDate(), config.getEndDate(), currentIteratorPriority, config.getDateFilterSkipLimit(), + config.getDateFilterScanLimit(), dateFilterType)); + + if (!normalizedQuery.equals("")) { + log.debug("Query being sent to the filter iterator: " + normalizedQuery); + IteratorSetting edgeIteratorSetting = new IteratorSetting(currentIteratorPriority, + EdgeFilterIterator.class.getSimpleName() + "_" + currentIteratorPriority, EdgeFilterIterator.class); + edgeIteratorSetting.addOption(EdgeFilterIterator.JEXL_OPTION, normalizedQuery); + edgeIteratorSetting.addOption(EdgeFilterIterator.PROTOBUF_OPTION, "TRUE"); + + if (!statsNormalizedQuery.equals("")) { + edgeIteratorSetting.addOption(EdgeFilterIterator.JEXL_STATS_OPTION, statsNormalizedQuery); + } + if (prefilterValues != null) { + String value = serializePrefilter(); + edgeIteratorSetting.addOption(EdgeFilterIterator.PREFILTER_ALLOWLIST, value); + } + + if (includeStats) { + edgeIteratorSetting.addOption(EdgeFilterIterator.INCLUDE_STATS_OPTION, "TRUE"); + } else { + edgeIteratorSetting.addOption(EdgeFilterIterator.INCLUDE_STATS_OPTION, "FALSE"); + } + + addIterator(qData, edgeIteratorSetting); + } + + if (context != null && 
context.isHasAllCompleteColumnFamilies()) { + for (Text columnFamily : context.getColumnFamilies()) { + qData.addColumnFamily(columnFamily); + } + } + + addCustomFilters(qData, currentIteratorPriority); + + config.setQueries(Collections.singletonList(qData)); + + return config; } public String getJexlQueryString(Query settings) throws datawave.query.language.parser.ParseException { @@ -253,7 +306,7 @@ protected String expandQueryMacros(String query) throws datawave.query.language. } protected EdgeQueryConfiguration setUpConfig(Query settings) { - return new EdgeQueryConfiguration(this, settings).parseParameters(settings); + return new EdgeQueryConfiguration(this).parseParameters(settings); } /** @@ -269,9 +322,9 @@ protected void loadQueryModel(MetadataHelper helper, EdgeQueryConfiguration conf String modelTable = config.getModelTableName() == null ? "" : config.getModelTableName(); if (null == getEdgeQueryModel() && (!model.isEmpty() && !modelTable.isEmpty())) { try { - setEdgeQueryModel(new EdgeQueryModel(helper.getQueryModel(config.getModelTableName(), config.getModelName()))); + setEdgeQueryModel(new EdgeQueryModel(helper.getQueryModel(config.getModelTableName(), config.getModelName()), getEdgeFields())); } catch (Throwable t) { - log.error("Unable to load edgeQueryModel from metadata table", t); + log.error("Unable to load edgeQueryModel from model table", t); } } } @@ -316,25 +369,6 @@ protected String applyQueryModel(String queryString) { } } - /** - * Are we querying the protobuf edge format - * - * @return true if querying the protobuf edge format - */ - public boolean isProtobufEdgeFormat() { - return protobufEdgeFormat; - } - - /** - * Set whether we are querying the protobuf edge format. Default is true. 
- * - * @param protobufedge - * the flag for the protobuf edge format - */ - public void setProtobufEdgeFormat(boolean protobufedge) { - this.protobufEdgeFormat = protobufedge; - } - /** * Parses JEXL in query string to create ranges and column family filters * @@ -344,7 +378,7 @@ public void setProtobufEdgeFormat(boolean protobufedge) { * @throws ParseException * for issues with parsing */ - protected QueryData configureRanges(String queryString) throws ParseException { + protected Set configureRanges(String queryString) throws ParseException { queryString = EdgeQueryLogic.fixQueryString(queryString); Parser parser = new Parser(new StringProvider(";")); ASTJexlScript script; @@ -356,12 +390,11 @@ protected QueryData configureRanges(String queryString) throws ParseException { script = TreeFlatteningRebuildingVisitor.flatten(script); - EdgeTableRangeBuildingVisitor visitor = new EdgeTableRangeBuildingVisitor(config.includeStats(), dataTypes, config.getMaxQueryTerms(), regexDataTypes); - + EdgeTableRangeBuildingVisitor visitor = new EdgeTableRangeBuildingVisitor(getConfig().includeStats(), getConfig().getDataTypes(), + getConfig().getMaxQueryTerms(), getConfig().getRegexDataTypes(), getEdgeFields()); visitationContext = (VisitationContext) script.jjtAccept(visitor, null); - Set ranges = visitationContext.getRanges(); - return new QueryData().withRanges(ranges); + return visitationContext.getRanges(); } /** @@ -406,18 +439,18 @@ protected VisitationContext normalizeJexlQuery(String query, boolean getFullNorm pruneAndSetPreFilterValues(visitationContext.getPreFilterValues()); long termCount = visitationContext.getTermCount(); - if (termCount > config.getMaxQueryTerms()) { - throw new IllegalArgumentException("Edge query max terms limit (" + config.getMaxQueryTerms() + ") exceeded: " + termCount + "."); + if (termCount > getConfig().getMaxQueryTerms()) { + throw new IllegalArgumentException("Edge query max terms limit (" + getConfig().getMaxQueryTerms() + ") exceeded: " 
+ termCount + "."); } return visitationContext; } - void pruneAndSetPreFilterValues(HashMultimap prefilters) { - HashMultimap newMap = HashMultimap.create(); + void pruneAndSetPreFilterValues(HashMultimap prefilters) { + HashMultimap newMap = HashMultimap.create(); long count = 0; - for (String field : prefilters.keySet()) { + for (EdgeModelFields.FieldKey field : prefilters.keySet()) { Set values = prefilters.get(field); if (values == null) { continue; @@ -431,7 +464,7 @@ void pruneAndSetPreFilterValues(HashMultimap prefilters) { newMap.putAll(field, values); count++; } - if (count <= config.getMaxPrefilterValues()) { + if (count <= getConfig().getMaxPrefilterValues()) { if (count > 0) { prefilterValues = newMap; } @@ -441,7 +474,6 @@ void pruneAndSetPreFilterValues(HashMultimap prefilters) { } protected void addIterator(QueryData qData, IteratorSetting iter) { - iteratorDiscriptors.put(currentIteratorPriority, iter); qData.addIterator(iter); currentIteratorPriority++; } @@ -477,7 +509,8 @@ public static String fixQueryString(String original) { * @return created iterator (or null if no iterator needed, i.e. 
dates not specified) */ public static IteratorSetting getDateFilter(Date beginDate, Date endDate, int priority, EdgeQueryConfiguration.dateType dateFilterType) { - return getDateFilter(beginDate, endDate, priority, DEFAULT_SKIP_LIMIT, DEFAULT_SCAN_LIMIT, dateFilterType); + return getDateFilter(beginDate, endDate, priority, EdgeQueryConfiguration.DEFAULT_SKIP_LIMIT, EdgeQueryConfiguration.DEFAULT_SCAN_LIMIT, + dateFilterType); } /** @@ -649,91 +682,94 @@ protected String serializePrefilter() { @Override public void setupQuery(GenericQueryConfiguration configuration) throws Exception { config = (EdgeQueryConfiguration) configuration; - prefilterValues = null; - EdgeQueryConfiguration.dateType dateFilterType = ((EdgeQueryConfiguration) configuration).getDateRangeType(); - log.debug("Performing edge table query: " + config.getQueryString()); - - boolean includeStats = ((EdgeQueryConfiguration) configuration).includeStats(); + log.debug("Configuring connections: tableName: " + getConfig().getTableName() + ", auths: " + getConfig().getAuthorizations()); - String queryString = config.getQueryString(); - String normalizedQuery = null; - String statsNormalizedQuery = null; - - queryString = fixQueryString(queryString); - QueryData qData = configureRanges(queryString); - setRanges(qData.getRanges()); - - VisitationContext context = null; - try { - context = normalizeJexlQuery(queryString, false); - normalizedQuery = context.getNormalizedQuery().toString(); - statsNormalizedQuery = context.getNormalizedStatsQuery().toString(); - log.debug("Jexl after normalizing SOURCE and SINK: " + normalizedQuery); - } catch (JexlException ex) { - log.error("Error parsing user query.", ex); - } + final List>> iterators = Lists.newArrayList(); - if ((null == normalizedQuery || normalizedQuery.equals("")) && qData.getRanges().size() < 1) { - throw new IllegalStateException("Query string is empty after initial processing, no ranges or filters can be generated to execute."); - } + for 
(QueryData qd : config.getQueries()) { + // scan the table + BatchScanner bs = createBatchScanner(config); - addIterators(qData, getDateBasedIterators(config.getBeginDate(), config.getEndDate(), currentIteratorPriority, dateFilterSkipLimit, dateFilterScanLimit, - dateFilterType)); + log.debug("Using the following ranges: " + qd.getRanges()); - if (!normalizedQuery.equals("")) { - log.debug("Query being sent to the filter iterator: " + normalizedQuery); - IteratorSetting edgeIteratorSetting = new IteratorSetting(currentIteratorPriority, - EdgeFilterIterator.class.getSimpleName() + "_" + currentIteratorPriority, EdgeFilterIterator.class); - edgeIteratorSetting.addOption(EdgeFilterIterator.JEXL_OPTION, normalizedQuery); - edgeIteratorSetting.addOption(EdgeFilterIterator.PROTOBUF_OPTION, "TRUE"); - - if (!statsNormalizedQuery.equals("")) { - edgeIteratorSetting.addOption(EdgeFilterIterator.JEXL_STATS_OPTION, statsNormalizedQuery); - } - if (prefilterValues != null) { - String value = serializePrefilter(); - edgeIteratorSetting.addOption(EdgeFilterIterator.PREFILTER_ALLOWLIST, value); + bs.setRanges(qd.getRanges()); + for (IteratorSetting setting : qd.getSettings()) { + bs.addScanIterator(setting); } - if (includeStats) { - edgeIteratorSetting.addOption(EdgeFilterIterator.INCLUDE_STATS_OPTION, "TRUE"); - } else { - edgeIteratorSetting.addOption(EdgeFilterIterator.INCLUDE_STATS_OPTION, "FALSE"); + for (String cf : qd.getColumnFamilies()) { + bs.fetchColumnFamily(new Text(cf)); } - addIterator(qData, edgeIteratorSetting); + iterators.add(transformScanner(bs, qd)); } - log.debug("Configuring connection: tableName: " + config.getTableName() + ", auths: " + config.getAuthorizations()); + this.iterator = concat(iterators.iterator()); + } + + /** + * Takes in a batch scanner and returns an iterator over the DiscoveredThing objects contained in the value. 
+ * + * @param scanner + * @return + */ + public static Iterator> transformScanner(final BatchScanner scanner, final QueryData queryData) { + return transform(scanner.iterator(), new Function,Entry>() { + DataInputBuffer in = new DataInputBuffer(); + + @Override + public Entry apply(Entry from) { + queryData.setLastResult(from.getKey()); + return from; + } + }); + } - BatchScanner scanner = createBatchScanner(config); + @Override + public void setupQuery(AccumuloClient client, GenericQueryConfiguration baseConfig, QueryCheckpoint checkpoint) throws Exception { + EdgeQueryConfiguration config = (EdgeQueryConfiguration) baseConfig; + baseConfig.setQueries(checkpoint.getQueries()); + config.setClient(client); - log.debug("Using the following ranges: " + qData.getRanges()); + scannerFactory = new ScannerFactory(client); - if (context != null && context.isHasAllCompleteColumnFamilies()) { - for (Text columnFamily : context.getColumnFamilies()) { - scanner.fetchColumnFamily(columnFamily); - } + setupQuery(config); + } + @Override + public List checkpoint(QueryKey queryKey) { + if (!isCheckpointable()) { + throw new UnsupportedOperationException("Cannot checkpoint a query that is not checkpointable. 
Try calling setCheckpointable(true) first."); } - scanner.setRanges(qData.getRanges()); - - addCustomFilters(qData, currentIteratorPriority); + // if we have started returning results, then capture the state of the query data objects + if (this.iterator != null) { + List checkpoints = Lists.newLinkedList(); + for (SingleRangeQueryDataIterator it = new SingleRangeQueryDataIterator(getConfig().getQueries().iterator()); it.hasNext();) { + QueryData qd = it.next(); - for (IteratorSetting setting : qData.getSettings()) { - scanner.addScanIterator(setting); + checkpoints.add(new QueryCheckpoint(queryKey, Collections.singletonList(qd))); + } + return checkpoints; + } + // otherwise we still need to plan or there are no results + else { + return Lists.newArrayList(new QueryCheckpoint(queryKey)); } + } - this.scanner = scanner; - iterator = scanner.iterator(); + @Override + public QueryCheckpoint updateCheckpoint(QueryCheckpoint checkpoint) { + // for the edge query logic, the query data objects automatically get update with + // the last result returned, so the checkpoint should already be updated! 
+ return checkpoint; } protected BatchScanner createBatchScanner(GenericQueryConfiguration config) { EdgeQueryConfiguration conf = (EdgeQueryConfiguration) config; try { - return scannerFactory.newScanner(config.getTableName(), config.getAuthorizations(), conf.getNumQueryThreads(), conf.getQuery()); + return scannerFactory.newScanner(config.getTableName(), config.getAuthorizations(), conf.getQueryThreads(), conf.getQuery()); } catch (TableNotFoundException e) { throw new IllegalStateException(e); } @@ -759,7 +795,7 @@ public void close() { * @param priority * the priority for the first of iterator filters */ - protected void addCustomFilters(QueryData data, int priority) {} + protected void addCustomFilters(QueryData data, int priority) throws Exception {} @Override public Priority getConnectionPriority() { @@ -773,87 +809,71 @@ public EdgeQueryLogic clone() { @Override public QueryLogicTransformer getTransformer(Query settings) { - return new EdgeQueryTransformer(settings, this.markingFunctions, this.responseObjectFactory); + return new EdgeQueryTransformer(settings, this.markingFunctions, this.responseObjectFactory, this.getEdgeFields()); } public List> getDataTypes() { - return dataTypes; + return getConfig().getDataTypes(); } public void setDataTypes(List> dataTypes) { - this.dataTypes = dataTypes; + getConfig().setDataTypes((dataTypes)); } public List> getRegexDataTypes() { - return regexDataTypes; + return getConfig().getRegexDataTypes(); } public void setRegexDataTypes(List> regexDataTypes) { - this.regexDataTypes = regexDataTypes; + getConfig().setRegexDataTypes(regexDataTypes); } public int getQueryThreads() { - return queryThreads; + return getConfig().getQueryThreads(); } public void setQueryThreads(int queryThreads) { - this.queryThreads = queryThreads; + getConfig().setQueryThreads(queryThreads); } @Override public Set getOptionalQueryParameters() { Set optionalParams = new TreeSet<>(); - 
optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_BEGIN); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_END); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_BEGIN); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_END); optionalParams.add(QueryParameters.DATATYPE_FILTER_SET); optionalParams.add(EdgeQueryConfiguration.INCLUDE_STATS); optionalParams.add(EdgeQueryConfiguration.DATE_RANGE_TYPE); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_PAGETIMEOUT); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_EXPIRATION); - optionalParams.add(datawave.webservice.query.QueryParameters.QUERY_MAX_RESULTS_OVERRIDE); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_PAGETIMEOUT); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_EXPIRATION); + optionalParams.add(datawave.microservice.query.QueryParameters.QUERY_MAX_RESULTS_OVERRIDE); return optionalParams; } - public Set> getBlockedNormalizers() { - return blockedNormalizers; - } - - public void setBlockedNormalizers(Set> blockedNormalizers) { - this.blockedNormalizers = blockedNormalizers; - } - - public Collection getRanges() { - return ranges; - } - - public void setRanges(Collection ranges) { - this.ranges = ranges; - } - public long getMaxQueryTerms() { - return maxQueryTerms; + return getConfig().getMaxQueryTerms(); } public void setMaxQueryTerms(long maxQueryTerms) { - this.maxQueryTerms = maxQueryTerms; + getConfig().setMaxQueryTerms(maxQueryTerms); } public long getMaxPrefilterValues() { - return maxPrefilterValues; + return getConfig().getMaxPrefilterValues(); } public void setMaxPrefilterValues(long maxPrefilterValues) { - this.maxPrefilterValues = maxPrefilterValues; + getConfig().setMaxPrefilterValues(maxPrefilterValues); } @Override public Set getRequiredQueryParameters() { Set requiredParams = new TreeSet<>(); - 
requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_STRING); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_NAME); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_PAGESIZE); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS); - requiredParams.add(datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_STRING); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_NAME); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_PAGESIZE); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS); + requiredParams.add(datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME); return requiredParams; } @@ -863,27 +883,35 @@ public Set getExampleQueries() { } public EdgeQueryModel getEdgeQueryModel() { - return this.edgeQueryModel; + return getConfig().getEdgeQueryModel(); } public void setEdgeQueryModel(EdgeQueryModel model) { - this.edgeQueryModel = model; + getConfig().setEdgeQueryModel(model); } public String getModelName() { - return this.modelName; + return getConfig().getModelName(); } public void setModelName(String modelName) { - this.modelName = modelName; + getConfig().setModelName(modelName); } public String getModelTableName() { - return this.modelTableName; + return getConfig().getModelTableName(); } public void setModelTableName(String modelTableName) { - this.modelTableName = modelTableName; + getConfig().setModelTableName(modelTableName); + } + + public String getMetadataTableName() { + return getConfig().getMetadataTableName(); + } + + public void setMetadataTableName(String metadataTableName) { + getConfig().setMetadataTableName(metadataTableName); } public MetadataHelperFactory getMetadataHelperFactory() { @@ -897,20 +925,50 @@ public void setMetadataHelperFactory(MetadataHelperFactory metadataHelperFactory 
this.metadataHelperFactory = metadataHelperFactory; } + public boolean includeStats() { + return getConfig().includeStats(); + } + + public void setIncludeStats(boolean includeStats) { + getConfig().setIncludeStats(includeStats); + } + public int getDateFilterSkipLimit() { - return dateFilterSkipLimit; + return getConfig().getDateFilterSkipLimit(); } public void setDateFilterSkipLimit(int dateFilterSkipLimit) { - this.dateFilterSkipLimit = dateFilterSkipLimit; + getConfig().setDateFilterSkipLimit(dateFilterSkipLimit); } public long getDateFilterScanLimit() { - return dateFilterScanLimit; + return getConfig().getDateFilterScanLimit(); } public void setDateFilterScanLimit(long dateFilterScanLimit) { - this.dateFilterScanLimit = dateFilterScanLimit; + getConfig().setDateFilterScanLimit(dateFilterScanLimit); + } + + public void setEdgeModelFieldsFactory(EdgeModelFieldsFactory edgeModelFieldsFactory) { + this.edgeFields = edgeModelFieldsFactory.createFields(); + } + + public void setEdgeFields(EdgeModelFields edgeFields) { + this.edgeFields = edgeFields; + } + + public EdgeModelFields getEdgeFields() { + return edgeFields; + } + + @Override + public boolean isCheckpointable() { + return getConfig().isCheckpointable(); + } + + @Override + public void setCheckpointable(boolean checkpointable) { + getConfig().setCheckpointable(checkpointable); } public Map getQuerySyntaxParsers() { diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/IdentityContext.java b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/IdentityContext.java index f85a7abb27f..9a8281bf2b1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/IdentityContext.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/IdentityContext.java @@ -2,20 +2,20 @@ import org.apache.commons.lang.StringUtils; -import datawave.edge.model.EdgeModelAware; +import datawave.edge.model.EdgeModelFields; -public class 
IdentityContext implements EdgeModelAware, EdgeContext { - private String identity; +public class IdentityContext implements EdgeContext { + private EdgeModelFields.FieldKey identity; private String literal; private String operation; boolean equivalence; - public IdentityContext(String identity, String literal, String opp) { - this.identity = identity; + public IdentityContext(String internalFieldName, String literal, String opp, EdgeModelFields fields) { + this.identity = fields.parse(internalFieldName); this.literal = literal; this.operation = opp; - if (opp.equals(NOT_EQUALS) || opp.equals(NOT_EQUALS_REGEX)) { + if (opp.equals(EdgeModelFields.NOT_EQUALS) || opp.equals(EdgeModelFields.NOT_EQUALS_REGEX)) { equivalence = false; } else { equivalence = true; @@ -59,7 +59,7 @@ public int hashCode() { return result; } - public String getIdentity() { + public EdgeModelFields.FieldKey getIdentity() { return identity; } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/QueryContext.java b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/QueryContext.java index 05b77861d70..d2d3e4a04f1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/QueryContext.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/QueryContext.java @@ -9,7 +9,7 @@ import com.google.common.collect.HashMultimap; -import datawave.edge.model.EdgeModelAware; +import datawave.edge.model.EdgeModelFields; import datawave.edge.util.EdgeKey; import datawave.query.tables.edge.EdgeQueryLogic; import datawave.util.StringUtils; @@ -32,12 +32,17 @@ * The enforce rules boolean that gets passed around is to check to make sure that the same sets of identifiers are not being ANDed together. 
Eg can't and * SOURCE and SOURCE because an edge only has one SOURCE */ -public class QueryContext implements EdgeModelAware, EdgeContext { +public class QueryContext implements EdgeContext { + private final EdgeModelFields fields; private RowContext rowContext; private ColumnContext columnContext; private Set otherContexts; + public QueryContext(EdgeModelFields fields) { + this.fields = fields; + } + private boolean hasCompleteColumnFamilies = false; public void packageIdentities(List identityContexts) { @@ -49,12 +54,13 @@ public void packageIdentities(List identityContexts, boolean en buildColumnContexts(identityContexts, enforceRules); return; } - String type = identityContexts.get(0).getIdentity(); + EdgeModelFields.FieldKey type = identityContexts.get(0).getIdentity(); - if (type.equals(EDGE_SOURCE) || type.equals(EDGE_SINK)) { + if (type.equals(EdgeModelFields.FieldKey.EDGE_SOURCE) || type.equals(EdgeModelFields.FieldKey.EDGE_SINK)) { buildRowContexts(identityContexts, enforceRules); - } else if (type.equals(EDGE_TYPE) || type.equals(EDGE_RELATIONSHIP) || type.equals(EDGE_ATTRIBUTE1) || type.equals(EDGE_ATTRIBUTE2) - || type.equals(EDGE_ATTRIBUTE3) || type.equals(FUNCTION)) { + } else if (type.equals(EdgeModelFields.FieldKey.EDGE_TYPE) || type.equals(EdgeModelFields.FieldKey.EDGE_RELATIONSHIP) + || type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE1) || type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE2) + || type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE3) || type.equals(EdgeModelFields.FieldKey.FUNCTION)) { buildColumnContexts(identityContexts, enforceRules); } else { throw new RuntimeException("Invalid identifier: " + type); @@ -211,7 +217,7 @@ public boolean combineQueryContexts(List other, boolean optionalSo } // If any parts of this queryContext needs to be moved out into the other context list do so here if (overlappingColumn || overlappingSink) { - QueryContext tempContext = new QueryContext(); + QueryContext tempContext = new 
QueryContext(fields); // if this query context does not have selector list then if there is an over lap in either the row // or column context push them both out into the list of other contexts if (!this.hasSourceList() || overlappingSink) { @@ -287,7 +293,8 @@ private void verifyNotSet(List contexts, boolean check) { * get away with excluding source and sink if the */ public void buildStrings(StringBuilder normalizedQuery, StringBuilder normalizedStatsQuery, boolean includeStats, boolean includeSource, - boolean includeSink, HashMultimap preFilterValues, boolean includeColumnFamilyTerms, boolean updateAllowlist) { + boolean includeSink, HashMultimap preFilterValues, boolean includeColumnFamilyTerms, + boolean updateAllowlist, EdgeModelFields fields) { StringBuilder trimmedQuery = new StringBuilder(); StringBuilder trimmedStatsQuery = new StringBuilder(); @@ -307,10 +314,10 @@ public void buildStrings(StringBuilder normalizedQuery, StringBuilder normalized if (i > 0) { if (tempQueryString.length() > 7) { - tempQueryString.append(OR); + tempQueryString.append(EdgeModelFields.OR); } if (includeStats && tempQueryStatsString.length() > 7) { - tempQueryStatsString.append(OR); + tempQueryStatsString.append(EdgeModelFields.OR); } } if (this.otherContexts.size() > 1) { @@ -335,10 +342,10 @@ public void buildStrings(StringBuilder normalizedQuery, StringBuilder normalized i++; } if (trimmedQuery.length() > 7 && tempQueryString.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } if (includeStats && trimmedStatsQuery.length() > 7 && tempQueryStatsString.length() > 7) { - trimmedStatsQuery.append(AND); + trimmedStatsQuery.append(EdgeModelFields.AND); } if (tempQueryString.length() > 7) { trimmedQuery.append("(" + tempQueryString + ")"); @@ -353,8 +360,8 @@ public void buildStrings(StringBuilder normalizedQuery, StringBuilder normalized } } - private NormalizedQuery toString(boolean includeStats, boolean includeSource, boolean includeSink, 
HashMultimap preFilterValues, - boolean includeColumnFamilyTerms, boolean updateAllowlist) { + private NormalizedQuery toString(boolean includeStats, boolean includeSource, boolean includeSink, + HashMultimap preFilterValues, boolean includeColumnFamilyTerms, boolean updateAllowlist) { NormalizedQuery rowString = null, colString = null; @@ -378,13 +385,13 @@ private NormalizedQuery toString(boolean includeStats, boolean includeSource, bo if (colString != null) { if (colString.getNormalizedQuery() != null && colString.getNormalizedQuery().length() > 7) { if (normalizedQuery.length() > 7) { - normalizedQuery.append(AND); + normalizedQuery.append(EdgeModelFields.AND); } normalizedQuery.append(colString.getNormalizedQuery()); } if (includeStats && colString.getNormalizedStatsQuery() != null && colString.getNormalizedStatsQuery().length() > 7) { if (normalizedStatsQuery.length() > 7) { - normalizedStatsQuery.append(AND); + normalizedStatsQuery.append(EdgeModelFields.AND); } normalizedStatsQuery.append(colString.getNormalizedStatsQuery()); } @@ -423,22 +430,22 @@ public Set getOtherContexts() { return otherContexts; } - private void updateAllowList(IdentityContext expression, HashMultimap preFilterValues) { + private void updateAllowList(IdentityContext expression, HashMultimap preFilterValues) { /* * A allowlist is a list of things that you allow, therefore, there is no reason to check for things that you do not allow. This means there is no * reason to check for NOT_EQUALS or NOT_EQUALS_REGEX, because they won't be allowed by default. 
*/ - if (expression.getOperation().equals(EQUALS)) { + if (expression.getOperation().equals(EdgeModelFields.EQUALS)) { preFilterValues.put(expression.getIdentity(), expression.getLiteral()); - } else if (expression.getOperation().equals(EQUALS_REGEX)) { + } else if (expression.getOperation().equals(EdgeModelFields.EQUALS_REGEX)) { preFilterValues.put(expression.getIdentity(), EdgeQueryLogic.PRE_FILTER_DISABLE_KEYWORD); } } private int populateQuery(List terms, StringBuilder trimmedQuery, StringBuilder trimmedStatsQuery, String operator, boolean includeStats, - HashMultimap preFilterValues, boolean addToPrefilter) { + HashMultimap preFilterValues, boolean addToPrefilter) { int numTermsAdded = 0; boolean createStats = includeStats; boolean expandStats = false; @@ -452,13 +459,14 @@ private int populateQuery(List terms, StringBuilder trimmedQuer for (int i = 0; i < terms.size(); i++) { IdentityContext iContext = terms.get(i); - if (includeStats == false || iContext.getIdentity().equals(EDGE_SINK)) { + if (includeStats == false || iContext.getIdentity().equals(EdgeModelFields.FieldKey.EDGE_SINK)) { createStats = false; } else { createStats = true; } - if (iContext.getIdentity().equals(EDGE_RELATIONSHIP) || iContext.getIdentity().equals(EDGE_ATTRIBUTE1)) { + if (iContext.getIdentity().equals(EdgeModelFields.FieldKey.EDGE_RELATIONSHIP) + || iContext.getIdentity().equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE1)) { expandStats = true; } else { expandStats = false; @@ -471,7 +479,7 @@ private int populateQuery(List terms, StringBuilder trimmedQuer } } - if (!iContext.getIdentity().equals(FUNCTION)) { + if (!iContext.getIdentity().equals(EdgeModelFields.FieldKey.FUNCTION)) { trimmedQuery.append(iContext.getIdentity() + " " + iContext.getOperation() + " " + "'" + iContext.getEscapedLiteral() + "'"); if (createStats) { if (expandStats) { @@ -500,7 +508,7 @@ private int populateQuery(List terms, StringBuilder trimmedQuer if (createStats && tempStatsStringBuilder.length() > 7) 
{ if (trimmedStatsQuery.length() > 7) { - trimmedStatsQuery.append(AND); + trimmedStatsQuery.append(EdgeModelFields.AND); } trimmedStatsQuery.append(tempStatsStringBuilder); } @@ -510,8 +518,8 @@ private int populateQuery(List terms, StringBuilder trimmedQuer /* * Used for creating the stats query. Splits up EDGE_RELATIONSHIP=A-B into (EDGE_RELATIONSHIP=A) */ - private static StringBuilder splitCompoundValue(String name, String operator, String value, HashMultimap preFilterValues, - boolean updateAllowlist) { + private static StringBuilder splitCompoundValue(EdgeModelFields.FieldKey name, String operator, String value, + HashMultimap preFilterValues, boolean updateAllowlist) { StringBuilder sb = new StringBuilder(); String[] parts = value.split("-"); @@ -533,7 +541,8 @@ private static StringBuilder splitCompoundValue(String name, String operator, St } - public class ColumnContext implements EdgeModelAware { + public class ColumnContext { + // Each list (except exclusions and funtions) is expected to have identity contexts all with the same opperation private List edgeTypes; private List edgeRelationships; @@ -595,24 +604,24 @@ public void packageIdentities(List identityContexts, boolean en return; } - String type = identityContexts.get(0).getIdentity(); + EdgeModelFields.FieldKey type = identityContexts.get(0).getIdentity(); - if (type.equals(EDGE_TYPE)) { + if (type.equals(EdgeModelFields.FieldKey.EDGE_TYPE)) { verifyNotSet(edgeTypes, enforceRules); addEdgeTypes(identityContexts); - } else if (type.equals(EDGE_RELATIONSHIP)) { + } else if (type.equals(EdgeModelFields.FieldKey.EDGE_RELATIONSHIP)) { verifyNotSet(edgeRelationships, enforceRules); addEdgeRelationships(identityContexts); - } else if (type.equals(EDGE_ATTRIBUTE1)) { + } else if (type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE1)) { verifyNotSet(edgeAttribute1Values, enforceRules); addEdgeAttribute1Values(identityContexts); - } else if (type.equals(EDGE_ATTRIBUTE2)) { + } else if 
(type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE2)) { verifyNotSet(edgeAttribute2Values, enforceRules); addAttribute2Values(identityContexts); - } else if (type.equals(EDGE_ATTRIBUTE3)) { + } else if (type.equals(EdgeModelFields.FieldKey.EDGE_ATTRIBUTE3)) { verifyNotSet(edgeAttribute3Values, enforceRules); addAttribute3Values(identityContexts); - } else if (type.equals(FUNCTION)) { + } else if (type.equals(EdgeModelFields.FieldKey.FUNCTION)) { verifyNotSet(functions, enforceRules); functions = identityContexts; } else { @@ -630,13 +639,13 @@ public boolean hasCompleteColumnFamily() { completeColumnFamilies = true; if (edgeTypes == null) { completeColumnFamilies = false; - } else if (edgeTypes.get(0).getOperation() != EQUALS) { + } else if (edgeTypes.get(0).getOperation() != EdgeModelFields.EQUALS) { completeColumnFamilies = false; } if (edgeRelationships == null) { completeColumnFamilies = false; - } else if (edgeRelationships.get(0).getOperation() != EQUALS) { + } else if (edgeRelationships.get(0).getOperation() != EdgeModelFields.EQUALS) { completeColumnFamilies = false; } @@ -677,7 +686,7 @@ public List computeColumnFamilyUnions(boolean includeStats) { return columnFamilies; } - public NormalizedQuery toString(boolean includeStats, HashMultimap preFilterValues, boolean includeColumnFamilyTerms, + public NormalizedQuery toString(boolean includeStats, HashMultimap preFilterValues, boolean includeColumnFamilyTerms, boolean updateAllowlist) { StringBuilder trimmedQuery = new StringBuilder(); StringBuilder trimmedStatsQuery = new StringBuilder(); @@ -685,61 +694,63 @@ public NormalizedQuery toString(boolean includeStats, HashMultimap 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) {trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getEdgeTypes(), trimmedQuery, trimmedStatsQuery, OR, includeStats, preFilterValues, (updateAllowlist)); + numTermsAdded += 
populateQuery(getEdgeTypes(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, preFilterValues, + (updateAllowlist)); } if (includeColumnFamilyTerms && getEdgeRelationships() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) {trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getEdgeRelationships(), trimmedQuery, trimmedStatsQuery, OR, includeStats, preFilterValues, (updateAllowlist)); + numTermsAdded += populateQuery(getEdgeRelationships(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, preFilterValues, + (updateAllowlist)); } if (getEdgeAttribute1Values() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) {trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getEdgeAttribute1Values(), trimmedQuery, trimmedStatsQuery, OR, includeStats, preFilterValues, + numTermsAdded += populateQuery(getEdgeAttribute1Values(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, preFilterValues, (updateAllowlist)); } if (getEdgeAttribute2Values() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) {trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getEdgeAttribute2Values(), trimmedQuery, trimmedStatsQuery, OR, includeStats, preFilterValues, + numTermsAdded += populateQuery(getEdgeAttribute2Values(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, preFilterValues, (updateAllowlist)); } if (getEdgeAttribute3Values() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) 
{trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getEdgeAttribute3Values(), trimmedQuery, trimmedStatsQuery, OR, includeStats, preFilterValues, + numTermsAdded += populateQuery(getEdgeAttribute3Values(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, preFilterValues, (updateAllowlist)); } if (getExclusions() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // there could be sinks in this list of exclusions which would not get added do AND'ing in method - numTermsAdded += populateQuery(getExclusions(), trimmedQuery, trimmedStatsQuery, AND, includeStats, preFilterValues, false); + numTermsAdded += populateQuery(getExclusions(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.AND, includeStats, preFilterValues, false); } if (getFunctions() != null) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // if (includeStats && trimmedStatsQuery.length() > 7) {trimmedStatsQuery.append(AND);} - numTermsAdded += populateQuery(getFunctions(), trimmedQuery, trimmedStatsQuery, AND, includeStats, preFilterValues, false); + numTermsAdded += populateQuery(getFunctions(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.AND, includeStats, preFilterValues, false); } NormalizedQuery ret = new NormalizedQuery(); @@ -841,7 +852,7 @@ public List getFunctions() { } } - public class RowContext implements EdgeModelAware { + public class RowContext { private List sources; private List sinks; @@ -851,12 +862,12 @@ public void packageIdentities(List identityContexts) { } public void packageIdentities(List identityContexts, boolean enforceRules) { - String type = identityContexts.get(0).getIdentity(); + EdgeModelFields.FieldKey type = identityContexts.get(0).getIdentity(); - if (type.equals(EDGE_SOURCE)) { + if (type.equals(EdgeModelFields.FieldKey.EDGE_SOURCE)) { verifyNotSet(sources, enforceRules); addSources(identityContexts); - } 
else if (type.equals(EDGE_SINK)) { + } else if (type.equals(EdgeModelFields.FieldKey.EDGE_SINK)) { verifyNotSet(sinks, enforceRules); addSinks(identityContexts); } else { @@ -869,15 +880,15 @@ public NormalizedQuery toString(boolean includeStats, boolean includingSources, StringBuilder trimmedStatsQuery = new StringBuilder(); if (includingSources) { - populateQuery(getSources(), trimmedQuery, trimmedStatsQuery, OR, includeStats, null, false); + populateQuery(getSources(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, null, false); } if (getSinks() != null && (includingSources || includingSinks)) { if (trimmedQuery.length() > 7) { - trimmedQuery.append(AND); + trimmedQuery.append(EdgeModelFields.AND); } // never add target sources to stats query no need to append - populateQuery(getSinks(), trimmedQuery, trimmedStatsQuery, OR, includeStats, null, false); + populateQuery(getSinks(), trimmedQuery, trimmedStatsQuery, EdgeModelFields.OR, includeStats, null, false); } NormalizedQuery ret = new NormalizedQuery(); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/VisitationContext.java b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/VisitationContext.java index 170803083b1..8ad68e3167e 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/VisitationContext.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/edge/contexts/VisitationContext.java @@ -11,7 +11,7 @@ import com.google.common.collect.HashMultimap; -import datawave.edge.model.EdgeModelAware; +import datawave.edge.model.EdgeModelFields; import datawave.edge.util.EdgeKeyUtil; import datawave.query.parser.JavaRegexAnalyzer; @@ -26,7 +26,7 @@ * Further more the original query must be run through a set of normalizers so that the query can correctly get results from the table without having to worry * about capitalization or other formatting details. 
*/ -public class VisitationContext implements EdgeModelAware, EdgeContext { +public class VisitationContext implements EdgeContext { private static final Logger log = Logger.getLogger(VisitationContext.class); StringBuilder normalizedQuery; @@ -35,28 +35,26 @@ public class VisitationContext implements EdgeModelAware, EdgeContext { protected List columnFamilies; protected boolean hasAllCompleteColumnFamilies = false; - private HashMultimap preFilterValues = HashMultimap.create(); + private HashMultimap preFilterValues = HashMultimap.create(); long termCount = 0; protected boolean includeStats; + private final EdgeModelFields fields; + private static final String OR = " || "; private static final String AND = " && "; - public VisitationContext() { - normalizedQuery = new StringBuilder(); - normalizedStatsQuery = new StringBuilder(); - ranges = new HashSet<>(); - - this.includeStats = true; + public VisitationContext(EdgeModelFields fields) { + this(fields, true); } - public VisitationContext(boolean includeStats) { + public VisitationContext(EdgeModelFields fields, boolean includeStats) { normalizedQuery = new StringBuilder(); normalizedStatsQuery = new StringBuilder(); ranges = new HashSet<>(); - + this.fields = fields; this.includeStats = includeStats; } @@ -101,7 +99,7 @@ public void updateQueryStrings(QueryContext qContext, boolean includeSources, bo } qContext.buildStrings(trimmedQuery, trimmedStatsQuery, includeStats, includeSources, includeSinks, preFilterValues, includColumnFamilyTerms, - updateAllowlist); + updateAllowlist, fields); trimmedQuery.append(")"); if (includeStats) { trimmedStatsQuery.append(")"); @@ -141,7 +139,7 @@ private Set buildRanges(List sources, List buildRanges(List sources, List buildRange(IdentityContext source, IdentityContext sink) { rangeSet.add(EdgeKeyUtil.createEscapedRange(rowSource, false, includeStats, false)); } - boolean isSinkRegex = sink.getOperation().equals(EQUALS_REGEX); + boolean isSinkRegex = 
sink.getOperation().equals(EdgeModelFields.EQUALS_REGEX); rangeSet.add(EdgeKeyUtil.createEscapedRange(rowSource, rowSink, isSinkRegex)); return rangeSet; @@ -227,7 +225,7 @@ private Set buildRange(IdentityContext source, IdentityContext sink) { */ private String getLeadingLiteral(IdentityContext term, boolean leadingWildCardAllowed) { String leadingLiteral = ""; - if (term.getOperation().equals(EQUALS_REGEX)) { + if (term.getOperation().equals(EdgeModelFields.EQUALS_REGEX)) { try { JavaRegexAnalyzer regexAnalyzer = new JavaRegexAnalyzer(term.getLiteral()); @@ -328,7 +326,7 @@ public Set getRanges() { return ranges; } - public HashMultimap getPreFilterValues() { + public HashMultimap getPreFilterValues() { return preFilterValues; } diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetQueryPlanVisitor.java b/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetQueryPlanVisitor.java index bc5d4f77f52..5252b30cd76 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetQueryPlanVisitor.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetQueryPlanVisitor.java @@ -72,6 +72,7 @@ public QueryPlan visit(ASTEQNode node, Object data) { // @formatter:off QueryPlan plan = new QueryPlan() + .withTableName(config.getShardTableName()) .withQueryTree(node) .withRanges(Collections.singleton(new Range(startKey, true, endKey, false))) .withColumnFamilies(fieldPairs); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetedQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetedQueryLogic.java index c5b748376d2..a7e3fcca142 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetedQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/facets/FacetedQueryLogic.java @@ -16,6 +16,9 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import 
datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.DocumentSerialization; import datawave.query.QueryParameters; @@ -31,9 +34,6 @@ import datawave.query.tables.IndexQueryLogic; import datawave.query.transformer.FacetedTransformer; import datawave.util.StringUtils; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/remote/RemoteQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/remote/RemoteQueryLogic.java index c0d176fec86..8cb332ded53 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/remote/RemoteQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/remote/RemoteQueryLogic.java @@ -1,8 +1,8 @@ package datawave.query.tables.remote; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.remote.RemoteQueryService; import datawave.security.authorization.UserOperations; -import datawave.webservice.common.remote.RemoteQueryService; -import datawave.webservice.query.logic.QueryLogic; /** * A remote query logic is is a query logic that uses a remote query service. 
diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountAggregatingIterator.java b/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountAggregatingIterator.java index 32b79c98ddf..b781cbcda94 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountAggregatingIterator.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountAggregatingIterator.java @@ -31,12 +31,13 @@ public class CountAggregatingIterator extends TransformIterator { protected Set columnVisibilities = Sets.newHashSet(); - private MarkingFunctions markingFunctions = MarkingFunctions.Factory.createMarkingFunctions(); + private final MarkingFunctions markingFunctions; private Kryo kryo = new Kryo(); - public CountAggregatingIterator(Iterator> iterator, Transformer transformer) { + public CountAggregatingIterator(Iterator> iterator, Transformer transformer, MarkingFunctions markingFunctions) { super(iterator, transformer); + this.markingFunctions = markingFunctions; } @Override diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountResultPostprocessor.java b/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountResultPostprocessor.java new file mode 100644 index 00000000000..5edb7a85dd5 --- /dev/null +++ b/warehouse/query-core/src/main/java/datawave/query/tables/shard/CountResultPostprocessor.java @@ -0,0 +1,100 @@ +package datawave.query.tables.shard; + +import static datawave.query.transformer.ShardQueryCountTableTransformer.COUNT_CELL; + +import java.util.ArrayList; +import java.util.List; +import java.util.Set; + +import org.apache.accumulo.core.security.ColumnVisibility; +import org.apache.log4j.Logger; + +import com.google.common.collect.Sets; + +import datawave.core.query.logic.ResultPostprocessor; +import datawave.marking.MarkingFunctions; +import datawave.webservice.query.result.event.EventBase; +import datawave.webservice.query.result.event.FieldBase; + +public class 
CountResultPostprocessor implements ResultPostprocessor { + private static final Logger log = Logger.getLogger(CountResultPostprocessor.class); + + private final MarkingFunctions markingFunctions; + + public CountResultPostprocessor(MarkingFunctions markingFunctions) { + this.markingFunctions = markingFunctions; + } + + @Override + public void apply(List results) { + if (results.size() > 1) { + EventBase firstResult = null; + Long count = 0L; + Set columnVisibilities = Sets.newHashSet(); + + boolean success = true; + List resultsToRemove = new ArrayList<>(); + for (Object result : results) { + if (result instanceof EventBase) { + EventBase event = (EventBase) result; + + // save the first result + if (firstResult == null) { + firstResult = event; + } + + // aggregate the count, and column visibility + FieldBase countField = getCountField(event.getFields()); + if (countField != null) { + columnVisibilities.add(new ColumnVisibility(countField.getColumnVisibility())); + if (countField.getTypedValue().getDataType().isAssignableFrom(Long.class)) { + count += ((Number) countField.getValueOfTypedValue()).longValue(); + + if (event != firstResult) { + resultsToRemove.add(event); + } + } else { + success = false; + break; + } + } else { + success = false; + break; + } + } else { + success = false; + break; + } + } + + if (success) { + ColumnVisibility columnVisibility = null; + try { + columnVisibility = markingFunctions.combine(columnVisibilities); + } catch (Exception e) { + log.error("Could not create combined columnVisibilities for the count", e); + } + + if (columnVisibility != null) { + results.removeAll(resultsToRemove); + + // update the first result + FieldBase countField = getCountField(firstResult.getFields()); + countField.setValue(count); + countField.setColumnVisibility(columnVisibility); + } + } + } + } + + private FieldBase getCountField(List> fields) { + FieldBase countField = null; + for (FieldBase field : fields) { + if 
(field.getName().equals(COUNT_CELL)) { + countField = field; + break; + } + } + return countField; + } +} diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/shard/FieldIndexCountQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/shard/FieldIndexCountQueryLogic.java index eb1e3438e7a..4602dcf3c55 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/shard/FieldIndexCountQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/shard/FieldIndexCountQueryLogic.java @@ -29,8 +29,11 @@ import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.data.type.Type; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.QueryParameters; import datawave.query.config.ShardQueryConfiguration; @@ -40,10 +43,7 @@ import datawave.query.transformer.FieldIndexCountQueryTransformer; import datawave.query.util.MetadataHelper; import datawave.util.StringUtils; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * Given a date range, FieldName(s), FieldValue(s), DataType(s) pull keys directly using FieldIndexIterator and count them as specified. 
diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/shard/IndexStatsQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/shard/IndexStatsQueryLogic.java index 56f56480301..33a6b4a6967 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/shard/IndexStatsQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/shard/IndexStatsQueryLogic.java @@ -27,7 +27,13 @@ import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.core.iterators.filter.CsvKeyFilter; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.AbstractQueryLogicTransformer; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.QueryParameters; import datawave.query.config.ShardQueryConfiguration; @@ -37,12 +43,6 @@ import datawave.security.util.ScannerHelper; import datawave.util.TableName; import datawave.util.time.DateHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.AbstractQueryLogicTransformer; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.result.istat.FieldStat; import datawave.webservice.query.result.istat.IndexStatsResponse; import datawave.webservice.result.BaseQueryResponse; diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/FullSSDeepDiscoveryChainStrategy.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/FullSSDeepDiscoveryChainStrategy.java index 8a14d0acf04..2cb82303c48 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/FullSSDeepDiscoveryChainStrategy.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/FullSSDeepDiscoveryChainStrategy.java @@ -16,11 +16,11 @@ import com.google.common.collect.Multimap; import com.google.common.collect.TreeMultimap; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.discovery.DiscoveredThing; import datawave.query.tables.chained.strategy.FullChainStrategy; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.QueryLogic; /** * A chain strategy that is designed to first run a ssdeep similarity query and then run a subsequent discovery query for each matching ssdeep hash found by diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepChainedDiscoveryQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepChainedDiscoveryQueryLogic.java index 6635e6d3846..028b4fcf4d8 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepChainedDiscoveryQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepChainedDiscoveryQueryLogic.java @@ -8,10 +8,10 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.tables.chained.ChainedQueryTable; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.QueryLogicTransformer; /** * Implements a ChainedQueryTable that will first use the SSDeepSimilarityQueryLogic to find similar hashes for a set of query hashes and then 
run the diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepDiscoveryQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepDiscoveryQueryLogic.java index 6bf6ca09bc1..aa9a7ce2098 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepDiscoveryQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepDiscoveryQueryLogic.java @@ -1,6 +1,5 @@ package datawave.query.tables.ssdeep; -import java.security.Principal; import java.util.Collection; import java.util.Iterator; import java.util.List; @@ -14,23 +13,23 @@ import org.apache.commons.collections4.iterators.TransformIterator; import datawave.audit.SelectorExtractor; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; +import datawave.core.query.logic.AbstractQueryLogicTransformer; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.ResponseEnricherBuilder; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.discovery.DiscoveryLogic; import datawave.query.discovery.DiscoveryTransformer; import datawave.query.model.QueryModel; import datawave.query.util.MetadataHelperFactory; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.webservice.common.audit.Auditor; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.iterator.DatawaveTransformIterator; -import 
datawave.webservice.query.logic.AbstractQueryLogicTransformer; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.ResponseEnricherBuilder; -import datawave.webservice.query.logic.RoleManager; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.ResponseObjectFactory; @@ -186,6 +185,14 @@ public void setModelName(String modelName) { discoveryDelegate.setModelName(modelName); } + public void setMetadataTableName(String metadataTableName) { + discoveryDelegate.setMetadataTableName(metadataTableName); + } + + public String getIndexTableName() { + return discoveryDelegate.getIndexTableName(); + } + public void setQueryModel(QueryModel model) { discoveryDelegate.setQueryModel(model); } @@ -274,13 +281,13 @@ public ResponseObjectFactory getResponseObjectFactory() { } @Override - public Principal getPrincipal() { - return discoveryDelegate.getPrincipal(); + public ProxiedUserDetails getCurrentUser() { + return discoveryDelegate.getCurrentUser(); } @Override - public void setPrincipal(Principal principal) { - discoveryDelegate.setPrincipal(principal); + public void setCurrentUser(ProxiedUserDetails currentUser) { + discoveryDelegate.setCurrentUser(currentUser); } @Override @@ -404,13 +411,13 @@ public void setCollectQueryMetrics(boolean collectQueryMetrics) { } @Override - public RoleManager getRoleManager() { - return discoveryDelegate.getRoleManager(); + public Set getRequiredRoles() { + return discoveryDelegate.getRequiredRoles(); } @Override - public void setRoleManager(RoleManager roleManager) { - discoveryDelegate.setRoleManager(roleManager); + public void setRequiredRoles(Set requiredRoles) { + discoveryDelegate.setRequiredRoles(requiredRoles); } @Override @@ -424,13 +431,8 @@ public void setConnPoolName(String connPoolName) { } @Override - public boolean 
canRunQuery() { - return discoveryDelegate.canRunQuery(); - } - - @Override - public boolean canRunQuery(Principal principal) { - return discoveryDelegate.canRunQuery(principal); + public boolean canRunQuery(Collection userRoles) { + return discoveryDelegate.canRunQuery(userRoles); } @Override diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepScoringFunction.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepScoringFunction.java index fc17ad144ca..61cb511c04b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepScoringFunction.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepScoringFunction.java @@ -12,6 +12,8 @@ import com.google.common.collect.Multimap; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.config.SSDeepSimilarityQueryConfiguration; import datawave.util.ssdeep.ChunkSizeEncoding; import datawave.util.ssdeep.IntegerEncoding; @@ -20,8 +22,6 @@ import datawave.util.ssdeep.SSDeepHashEditDistanceScorer; import datawave.util.ssdeep.SSDeepHashScorer; import datawave.util.ssdeep.SSDeepNGramOverlapScorer; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; /** A function that transforms entries retrieved from Accumulo into Scored SSDeep hash matches */ public class SSDeepScoringFunction implements Function,Stream> { diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryLogic.java index 402718d6ed3..eb40a1f4d16 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryLogic.java @@ -17,6 +17,11 @@ import com.google.common.collect.Multimap; +import 
datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.config.SSDeepSimilarityQueryConfiguration; import datawave.query.tables.ScannerFactory; import datawave.util.ssdeep.ChunkSizeEncoding; @@ -24,12 +29,7 @@ import datawave.util.ssdeep.NGramGenerator; import datawave.util.ssdeep.NGramTuple; import datawave.util.ssdeep.SSDeepHash; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; public class SSDeepSimilarityQueryLogic extends BaseQueryLogic { diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformer.java index b6a50264ac9..4fbc0f3139b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformer.java @@ -5,11 +5,18 @@ import org.apache.accumulo.core.security.Authorizations; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.config.SSDeepSimilarityQueryConfiguration; -import datawave.webservice.query.Query; -import datawave.webservice.query.exception.EmptyObjectException; -import 
datawave.webservice.query.logic.BaseQueryLogicTransformer; +import datawave.query.util.ssdeep.NGramScoreTuple; +import datawave.util.ssdeep.ChunkSizeEncoding; +import datawave.util.ssdeep.IntegerEncoding; +import datawave.util.ssdeep.NGramTuple; +import datawave.util.ssdeep.SSDeepHash; +import datawave.util.ssdeep.SSDeepHashScorer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.ResponseObjectFactory; diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/term/TermFrequencyQueryTable.java b/warehouse/query-core/src/main/java/datawave/query/tables/term/TermFrequencyQueryTable.java index 0e20480d306..9e6529bc303 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/term/TermFrequencyQueryTable.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/term/TermFrequencyQueryTable.java @@ -7,7 +7,6 @@ import java.util.Set; import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.client.Scanner; import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.data.Key; @@ -19,20 +18,19 @@ import com.google.common.collect.ImmutableSet; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.common.logging.ThreadConfigurableLogger; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.ingest.mapreduce.handler.ExtendedDataTypeHandler; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import datawave.query.QueryParameters; import datawave.query.config.TermFrequencyQueryConfiguration; import datawave.query.transformer.TermFrequencyQueryTransformer; import 
datawave.query.util.QueryScannerHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; public class TermFrequencyQueryTable extends BaseQueryLogic> { @@ -58,7 +56,7 @@ public Object clone() throws CloneNotSupportedException { @Override public Priority getConnectionPriority() { - return AccumuloConnectionFactory.Priority.NORMAL; + return Priority.NORMAL; } @Override diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/ContentQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/ContentQueryTransformer.java index 40a3f4aab3e..204acf0294d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/ContentQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/ContentQueryTransformer.java @@ -11,12 +11,12 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctions.Exception; +import datawave.microservice.query.Query; import datawave.query.table.parser.ContentKeyValueFactory; import datawave.query.table.parser.ContentKeyValueFactory.ContentKeyValue; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import 
datawave.webservice.query.result.event.Metadata; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransform.java b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransform.java index 98380e10aac..8bb9167f0ff 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransform.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransform.java @@ -9,8 +9,8 @@ import com.google.common.base.Function; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.attributes.Document; -import datawave.webservice.query.Query; public interface DocumentTransform extends Function,Map.Entry> { // called when adding the document transform diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformer.java index 1309873d346..93b06ac71d5 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformer.java @@ -12,15 +12,15 @@ import com.google.common.base.Preconditions; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.Flushable; +import datawave.core.query.logic.WritesQueryMetrics; +import datawave.core.query.logic.WritesResultCardinalities; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.attributes.Document; import datawave.util.StringUtils; -import datawave.webservice.query.Query; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.Flushable; -import datawave.webservice.query.logic.WritesQueryMetrics; -import 
datawave.webservice.query.logic.WritesResultCardinalities; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerInterface.java b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerInterface.java index fcae09aa242..69508bd9e0f 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerInterface.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerInterface.java @@ -2,8 +2,8 @@ import java.util.List; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.Query; import datawave.webservice.result.BaseQueryResponse; public interface DocumentTransformerInterface extends EventQueryTransformerInterface { diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerSupport.java b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerSupport.java index cbb295f2b13..213c1d2cf4b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerSupport.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/DocumentTransformerSupport.java @@ -24,7 +24,13 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.WritesQueryMetrics; +import datawave.core.query.logic.WritesResultCardinalities; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import 
datawave.microservice.querymetric.BaseQueryMetric; import datawave.query.DocumentSerialization; import datawave.query.attributes.Attribute; @@ -42,12 +48,6 @@ import datawave.query.jexl.JexlASTHelper; import datawave.util.StringUtils; import datawave.util.time.DateHelper; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.WritesQueryMetrics; -import datawave.webservice.query.logic.WritesResultCardinalities; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.ResponseObjectFactory; @@ -68,6 +68,7 @@ public abstract class DocumentTransformerSupport extends EventQueryTransfor private static final Logger log = Logger.getLogger(DocumentTransformerSupport.class); private static final Map EMPTY_MARKINGS = new HashMap<>(); + private long sourceCount = 0; private long nextCount = 0; private long seekCount = 0; @@ -264,10 +265,46 @@ protected void extractMetrics(Document document, Key documentKey) { } } + @Override + public boolean hasMetrics() { + return sourceCount + nextCount + seekCount + yieldCount + docRanges + fiRanges > 0; + } + + @Override + public long getSourceCount() { + return sourceCount; + } + + @Override + public long getNextCount() { + return nextCount; + } + + @Override + public long getSeekCount() { + return seekCount; + } + + @Override + public long getYieldCount() { + return yieldCount; + } + + @Override + public long getDocRanges() { + return docRanges; + } + + @Override + public long getFiRanges() { + return fiRanges; + } + + @Override public void writeQueryMetrics(BaseQueryMetric metric) { // if any timing details have been returned, add metrics - if (sourceCount > 0) { + if (hasMetrics()) { metric.setSourceCount(sourceCount); 
metric.setNextCount(nextCount); metric.setSeekCount(seekCount); @@ -277,6 +314,16 @@ public void writeQueryMetrics(BaseQueryMetric metric) { } } + @Override + public void resetMetrics() { + sourceCount = 0; + nextCount = 0; + seekCount = 0; + yieldCount = 0; + docRanges = 0; + fiRanges = 0; + } + protected List getFieldValues(Document document, String field, boolean shortCircuit) { Map reverseModel = cardinalityConfiguration.getCardinalityFieldReverseMapping(); @@ -447,7 +494,6 @@ protected Collection> buildDocumentFields(Key documentKey, String f */ protected FieldBase createField(final String fieldName, final long ts, final Attribute attribute, Map markings, String columnVisibility) { - if (markings == null || markings.isEmpty()) { log.warn("Null or empty markings for " + fieldName + ":" + attribute); } diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformer.java index 98e0cb96de4..ee584c5733d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformer.java @@ -12,22 +12,22 @@ import com.google.protobuf.InvalidProtocolBufferException; -import datawave.edge.model.EdgeModelAware; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.edge.model.EdgeModelFields; import datawave.edge.util.EdgeKey; import datawave.edge.util.EdgeValue; import datawave.edge.util.EdgeValueHelper; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.util.time.DateHelper; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.result.edge.EdgeBase; import datawave.webservice.query.result.event.ResponseObjectFactory; -public class EdgeQueryTransformer extends 
EdgeQueryTransformerSupport,EdgeBase> implements CacheableLogic, EdgeModelAware { +public class EdgeQueryTransformer extends EdgeQueryTransformerSupport,EdgeBase> implements CacheableLogic { private Logger log = Logger.getLogger(EdgeQueryTransformer.class); - public EdgeQueryTransformer(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory) { - super(settings, markingFunctions, responseObjectFactory); + public EdgeQueryTransformer(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory, EdgeModelFields fields) { + super(settings, markingFunctions, responseObjectFactory, fields); } @Override diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformerSupport.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformerSupport.java index a623eaa440f..c8b6e40ad1d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformerSupport.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EdgeQueryTransformerSupport.java @@ -15,27 +15,29 @@ import com.google.common.collect.Sets; import com.google.protobuf.InvalidProtocolBufferException; -import datawave.edge.model.EdgeModelAware; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; +import datawave.edge.model.EdgeModelFields; import datawave.edge.util.EdgeValue; import datawave.marking.MarkingFunctions; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; +import datawave.microservice.query.Query; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.EdgeQueryResponseBase; import datawave.webservice.query.result.edge.EdgeBase; import 
datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.result.BaseQueryResponse; -public abstract class EdgeQueryTransformerSupport extends BaseQueryLogicTransformer implements CacheableLogic, EdgeModelAware { +public abstract class EdgeQueryTransformerSupport extends BaseQueryLogicTransformer implements CacheableLogic { protected Authorizations auths; protected ResponseObjectFactory responseObjectFactory; + protected EdgeModelFields fields; - public EdgeQueryTransformerSupport(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory) { + public EdgeQueryTransformerSupport(Query settings, MarkingFunctions markingFunctions, ResponseObjectFactory responseObjectFactory, EdgeModelFields fields) { super(markingFunctions); this.responseObjectFactory = responseObjectFactory; auths = new Authorizations(settings.getQueryAuthorizations().split(",")); + this.fields = fields; } private static final String ERROR_INCORRECT_BYTE_ARRAY_SIZE = "The bitmask byte array is invalid. 
The array should have four bytes, but has %d bytes"; @@ -114,126 +116,117 @@ public BaseQueryResponse createResponse(List resultList) { } @Override - public List writeToCache(Object o) throws QueryException { - - List cqoList = new ArrayList<>(); + public CacheableQueryRow writeToCache(Object o) throws QueryException { EdgeBase edge = (EdgeBase) o; CacheableQueryRow cqo = responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); cqo.setColFam(""); cqo.setDataType(""); cqo.setEventId(generateEventId(edge)); cqo.setRow(""); if (edge.getSource() != null) { - cqo.addColumn(EDGE_SOURCE, edge.getSource(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getSourceFieldName(), edge.getSource(), edge.getMarkings(), "", 0l); } if (edge.getSink() != null) { - cqo.addColumn(EDGE_SINK, edge.getSink(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getSinkFieldName(), edge.getSink(), edge.getMarkings(), "", 0l); } if (edge.getEdgeType() != null) { - cqo.addColumn(EDGE_TYPE, edge.getEdgeType(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getTypeFieldName(), edge.getEdgeType(), edge.getMarkings(), "", 0l); } if (edge.getEdgeRelationship() != null) { - cqo.addColumn(EDGE_RELATIONSHIP, edge.getEdgeRelationship(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getRelationshipFieldName(), edge.getEdgeRelationship(), edge.getMarkings(), "", 0l); } if (edge.getEdgeAttribute1Source() != null) { - cqo.addColumn(EDGE_ATTRIBUTE1, edge.getEdgeAttribute1Source(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getAttribute1FieldName(), edge.getEdgeAttribute1Source(), edge.getMarkings(), "", 0l); } if (edge.getStatsType() != null) { - cqo.addColumn(STATS_EDGE, edge.getStatsType(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getStatsEdgeFieldName(), edge.getStatsType(), edge.getMarkings(), "", 0l); } if (edge.getEdgeAttribute2() != null) { - cqo.addColumn(EDGE_ATTRIBUTE2, edge.getEdgeAttribute2(), edge.getMarkings(), "", 
0l); + cqo.addColumn(fields.getAttribute2FieldName(), edge.getEdgeAttribute2(), edge.getMarkings(), "", 0l); } if (edge.getDate() != null) { - cqo.addColumn(DATE, edge.getDate(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getDateFieldName(), edge.getDate(), edge.getMarkings(), "", 0l); } if (edge.getCount() != null) { - cqo.addColumn(COUNT, edge.getCount().toString(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getCountFieldName(), edge.getCount().toString(), edge.getMarkings(), "", 0l); } if (edge.getEdgeAttribute3() != null) { - cqo.addColumn(EDGE_ATTRIBUTE3, edge.getEdgeAttribute3(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getAttribute3FieldName(), edge.getEdgeAttribute3(), edge.getMarkings(), "", 0l); } List counts = edge.getCounts(); if (counts != null && !counts.isEmpty()) { - cqo.addColumn(COUNTS, StringUtils.join(counts, '\0'), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getCountsFieldName(), StringUtils.join(counts, '\0'), edge.getMarkings(), "", 0l); } if (edge.getLoadDate() != null) { - cqo.addColumn(LOAD_DATE, edge.getLoadDate(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getLoadDateFieldName(), edge.getLoadDate(), edge.getMarkings(), "", 0l); } if (edge.getActivityDate() != null) { - cqo.addColumn(ACTIVITY_DATE, edge.getActivityDate(), edge.getMarkings(), "", 0l); + cqo.addColumn(fields.getActivityDateFieldName(), edge.getActivityDate(), edge.getMarkings(), "", 0l); } - cqoList.add(cqo); - return cqoList; + return cqo; } @Override - public List readFromCache(List cacheableQueryRowList) { - - List edgeList = new ArrayList<>(); + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + Map markings = cacheableQueryRow.getMarkings(); - for (CacheableQueryRow cqr : cacheableQueryRowList) { - Map markings = cqr.getMarkings(); + EdgeBase edge = (EdgeBase) responseObjectFactory.getEdge(); - EdgeBase edge = (EdgeBase) responseObjectFactory.getEdge(); + edge.setMarkings(markings); - 
edge.setMarkings(markings); + Map columnValues = cacheableQueryRow.getColumnValues(); - Map columnValues = cqr.getColumnValues(); - - if (columnValues.containsKey(EDGE_SOURCE)) { - edge.setSource(columnValues.get(EDGE_SOURCE)); - } - if (columnValues.containsKey(EDGE_SINK)) { - edge.setSink(columnValues.get(EDGE_SINK)); - } - if (columnValues.containsKey(EDGE_TYPE)) { - edge.setEdgeType(columnValues.get(EDGE_TYPE)); - } - if (columnValues.containsKey(EDGE_RELATIONSHIP)) { - edge.setEdgeRelationship(columnValues.get(EDGE_RELATIONSHIP)); - } - if (columnValues.containsKey(EDGE_ATTRIBUTE1)) { - edge.setEdgeAttribute1Source(columnValues.get(EDGE_ATTRIBUTE1)); - } - if (columnValues.containsKey(STATS_EDGE)) { - edge.setStatsType(columnValues.get(STATS_EDGE)); - } - if (columnValues.containsKey(EDGE_ATTRIBUTE2)) { - edge.setEdgeAttribute2(columnValues.get(EDGE_ATTRIBUTE2)); - } - if (columnValues.containsKey(DATE)) { - edge.setDate(columnValues.get(DATE)); - } - if (columnValues.containsKey(COUNT)) { - if (!columnValues.get(COUNT).isEmpty()) { - edge.setCount(Long.valueOf(columnValues.get(COUNT))); - } - } - if (columnValues.containsKey(EDGE_ATTRIBUTE3)) { - edge.setEdgeAttribute3(columnValues.get(EDGE_ATTRIBUTE3)); - } - if (columnValues.containsKey(COUNTS)) { - String countStr = columnValues.get(COUNTS); - String[] countSplit = StringUtils.split(countStr, '\0'); - List countListAsLongs = new ArrayList<>(); - for (String s : countSplit) { - countListAsLongs.add(Long.valueOf(s)); - } - edge.setCounts(countListAsLongs); - } - if (columnValues.containsKey(LOAD_DATE)) { - edge.setLoadDate(columnValues.get(LOAD_DATE)); + if (columnValues.containsKey(fields.getSourceFieldName())) { + edge.setSource(columnValues.get(fields.getSourceFieldName())); + } + if (columnValues.containsKey(fields.getSinkFieldName())) { + edge.setSink(columnValues.get(fields.getSinkFieldName())); + } + if (columnValues.containsKey(fields.getTypeFieldName())) { + 
edge.setEdgeType(columnValues.get(fields.getTypeFieldName())); + } + if (columnValues.containsKey(fields.getRelationshipFieldName())) { + edge.setEdgeRelationship(columnValues.get(fields.getRelationshipFieldName())); + } + if (columnValues.containsKey(fields.getAttribute1FieldName())) { + edge.setEdgeAttribute1Source(columnValues.get(fields.getAttribute1FieldName())); + } + if (columnValues.containsKey(fields.getStatsEdgeFieldName())) { + edge.setStatsType(columnValues.get(fields.getStatsEdgeFieldName())); + } + if (columnValues.containsKey(fields.getAttribute2FieldName())) { + edge.setEdgeAttribute2(columnValues.get(fields.getAttribute2FieldName())); + } + if (columnValues.containsKey(fields.getDateFieldName())) { + edge.setDate(columnValues.get(fields.getDateFieldName())); + } + if (columnValues.containsKey(fields.getCountFieldName())) { + if (!columnValues.get(fields.getCountFieldName()).isEmpty()) { + edge.setCount(Long.valueOf(columnValues.get(fields.getCountFieldName()))); } - if (columnValues.containsKey(ACTIVITY_DATE)) { - edge.setActivityDate(columnValues.get(ACTIVITY_DATE)); + } + if (columnValues.containsKey(fields.getAttribute3FieldName())) { + edge.setEdgeAttribute3(columnValues.get(fields.getAttribute3FieldName())); + } + if (columnValues.containsKey(fields.getCountsFieldName())) { + String countStr = columnValues.get(fields.getCountsFieldName()); + String[] countSplit = StringUtils.split(countStr, '\0'); + List countListAsLongs = new ArrayList<>(); + for (String s : countSplit) { + countListAsLongs.add(Long.valueOf(s)); } - edgeList.add(edge); + edge.setCounts(countListAsLongs); } - - return edgeList; + if (columnValues.containsKey(fields.getLoadDateFieldName())) { + edge.setLoadDate(columnValues.get(fields.getLoadDateFieldName())); + } + if (columnValues.containsKey(fields.getActivityDateFieldName())) { + edge.setActivityDate(columnValues.get(fields.getActivityDateFieldName())); + } + return edge; } public String generateEventId(EdgeBase edge) { 
diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryDataDecorator.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryDataDecorator.java index 0601ad4094f..640dc36b34c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryDataDecorator.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryDataDecorator.java @@ -18,7 +18,7 @@ public class EventQueryDataDecorator { private Logger log = Logger.getLogger(EventQueryDataDecorator.class); private ResponseObjectFactory responseObjectFactory; - private EventQueryDataDecorator() {} + public EventQueryDataDecorator() {} public void decorateData(Multimap data) { // Get the values for the FieldName to put the decorated data into diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformer.java index 0463f36eef6..71f5f15373d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformer.java @@ -15,15 +15,15 @@ import com.esotericsoftware.kryo.io.Input; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogic; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctions.Exception; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.parser.EventFields; import datawave.query.parser.EventFields.FieldValue; import datawave.util.StringUtils; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import 
datawave.webservice.query.result.event.Metadata; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerInterface.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerInterface.java index 9653a6b7e8a..43c0fd685ab 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerInterface.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerInterface.java @@ -4,12 +4,12 @@ import org.apache.commons.collections4.Transformer; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.query.model.QueryModel; -import datawave.webservice.query.Query; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; public interface EventQueryTransformerInterface extends QueryLogicTransformer { diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerSupport.java b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerSupport.java index 3ea6f935738..edf372e7021 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerSupport.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/EventQueryTransformerSupport.java @@ -18,16 +18,16 @@ import com.esotericsoftware.kryo.Kryo; import com.google.common.collect.Lists; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl.Parameter; import 
datawave.query.model.QueryModel; import datawave.query.parser.EventFields; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -81,12 +81,11 @@ protected Authorizations getAuths() { } @Override - public List writeToCache(Object o) throws QueryException { - - List cqoList = new ArrayList<>(); + public CacheableQueryRow writeToCache(Object o) throws QueryException { EventBase event = (EventBase) o; CacheableQueryRow cqo = this.responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); Metadata metadata = event.getMetadata(); cqo.setColFam(metadata.getDataType() + ":" + cqo.getEventId()); cqo.setDataType(metadata.getDataType()); @@ -97,47 +96,39 @@ public List writeToCache(Object o) throws QueryException { for (FieldBase f : fields) { cqo.addColumn(f.getName(), f.getTypedValue(), f.getMarkings(), f.getColumnVisibility(), f.getTimestamp()); } - cqoList.add(cqo); - return cqoList; + return cqo; } @Override - public List readFromCache(List cacheableQueryRowList) { - - List eventList = new ArrayList<>(); - - for (CacheableQueryRow cqr : cacheableQueryRowList) { - Map markings = cqr.getMarkings(); - String dataType = cqr.getDataType(); - String internalId = cqr.getEventId(); - String row = cqr.getRow(); - - EventBase event = this.responseObjectFactory.getEvent(); - event.setMarkings(markings); - - Metadata metadata = new Metadata(); - metadata.setDataType(dataType); - metadata.setInternalId(internalId); - 
metadata.setRow(row); - metadata.setTable(logic.getTableName()); - event.setMetadata(metadata); - - List> fieldList = new ArrayList<>(); - Map columnValueMap = cqr.getColumnValues(); - for (Entry entry : columnValueMap.entrySet()) { - String columnName = entry.getKey(); - String columnValue = entry.getValue(); - Map columnMarkings = cqr.getColumnMarkings(columnName); - String columnVisibility = cqr.getColumnVisibility(columnName); - Long columnTimestamp = cqr.getColumnTimestamp(columnName); - FieldBase field = this.makeField(columnName, columnMarkings, columnVisibility, columnTimestamp, columnValue); - fieldList.add(field); - } - event.setFields(fieldList); - eventList.add(event); + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + Map markings = cacheableQueryRow.getMarkings(); + String dataType = cacheableQueryRow.getDataType(); + String internalId = cacheableQueryRow.getEventId(); + String row = cacheableQueryRow.getRow(); + + EventBase event = this.responseObjectFactory.getEvent(); + event.setMarkings(markings); + + Metadata metadata = new Metadata(); + metadata.setDataType(dataType); + metadata.setInternalId(internalId); + metadata.setRow(row); + metadata.setTable(logic.getTableName()); + event.setMetadata(metadata); + + List> fieldList = new ArrayList<>(); + Map columnValueMap = cacheableQueryRow.getColumnValues(); + for (Entry entry : columnValueMap.entrySet()) { + String columnName = entry.getKey(); + String columnValue = entry.getValue(); + Map columnMarkings = cacheableQueryRow.getColumnMarkings(columnName); + String columnVisibility = cacheableQueryRow.getColumnVisibility(columnName); + Long columnTimestamp = cacheableQueryRow.getColumnTimestamp(columnName); + FieldBase field = this.makeField(columnName, columnMarkings, columnVisibility, columnTimestamp, columnValue); + fieldList.add(field); } - - return eventList; + event.setFields(fieldList); + return event; } @Override diff --git 
a/warehouse/query-core/src/main/java/datawave/query/transformer/FacetedTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/FacetedTransformer.java index 572c337a1e5..aa1832daa7c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/FacetedTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/FacetedTransformer.java @@ -16,16 +16,16 @@ import com.google.common.base.Preconditions; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; import datawave.data.type.StringType; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; import datawave.query.attributes.Cardinality; import datawave.query.attributes.Document; import datawave.query.attributes.FieldValueCardinality; -import datawave.webservice.query.Query; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.result.event.FacetsBase; import datawave.webservice.query.result.event.FieldCardinalityBase; import datawave.webservice.query.result.event.ResponseObjectFactory; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/FieldIndexCountQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/FieldIndexCountQueryTransformer.java index 92f43fe2db2..f570fa4fc15 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/FieldIndexCountQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/FieldIndexCountQueryTransformer.java @@ -11,17 +11,17 @@ import org.apache.hadoop.io.Text; import org.apache.log4j.Logger; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogic; +import 
datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.data.hash.UID; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.Constants; import datawave.query.tables.shard.FieldIndexCountQueryLogic.Tuple; import datawave.util.TextUtil; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -149,12 +149,11 @@ public BaseQueryResponse createResponse(List resultList) { } @Override - public List writeToCache(Object o) throws QueryException { - - List cqoList = new ArrayList<>(); + public CacheableQueryRow writeToCache(Object o) throws QueryException { EventBase event = (EventBase) o; CacheableQueryRow cqo = responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); Metadata metadata = event.getMetadata(); cqo.setColFam(metadata.getDataType() + ":" + cqo.getEventId()); cqo.setDataType(metadata.getDataType()); @@ -169,9 +168,7 @@ public List writeToCache(Object o) throws QueryException { // set the size in bytes using the initial event size as an approximation cqo.setSizeInBytes(event.getSizeInBytes()); - cqoList.add(cqo); - - return cqoList; + return cqo; } private FieldBase makeField(String name, Map markings, String columnVisibility, Long timestamp, Object value) { @@ -185,44 +182,37 @@ private FieldBase makeField(String name, Map markings, String col } @Override - public List readFromCache(List cacheableQueryRowList) { - - List eventList = new ArrayList<>(); - - for (CacheableQueryRow cqr : 
cacheableQueryRowList) { - if (this.variableFieldList == null) { - this.variableFieldList = cqr.getVariableColumnNames(); - } - Map markings = cqr.getMarkings(); - String dataType = cqr.getDataType(); - String internalId = cqr.getEventId(); - String row = cqr.getRow(); - - EventBase event = this.responseObjectFactory.getEvent(); - event.setMarkings(markings); - - Metadata metadata = new Metadata(); - metadata.setDataType(dataType); - metadata.setInternalId(internalId); - metadata.setRow(row); - metadata.setTable(logic.getTableName()); - event.setMetadata(metadata); - - List fieldList = new ArrayList<>(); - Map columnValueMap = cqr.getColumnValues(); - for (Map.Entry entry : columnValueMap.entrySet()) { - String columnName = entry.getKey(); - String columnValue = entry.getValue(); - Map columnMarkings = cqr.getColumnMarkings(columnName); - String columnVisibility = cqr.getColumnVisibility(columnName); - Long columnTimestamp = cqr.getColumnTimestamp(columnName); - FieldBase field = this.makeField(columnName, columnMarkings, columnVisibility, columnTimestamp, columnValue); - fieldList.add(field); - } - event.setFields(fieldList); - eventList.add(event); + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + if (this.variableFieldList == null) { + this.variableFieldList = cacheableQueryRow.getVariableColumnNames(); } + Map markings = cacheableQueryRow.getMarkings(); + String dataType = cacheableQueryRow.getDataType(); + String internalId = cacheableQueryRow.getEventId(); + String row = cacheableQueryRow.getRow(); + + EventBase event = this.responseObjectFactory.getEvent(); + event.setMarkings(markings); - return eventList; + Metadata metadata = new Metadata(); + metadata.setDataType(dataType); + metadata.setInternalId(internalId); + metadata.setRow(row); + metadata.setTable(logic.getTableName()); + event.setMetadata(metadata); + + List fieldList = new ArrayList<>(); + Map columnValueMap = cacheableQueryRow.getColumnValues(); + for (Map.Entry entry : 
columnValueMap.entrySet()) { + String columnName = entry.getKey(); + String columnValue = entry.getValue(); + Map columnMarkings = cacheableQueryRow.getColumnMarkings(columnName); + String columnVisibility = cacheableQueryRow.getColumnVisibility(columnName); + Long columnTimestamp = cacheableQueryRow.getColumnTimestamp(columnName); + FieldBase field = this.makeField(columnName, columnMarkings, columnVisibility, columnTimestamp, columnValue); + fieldList.add(field); + } + event.setFields(fieldList); + return event; } } diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/GroupingDocumentTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/GroupingDocumentTransformer.java index df97c860154..9c764087cdf 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/GroupingDocumentTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/GroupingDocumentTransformer.java @@ -21,11 +21,11 @@ import com.google.common.collect.Sets; import com.google.common.collect.TreeMultimap; +import datawave.core.query.logic.BaseQueryLogic; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.model.QueryModel; import datawave.query.tables.ShardQueryLogic; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.ResponseObjectFactory; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/ParentDocumentTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/ParentDocumentTransformer.java index 3238e7f050b..9b96d420a25 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/ParentDocumentTransformer.java +++ 
b/warehouse/query-core/src/main/java/datawave/query/transformer/ParentDocumentTransformer.java @@ -7,10 +7,10 @@ import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; +import datawave.core.query.logic.BaseQueryLogic; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.tld.TLD; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.Metadata; import datawave.webservice.query.result.event.ResponseObjectFactory; diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/ShardIndexQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/ShardIndexQueryTransformer.java index 0c5f5150a4e..8a1d6f8a60e 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/ShardIndexQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/ShardIndexQueryTransformer.java @@ -14,16 +14,16 @@ import com.google.protobuf.InvalidProtocolBufferException; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.ingest.protobuf.Uid; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctions.Exception; +import datawave.microservice.query.Query; import datawave.query.model.QueryModel; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import 
datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -139,12 +139,11 @@ public BaseQueryResponse createResponse(List resultList) { } @Override - public List writeToCache(Object o) throws QueryException { - - List cqoList = new ArrayList<>(); + public CacheableQueryRow writeToCache(Object o) throws QueryException { EventBase event = (EventBase) o; CacheableQueryRow cqo = responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); Metadata metadata = event.getMetadata(); cqo.setColFam(metadata.getDataType() + ":" + cqo.getEventId()); cqo.setDataType(metadata.getDataType()); @@ -159,55 +158,48 @@ public List writeToCache(Object o) throws QueryException { // set the size in bytes using the initial event size as an approximation cqo.setSizeInBytes(event.getSizeInBytes()); - cqoList.add(cqo); - return cqoList; + return cqo; } @Override - public List readFromCache(List cacheableQueryRowList) { + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + if (this.variableFieldList == null) { + this.variableFieldList = cacheableQueryRow.getVariableColumnNames(); + } + Map markings = cacheableQueryRow.getMarkings(); + String dataType = cacheableQueryRow.getDataType(); + String internalId = cacheableQueryRow.getEventId(); + String row = cacheableQueryRow.getRow(); - List eventList = new ArrayList<>(); + EventBase event = responseObjectFactory.getEvent(); - for (CacheableQueryRow cqr : cacheableQueryRowList) { - if (this.variableFieldList == null) { - this.variableFieldList = cqr.getVariableColumnNames(); - } - Map markings = cqr.getMarkings(); - String dataType = cqr.getDataType(); - String internalId = cqr.getEventId(); - String row = cqr.getRow(); - - EventBase event = responseObjectFactory.getEvent(); - - event.setMarkings(markings); - - Metadata metadata = new Metadata(); - metadata.setDataType(dataType); - metadata.setInternalId(internalId); - metadata.setRow(row); - 
metadata.setTable(logic.getTableName()); - event.setMetadata(metadata); - - List fieldList = new ArrayList<>(); - Map columnValueMap = cqr.getColumnValues(); - for (Map.Entry entry : columnValueMap.entrySet()) { - String columnName = entry.getKey(); - String columnValue = entry.getValue(); - String columnVisibility = cqr.getColumnVisibility(columnName); - Long columnTimestamp = cqr.getColumnTimestamp(columnName); - Map columnMarkings = cqr.getColumnMarkings(columnName); - FieldBase field = responseObjectFactory.getField(); - field.setName(columnName); - field.setMarkings(columnMarkings); - field.setColumnVisibility(columnVisibility); - field.setTimestamp(columnTimestamp); - field.setValue(columnValue); - fieldList.add(field); - } - event.setFields(fieldList); - eventList.add(event); + event.setMarkings(markings); + + Metadata metadata = new Metadata(); + metadata.setDataType(dataType); + metadata.setInternalId(internalId); + metadata.setRow(row); + metadata.setTable(logic.getTableName()); + event.setMetadata(metadata); + + List fieldList = new ArrayList<>(); + Map columnValueMap = cacheableQueryRow.getColumnValues(); + for (Map.Entry entry : columnValueMap.entrySet()) { + String columnName = entry.getKey(); + String columnValue = entry.getValue(); + String columnVisibility = cacheableQueryRow.getColumnVisibility(columnName); + Long columnTimestamp = cacheableQueryRow.getColumnTimestamp(columnName); + Map columnMarkings = cacheableQueryRow.getColumnMarkings(columnName); + FieldBase field = responseObjectFactory.getField(); + field.setName(columnName); + field.setMarkings(columnMarkings); + field.setColumnVisibility(columnVisibility); + field.setTimestamp(columnTimestamp); + field.setValue(columnValue); + fieldList.add(field); } + event.setFields(fieldList); - return eventList; + return event; } } diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/ShardQueryCountTableTransformer.java 
b/warehouse/query-core/src/main/java/datawave/query/transformer/ShardQueryCountTableTransformer.java index cbcced0871e..d2ba9c4b84c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/ShardQueryCountTableTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/ShardQueryCountTableTransformer.java @@ -9,14 +9,14 @@ import org.apache.accumulo.core.security.ColumnVisibility; import org.apache.log4j.Logger; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; import datawave.marking.MarkingFunctions.Exception; +import datawave.microservice.query.Query; import datawave.query.Constants; -import datawave.webservice.query.Query; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -97,12 +97,12 @@ public BaseQueryResponse createResponse(List resultList) { } @Override - public List writeToCache(Object o) throws QueryException { + public CacheableQueryRow writeToCache(Object o) throws QueryException { - List cqoList = new ArrayList<>(); EventBase event = (EventBase) o; CacheableQueryRow cqo = responseObjectFactory.getCacheableQueryRow(); + cqo.setMarkingFunctions(this.markingFunctions); Metadata metadata = event.getMetadata(); cqo.setColFam(metadata.getDataType() + ":" + cqo.getEventId()); cqo.setDataType(metadata.getDataType()); @@ -117,54 +117,46 @@ public List writeToCache(Object o) throws QueryException { // set the size in bytes using the initial event size as an approximation cqo.setSizeInBytes(event.getSizeInBytes()); - cqoList.add(cqo); - 
return cqoList; + return cqo; } @Override - public List readFromCache(List cacheableQueryRowList) { - - List eventList = new ArrayList<>(); - - for (CacheableQueryRow cqr : cacheableQueryRowList) { - if (this.variableFieldList == null) { - this.variableFieldList = cqr.getVariableColumnNames(); - } - Map markings = cqr.getMarkings(); - String dataType = cqr.getDataType(); - String internalId = cqr.getEventId(); - String row = cqr.getRow(); - - EventBase event = responseObjectFactory.getEvent(); - - event.setMarkings(markings); - - Metadata metadata = new Metadata(); - metadata.setDataType(dataType); - metadata.setInternalId(internalId); - metadata.setRow(row); - event.setMetadata(metadata); - - List fieldList = new ArrayList<>(); - Map columnValueMap = cqr.getColumnValues(); - for (Map.Entry entry : columnValueMap.entrySet()) { - String columnName = entry.getKey(); - String columnValue = entry.getValue(); - String columnVisibility = cqr.getColumnVisibility(columnName); - Long columnTimestamp = cqr.getColumnTimestamp(columnName); - Map columnMarkings = cqr.getColumnMarkings(columnName); - FieldBase field = responseObjectFactory.getField(); - field.setName(columnName); - field.setMarkings(columnMarkings); - field.setColumnVisibility(columnVisibility); - field.setTimestamp(columnTimestamp); - field.setValue(columnValue); - fieldList.add(field); - } - event.setFields(fieldList); - eventList.add(event); + public Object readFromCache(CacheableQueryRow cacheableQueryRow) { + if (this.variableFieldList == null) { + this.variableFieldList = cacheableQueryRow.getVariableColumnNames(); } + Map markings = cacheableQueryRow.getMarkings(); + String dataType = cacheableQueryRow.getDataType(); + String internalId = cacheableQueryRow.getEventId(); + String row = cacheableQueryRow.getRow(); - return eventList; + EventBase event = responseObjectFactory.getEvent(); + + event.setMarkings(markings); + + Metadata metadata = new Metadata(); + metadata.setDataType(dataType); + 
metadata.setInternalId(internalId); + metadata.setRow(row); + event.setMetadata(metadata); + + List fieldList = new ArrayList<>(); + Map columnValueMap = cacheableQueryRow.getColumnValues(); + for (Map.Entry entry : columnValueMap.entrySet()) { + String columnName = entry.getKey(); + String columnValue = entry.getValue(); + String columnVisibility = cacheableQueryRow.getColumnVisibility(columnName); + Long columnTimestamp = cacheableQueryRow.getColumnTimestamp(columnName); + Map columnMarkings = cacheableQueryRow.getColumnMarkings(columnName); + FieldBase field = responseObjectFactory.getField(); + field.setName(columnName); + field.setMarkings(columnMarkings); + field.setColumnVisibility(columnVisibility); + field.setTimestamp(columnTimestamp); + field.setValue(columnValue); + fieldList.add(field); + } + event.setFields(fieldList); + return event; } } diff --git a/warehouse/query-core/src/main/java/datawave/query/transformer/TermFrequencyQueryTransformer.java b/warehouse/query-core/src/main/java/datawave/query/transformer/TermFrequencyQueryTransformer.java index 2d71b29c64b..6bdb53140f7 100644 --- a/warehouse/query-core/src/main/java/datawave/query/transformer/TermFrequencyQueryTransformer.java +++ b/warehouse/query-core/src/main/java/datawave/query/transformer/TermFrequencyQueryTransformer.java @@ -10,12 +10,12 @@ import org.apache.commons.lang3.StringUtils; import org.apache.curator.shaded.com.google.common.collect.ImmutableList; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.table.parser.TermFrequencyKeyValueFactory; import datawave.query.table.parser.TermFrequencyKeyValueFactory.TermFrequencyKeyValue; -import datawave.webservice.query.Query; -import datawave.webservice.query.exception.EmptyObjectException; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; import 
datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; diff --git a/warehouse/query-core/src/main/java/datawave/query/util/GeoUtils.java b/warehouse/query-core/src/main/java/datawave/query/util/GeoUtils.java index d4d677da1a3..6c79376cbfd 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/GeoUtils.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/GeoUtils.java @@ -18,7 +18,7 @@ import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.jts.geom.PrecisionModel; -import datawave.webservice.common.logging.ThreadConfigurableLogger; +import datawave.core.common.logging.ThreadConfigurableLogger; /** * This utility class contains a variety of methods which can be used to perform operations on Geo ranges. @@ -170,7 +170,6 @@ public static List generateOptimizedPositionRanges(Geometry geometry, Li } return diff; }); - // these ranges are sorted by the minimum bound of the range so that we can // quickly merge contiguous segments into discrete ranges TreeSet positionSortedRanges = new TreeSet<>(Comparator.comparingDouble(o -> o.range[0])); diff --git a/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperCacheManagementListener.java b/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperCacheManagementListener.java index 966b4f9712b..21e1e489c1d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperCacheManagementListener.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperCacheManagementListener.java @@ -8,7 +8,7 @@ import org.apache.curator.framework.state.ConnectionState; import org.apache.log4j.Logger; -import datawave.webservice.common.cache.SharedCacheCoordinator; +import datawave.core.common.cache.SharedCacheCoordinator; /** * Uses the SharedCacheCoordinator to register listeners so that when an event is fired (for example, when 
a new model is loaded) the spring injected cache of diff --git a/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperUpdateHdfsListener.java b/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperUpdateHdfsListener.java index c170d1f89be..67ed2b5ca9c 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperUpdateHdfsListener.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/MetadataHelperUpdateHdfsListener.java @@ -5,20 +5,17 @@ import org.apache.accumulo.core.client.Accumulo; import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.client.ClientConfiguration; -import org.apache.accumulo.core.client.ZooKeeperInstance; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.security.Authorizations; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.recipes.locks.InterProcessMutex; import org.apache.curator.framework.state.ConnectionState; import org.apache.log4j.Logger; -import datawave.webservice.common.cache.SharedCacheCoordinator; -import datawave.webservice.common.cache.SharedTriState; -import datawave.webservice.common.cache.SharedTriStateListener; -import datawave.webservice.common.cache.SharedTriStateReader; -import datawave.webservice.util.EnvProvider; +import datawave.core.common.cache.SharedCacheCoordinator; +import datawave.core.common.cache.SharedTriState; +import datawave.core.common.cache.SharedTriStateListener; +import datawave.core.common.cache.SharedTriStateReader; +import datawave.core.common.util.EnvProvider; /** * Uses the SharedCacheCoordinator to register listeners so that when an event is fired (for example, when a new model is loaded) the TypeMetadata map will be diff --git a/warehouse/query-core/src/main/java/datawave/query/util/QueryInformation.java b/warehouse/query-core/src/main/java/datawave/query/util/QueryInformation.java index 
e7117359f11..1d35c70ff4d 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/QueryInformation.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/QueryInformation.java @@ -7,7 +7,7 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; /** * diff --git a/warehouse/query-core/src/main/java/datawave/query/util/QueryScannerHelper.java b/warehouse/query-core/src/main/java/datawave/query/util/QueryScannerHelper.java index 41e337f70b1..75cdcb8dbd1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/QueryScannerHelper.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/QueryScannerHelper.java @@ -9,9 +9,9 @@ import org.apache.accumulo.core.client.TableNotFoundException; import org.apache.accumulo.core.security.Authorizations; +import datawave.microservice.query.Query; import datawave.query.iterator.QueryInformationIterator; import datawave.security.util.ScannerHelper; -import datawave.webservice.query.Query; /** * diff --git a/warehouse/query-core/src/main/resources/DATAWAVE_EDGE.xml b/warehouse/query-core/src/main/resources/DATAWAVE_EDGE.xml index 9a19b0294c3..31d6cedfd7f 100644 --- a/warehouse/query-core/src/main/resources/DATAWAVE_EDGE.xml +++ b/warehouse/query-core/src/main/resources/DATAWAVE_EDGE.xml @@ -9,26 +9,26 @@ name representation as needed for the deployment environment's default query syntax. 
--> - - + + - - + + - - + + - - + + - - + + - - + + - - + + diff --git a/warehouse/query-core/src/test/java/datawave/audit/DatawaveSelectorExtractorTest.java b/warehouse/query-core/src/test/java/datawave/audit/DatawaveSelectorExtractorTest.java index 21161732f30..2723b2b8602 100644 --- a/warehouse/query-core/src/test/java/datawave/audit/DatawaveSelectorExtractorTest.java +++ b/warehouse/query-core/src/test/java/datawave/audit/DatawaveSelectorExtractorTest.java @@ -11,7 +11,7 @@ import com.google.common.collect.Lists; -import datawave.webservice.query.QueryImpl; +import datawave.microservice.query.QueryImpl; public class DatawaveSelectorExtractorTest { diff --git a/warehouse/query-core/src/test/java/datawave/audit/SplitSelectorExtractorTest.java b/warehouse/query-core/src/test/java/datawave/audit/SplitSelectorExtractorTest.java index eb587e3fe76..8b1c9166eb1 100644 --- a/warehouse/query-core/src/test/java/datawave/audit/SplitSelectorExtractorTest.java +++ b/warehouse/query-core/src/test/java/datawave/audit/SplitSelectorExtractorTest.java @@ -12,7 +12,7 @@ import com.google.common.collect.Lists; -import datawave.webservice.query.QueryImpl; +import datawave.microservice.query.QueryImpl; class SplitSelectorExtractorTest { diff --git a/web-services/query/src/test/java/datawave/webservice/query/iterator/KeyAggregatingTransformIteratorTest.java b/warehouse/query-core/src/test/java/datawave/core/query/iterator/KeyAggregatingTransformIteratorTest.java similarity index 99% rename from web-services/query/src/test/java/datawave/webservice/query/iterator/KeyAggregatingTransformIteratorTest.java rename to warehouse/query-core/src/test/java/datawave/core/query/iterator/KeyAggregatingTransformIteratorTest.java index 894aafc7809..04ace377a80 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/iterator/KeyAggregatingTransformIteratorTest.java +++ b/warehouse/query-core/src/test/java/datawave/core/query/iterator/KeyAggregatingTransformIteratorTest.java @@ -1,4 
+1,4 @@ -package datawave.webservice.query.iterator; +package datawave.core.query.iterator; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; diff --git a/warehouse/query-core/src/test/java/datawave/query/CheckpointableQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/CheckpointableQueryTest.java new file mode 100644 index 00000000000..1eae2a84332 --- /dev/null +++ b/warehouse/query-core/src/test/java/datawave/query/CheckpointableQueryTest.java @@ -0,0 +1,17 @@ +package datawave.query; + +import org.apache.log4j.Logger; + +import datawave.query.tables.ShardQueryLogic; + +public class CheckpointableQueryTest extends AnyFieldQueryTest { + + private static final Logger log = Logger.getLogger(CheckpointableQueryTest.class); + + @Override + protected ShardQueryLogic createShardQueryLogic() { + ShardQueryLogic logic = super.createShardQueryLogic(); + logic.setCheckpointable(true); + return logic; + } +} diff --git a/warehouse/query-core/src/test/java/datawave/query/CompositeFunctionsTest.java b/warehouse/query-core/src/test/java/datawave/query/CompositeFunctionsTest.java index a79b9e9ada4..e5103f6a605 100644 --- a/warehouse/query-core/src/test/java/datawave/query/CompositeFunctionsTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/CompositeFunctionsTest.java @@ -38,8 +38,10 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; import datawave.query.attributes.PreNormalizedAttribute; @@ -52,8 +54,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import 
datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests the composite functions, the #JEXL lucene function, the matchesAtLeastCountOf function. and others @@ -165,9 +165,9 @@ public static JavaArchive createDeployment() { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/DelayedIndexOnlyQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/DelayedIndexOnlyQueryTest.java index eca7efa64db..52780845e5d 100644 --- a/warehouse/query-core/src/test/java/datawave/query/DelayedIndexOnlyQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/DelayedIndexOnlyQueryTest.java @@ -11,6 +11,7 @@ import org.junit.Test; import datawave.query.planner.DefaultQueryPlanner; +import datawave.query.tables.ShardQueryLogic; import datawave.query.testframework.AbstractFunctionalQuery; import datawave.query.testframework.AccumuloSetup; import datawave.query.testframework.CitiesDataType; @@ -60,7 +61,13 @@ public DelayedIndexOnlyQueryTest() { protected void testInit() { this.auths = CitiesDataType.getTestAuths(); this.documentKey = CitiesDataType.CityField.EVENT_ID.name(); - ((DefaultQueryPlanner) this.logic.getQueryPlanner()).setExecutableExpansion(false); + } + + @Override + protected ShardQueryLogic createShardQueryLogic() { + ShardQueryLogic logic = 
super.createShardQueryLogic(); + ((DefaultQueryPlanner) logic.getQueryPlanner()).setExecutableExpansion(false); + return logic; } @Test diff --git a/warehouse/query-core/src/test/java/datawave/query/ExcerptTest.java b/warehouse/query-core/src/test/java/datawave/query/ExcerptTest.java index 190b0efc4c2..70187ed975d 100644 --- a/warehouse/query-core/src/test/java/datawave/query/ExcerptTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/ExcerptTest.java @@ -35,8 +35,10 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; import datawave.query.attributes.Document; @@ -47,8 +49,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public abstract class ExcerptTest { @@ -120,7 +120,7 @@ public static JavaArchive createDeployment() throws Exception { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/FunctionalSetTest.java 
b/warehouse/query-core/src/test/java/datawave/query/FunctionalSetTest.java index e77da684bf6..825e2e5fafd 100644 --- a/warehouse/query-core/src/test/java/datawave/query/FunctionalSetTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/FunctionalSetTest.java @@ -34,8 +34,10 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; import datawave.query.attributes.PreNormalizedAttribute; @@ -46,8 +48,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Loads some data in a mock accumulo table and then issues queries against the table using the shard query table. 
@@ -117,13 +117,12 @@ protected void runTestQuery(List expected, String querystr, Date startDa public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); - } @AfterClass diff --git a/warehouse/query-core/src/test/java/datawave/query/HitsAreAlwaysIncludedCommonalityTokenTest.java b/warehouse/query-core/src/test/java/datawave/query/HitsAreAlwaysIncludedCommonalityTokenTest.java index 23ed7e7bc70..6ca813714d7 100644 --- a/warehouse/query-core/src/test/java/datawave/query/HitsAreAlwaysIncludedCommonalityTokenTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/HitsAreAlwaysIncludedCommonalityTokenTest.java @@ -35,8 +35,10 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; import datawave.query.attributes.Content; @@ -48,8 +50,6 @@ import datawave.query.util.CommonalityTokenTestDataIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import 
datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests the limit.fields feature to ensure that hit terms are always included and that associated fields at the same grouping context are included along with @@ -125,9 +125,9 @@ public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/IfThisTestFailsThenHitTermsAreBroken.java b/warehouse/query-core/src/test/java/datawave/query/IfThisTestFailsThenHitTermsAreBroken.java index d1fdb383f1a..45427ff2623 100644 --- a/warehouse/query-core/src/test/java/datawave/query/IfThisTestFailsThenHitTermsAreBroken.java +++ b/warehouse/query-core/src/test/java/datawave/query/IfThisTestFailsThenHitTermsAreBroken.java @@ -40,6 +40,7 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.Multimap; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.data.ColumnFamilyConstants; import datawave.data.hash.UID; import datawave.data.type.LcNoDiacriticsType; @@ -48,6 +49,7 @@ import datawave.ingest.data.TypeRegistry; import datawave.ingest.protobuf.Uid; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import 
datawave.query.attributes.Attributes; import datawave.query.attributes.Document; @@ -61,8 +63,6 @@ import datawave.query.util.MetadataHelperFactory; import datawave.security.util.ScannerHelper; import datawave.util.TableName; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * diff --git a/warehouse/query-core/src/test/java/datawave/query/IvaratorInterruptTest.java b/warehouse/query-core/src/test/java/datawave/query/IvaratorInterruptTest.java index e6447d117a9..5685148258f 100644 --- a/warehouse/query-core/src/test/java/datawave/query/IvaratorInterruptTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/IvaratorInterruptTest.java @@ -39,8 +39,10 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; import datawave.query.attributes.PreNormalizedAttribute; @@ -53,8 +55,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public abstract class IvaratorInterruptTest { private static final Logger log = Logger.getLogger(IvaratorInterruptTest.class); @@ -78,9 +78,9 @@ public abstract class IvaratorInterruptTest { public static JavaArchive createDeployment() { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") 
.deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/LenientFieldsTest.java b/warehouse/query-core/src/test/java/datawave/query/LenientFieldsTest.java index ba9c1ff216f..8fa1cfbe16d 100644 --- a/warehouse/query-core/src/test/java/datawave/query/LenientFieldsTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/LenientFieldsTest.java @@ -40,8 +40,10 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; import datawave.query.attributes.PreNormalizedAttribute; @@ -54,8 +56,6 @@ import datawave.test.JexlNodeAssert; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Loads some data in a mock accumulo table and then issues queries against the table using the shard query table. 
@@ -156,7 +156,7 @@ public static JavaArchive createDeployment() throws Exception { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/LongRunningQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/LongRunningQueryTest.java index 99580c2097e..cdae87d968e 100644 --- a/warehouse/query-core/src/test/java/datawave/query/LongRunningQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/LongRunningQueryTest.java @@ -15,10 +15,8 @@ import java.util.Set; import java.util.TimeZone; import java.util.UUID; -import java.util.concurrent.Executors; import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; import org.junit.Before; @@ -29,8 +27,14 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.helpers.PrintUtility; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import 
datawave.query.attributes.UniqueFields; import datawave.query.attributes.UniqueGranularity; @@ -43,13 +47,7 @@ import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.util.TableName; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.cache.QueryExpirationConfiguration; -import datawave.webservice.query.cache.ResultsPage; import datawave.webservice.query.cache.RunningQueryTimingImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.runner.RunningQuery; /** @@ -136,7 +134,7 @@ public void testLongRunningGroupByQuery() throws Exception { GenericQueryConfiguration config = logic.initialize(client, query, Collections.singleton(auths)); logic.setupQuery(config); - QueryExpirationConfiguration conf = new QueryExpirationConfiguration(); + QueryExpirationProperties conf = new QueryExpirationProperties(); conf.setMaxLongRunningTimeoutRetries(1000); RunningQueryTimingImpl timing = new RunningQueryTimingImpl(conf, 1); RunningQuery runningQuery = new RunningQuery(null, client, AccumuloConnectionFactory.Priority.NORMAL, logic, query, "", datawavePrincipal, timing, null, @@ -198,7 +196,7 @@ public void testLongRunningUniqueQuery() throws Exception { ShardQueryConfiguration config = (ShardQueryConfiguration) logic.initialize(client, query, Collections.singleton(auths)); logic.setupQuery(config); - QueryExpirationConfiguration conf = new QueryExpirationConfiguration(); + QueryExpirationProperties conf = new QueryExpirationProperties(); conf.setMaxLongRunningTimeoutRetries(1000); RunningQueryTimingImpl timing = new RunningQueryTimingImpl(conf, 1); RunningQuery runningQuery = new RunningQuery(null, client, AccumuloConnectionFactory.Priority.NORMAL, logic, query, "", 
datawavePrincipal, timing, null, diff --git a/warehouse/query-core/src/test/java/datawave/query/LuceneQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/LuceneQueryTest.java index 68f6934f08c..5d47f7a6e23 100644 --- a/warehouse/query-core/src/test/java/datawave/query/LuceneQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/LuceneQueryTest.java @@ -15,6 +15,8 @@ import datawave.query.language.functions.jexl.EvaluationOnly; import datawave.query.language.functions.jexl.JexlQueryFunction; import datawave.query.language.parser.jexl.LuceneToJexlQueryParser; +import datawave.query.planner.DefaultQueryPlanner; +import datawave.query.tables.ShardQueryLogic; import datawave.query.testframework.AbstractFunctionalQuery; import datawave.query.testframework.AccumuloSetup; import datawave.query.testframework.CitiesDataType; @@ -262,6 +264,11 @@ public void testCompareFunction() throws Exception { protected void testInit() { this.auths = CitiesDataType.getTestAuths(); this.documentKey = CityField.EVENT_ID.name(); + } + + @Override + public ShardQueryLogic createShardQueryLogic() { + ShardQueryLogic logic = super.createShardQueryLogic(); LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser(); @@ -272,6 +279,7 @@ protected void testInit() { } } - this.logic.setParser(parser); + logic.setParser(parser); + return logic; } } diff --git a/warehouse/query-core/src/test/java/datawave/query/MaxExpansionRegexQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/MaxExpansionRegexQueryTest.java index f8151c183f0..3b67350d1d0 100644 --- a/warehouse/query-core/src/test/java/datawave/query/MaxExpansionRegexQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/MaxExpansionRegexQueryTest.java @@ -8,7 +8,6 @@ import static datawave.query.testframework.RawDataManager.RE_OP; import static datawave.query.testframework.RawDataManager.RN_OP; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; 
import static org.junit.Assert.fail; import java.io.File; @@ -17,7 +16,6 @@ import java.util.Collection; import java.util.List; -import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.BeforeClass; @@ -273,7 +271,11 @@ public void testMaxIvaratorResultsFailsQuery() throws Exception { runTest(query, expect); // verify that the ivarators ran and completed - assertTrue(countComplete(dirs) >= 1); + if (this.logic.isCheckpointable()) { + assertEquals(8, countComplete(dirs)); + } else { + assertEquals(3, countComplete(dirs)); + } // clear list before new set is added dirs.clear(); @@ -327,8 +329,13 @@ public void testMaxIvaratorResults() throws Exception { this.logic.setIvaratorCacheBufferSize(2); runTest(query, expect); + // verify that the ivarators ran and completed - assertTrue(countComplete(dirs) >= 1); + if (this.logic.isCheckpointable()) { + assertEquals(8, countComplete(dirs)); + } else { + assertEquals(3, countComplete(dirs)); + } // clear list before new set is added dirs.clear(); diff --git a/warehouse/query-core/src/test/java/datawave/query/MixedGeoAndGeoWaveTest.java b/warehouse/query-core/src/test/java/datawave/query/MixedGeoAndGeoWaveTest.java index 6fdf29398f0..1bd0afce016 100644 --- a/warehouse/query-core/src/test/java/datawave/query/MixedGeoAndGeoWaveTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/MixedGeoAndGeoWaveTest.java @@ -1,13 +1,13 @@ package datawave.query; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static 
datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import java.net.URL; import java.text.SimpleDateFormat; @@ -75,6 +75,9 @@ import datawave.ingest.mapreduce.partition.BalancedShardPartitioner; import datawave.ingest.table.config.ShardTableConfigHelper; import datawave.ingest.table.config.TableConfigHelper; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.policy.IngestPolicyEnforcer; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.InvalidQueryException; @@ -86,10 +89,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; @@ -182,9 +181,10 @@ public class MixedGeoAndGeoWaveTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, 
"org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -704,7 +704,7 @@ private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) t params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + datawave.microservice.query.QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); diff --git a/warehouse/query-core/src/test/java/datawave/query/NumericListQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/NumericListQueryTest.java index 154cf6a53b3..a99e5708b17 100644 --- a/warehouse/query-core/src/test/java/datawave/query/NumericListQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/NumericListQueryTest.java @@ -36,11 +36,12 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; -import datawave.query.attributes.Content; import datawave.query.attributes.Document; import datawave.query.function.JexlEvaluation; import 
datawave.query.function.deserializer.KryoDocumentDeserializer; @@ -51,8 +52,6 @@ import datawave.test.JexlNodeAssert; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests the limit.fields feature to ensure that hit terms are always included and that associated fields at the same grouping context are included along with @@ -131,7 +130,7 @@ public static JavaArchive createDeployment() throws Exception { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/ShapesTest.java b/warehouse/query-core/src/test/java/datawave/query/ShapesTest.java index 6fae4a0cd29..f266d5eb7b3 100644 --- a/warehouse/query-core/src/test/java/datawave/query/ShapesTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/ShapesTest.java @@ -39,8 +39,10 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; import datawave.query.attributes.TypeAttribute; @@ -53,8 +55,6 @@ import datawave.query.util.ShapesIngest; import datawave.util.TableName; 
import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * A set of tests that emphasize the influence of datatypes on query planning and execution @@ -148,7 +148,6 @@ public static JavaArchive createDeployment() throws Exception { .deleteClass(DefaultEdgeEventQueryLogic.class) .deleteClass(RemoteEdgeDictionary.class) .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) - .deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/TestLimitReturnedGroupsToHitTermGroups.java b/warehouse/query-core/src/test/java/datawave/query/TestLimitReturnedGroupsToHitTermGroups.java index 215d07ca179..b72bc23eee7 100644 --- a/warehouse/query-core/src/test/java/datawave/query/TestLimitReturnedGroupsToHitTermGroups.java +++ b/warehouse/query-core/src/test/java/datawave/query/TestLimitReturnedGroupsToHitTermGroups.java @@ -36,11 +36,12 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; -import datawave.query.attributes.Content; import datawave.query.attributes.Document; import datawave.query.function.JexlEvaluation; import datawave.query.function.deserializer.KryoDocumentDeserializer; @@ -49,8 +50,6 @@ import datawave.query.util.CommonalityTokenTestDataIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import 
datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests the limit.fields feature to ensure that hit terms are always included and that associated fields at the same grouping context are included along with @@ -127,9 +126,9 @@ public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/TextFunctionQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/TextFunctionQueryTest.java index 6f2d3c7b573..d107388349c 100644 --- a/warehouse/query-core/src/test/java/datawave/query/TextFunctionQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/TextFunctionQueryTest.java @@ -15,6 +15,7 @@ import datawave.query.language.parser.jexl.LuceneToJexlQueryParser; import datawave.query.planner.DefaultQueryPlanner; +import datawave.query.tables.ShardQueryLogic; import datawave.query.testframework.AbstractFunctionalQuery; import datawave.query.testframework.AccumuloSetup; import datawave.query.testframework.CitiesDataType; @@ -121,7 +122,12 @@ public void testMultiFieldText() throws Exception { protected void testInit() { this.auths = CitiesDataType.getTestAuths(); this.documentKey = CityField.EVENT_ID.name(); + } - this.logic.setParser(new LuceneToJexlQueryParser()); + @Override + public 
ShardQueryLogic createShardQueryLogic() { + ShardQueryLogic logic = super.createShardQueryLogic(); + logic.setParser(new LuceneToJexlQueryParser()); + return logic; } } diff --git a/warehouse/query-core/src/test/java/datawave/query/UnevaluatedFieldsQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/UnevaluatedFieldsQueryTest.java index 5d1a16ad23c..595952230c5 100644 --- a/warehouse/query-core/src/test/java/datawave/query/UnevaluatedFieldsQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/UnevaluatedFieldsQueryTest.java @@ -19,6 +19,7 @@ import org.junit.Test; import datawave.query.exceptions.InvalidQueryException; +import datawave.query.tables.ShardQueryLogic; import datawave.query.testframework.AbstractFields; import datawave.query.testframework.AbstractFunctionalQuery; import datawave.query.testframework.AccumuloSetup; @@ -101,8 +102,13 @@ public void testAndNot() throws Exception { protected void testInit() { this.auths = CitiesDataType.getTestAuths(); this.documentKey = CityField.EVENT_ID.name(); + } - this.logic.setUnevaluatedFields(UnevaluatedCityFields.indexOnly); + @Override + public ShardQueryLogic createShardQueryLogic() { + ShardQueryLogic logic = super.createShardQueryLogic(); + logic.setUnevaluatedFields(UnevaluatedCityFields.indexOnly); + return logic; } // ============================================ diff --git a/warehouse/query-core/src/test/java/datawave/query/UnindexedNumericQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/UnindexedNumericQueryTest.java index f4aa7cb93eb..58d22dc60b4 100644 --- a/warehouse/query-core/src/test/java/datawave/query/UnindexedNumericQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/UnindexedNumericQueryTest.java @@ -21,6 +21,7 @@ import org.junit.ClassRule; import org.junit.Test; +import datawave.core.query.configuration.QueryData; import datawave.data.type.NumberType; import datawave.query.config.ShardQueryConfiguration; import 
datawave.query.iterator.QueryIterator; @@ -34,7 +35,6 @@ import datawave.query.testframework.FieldConfig; import datawave.query.testframework.FileType; import datawave.query.testframework.GenericCityFields; -import datawave.webservice.query.configuration.QueryData; public class UnindexedNumericQueryTest extends AbstractFunctionalQuery { @@ -82,7 +82,7 @@ public void testNumericTerm() throws Exception { NumberType nt = new NumberType(); String norm90 = nt.normalize(min); - Iterator queries = config.getQueries(); + Iterator queries = config.getQueriesIter(); Assert.assertTrue(queries.hasNext()); QueryData data = queries.next(); for (IteratorSetting it : data.getSettings()) { @@ -118,7 +118,7 @@ public void testRange() throws Exception { String norm90 = nt.normalize(min); String norm122 = nt.normalize(max); - Iterator queries = config.getQueries(); + Iterator queries = config.getQueriesIter(); Assert.assertTrue(queries.hasNext()); QueryData data = queries.next(); for (IteratorSetting it : data.getSettings()) { diff --git a/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java b/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java index af4f4de4de5..b4032285ac1 100644 --- a/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java @@ -36,8 +36,11 @@ import com.google.common.collect.Sets; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.exceptions.InvalidQueryException; import datawave.query.function.deserializer.KryoDocumentDeserializer; import datawave.query.tables.ShardQueryLogic; @@ -46,9 +49,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import 
datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.iterator.DatawaveTransformIterator; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.DefaultEventQueryResponse; @@ -135,9 +135,9 @@ public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/UseOccurrenceToCountInJexlContextTest.java b/warehouse/query-core/src/test/java/datawave/query/UseOccurrenceToCountInJexlContextTest.java index 7f64cced888..d4a090d4a52 100644 --- a/warehouse/query-core/src/test/java/datawave/query/UseOccurrenceToCountInJexlContextTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/UseOccurrenceToCountInJexlContextTest.java @@ -35,6 +35,7 @@ import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.Multimap; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.data.ColumnFamilyConstants; import datawave.data.hash.UID; import datawave.data.type.LcNoDiacriticsType; @@ -43,6 +44,7 @@ import datawave.ingest.data.TypeRegistry; import 
datawave.ingest.protobuf.Uid; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; import datawave.query.attributes.Document; @@ -55,8 +57,6 @@ import datawave.query.util.DateIndexHelperFactory; import datawave.query.util.MetadataHelperFactory; import datawave.util.TableName; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * diff --git a/warehouse/query-core/src/test/java/datawave/query/cardinality/TestCardinalityWithQuery.java b/warehouse/query-core/src/test/java/datawave/query/cardinality/TestCardinalityWithQuery.java index 15a999a6462..41c1cf7adbc 100644 --- a/warehouse/query-core/src/test/java/datawave/query/cardinality/TestCardinalityWithQuery.java +++ b/warehouse/query-core/src/test/java/datawave/query/cardinality/TestCardinalityWithQuery.java @@ -33,8 +33,12 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.logic.AbstractQueryLogicTransformer; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.ingest.protobuf.Uid; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.query.QueryTestTableHelper; import datawave.query.tables.ShardQueryLogic; @@ -46,10 +50,6 @@ import datawave.security.authorization.DatawaveUser.UserType; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.util.DnUtils; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.AbstractQueryLogicTransformer; -import 
datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.runner.RunningQuery; import datawave.webservice.result.EventQueryResponseBase; diff --git a/warehouse/query-core/src/test/java/datawave/query/config/ShardQueryConfigurationTest.java b/warehouse/query-core/src/test/java/datawave/query/config/ShardQueryConfigurationTest.java index 918b1f6139f..f1f22b30725 100644 --- a/warehouse/query-core/src/test/java/datawave/query/config/ShardQueryConfigurationTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/config/ShardQueryConfigurationTest.java @@ -18,7 +18,9 @@ import org.junit.Test; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.MapperFeature; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableMultimap; @@ -32,6 +34,8 @@ import datawave.data.type.LcNoDiacriticsType; import datawave.data.type.NoOpType; import datawave.data.type.Type; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.DocumentSerialization; import datawave.query.attributes.ExcerptFields; import datawave.query.attributes.UniqueFields; @@ -42,8 +46,6 @@ import datawave.query.jexl.JexlASTHelper; import datawave.query.model.QueryModel; import datawave.util.TableName; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; public class ShardQueryConfigurationTest { @@ -87,6 +89,29 @@ public void setUp() throws Exception { // The set of default values (optionally as predicates, // alternate values (to test the setters/getters), // and optional alternate predicates for testing equality. 
+ defaultValues.put("checkpointable", false); + updatedValues.put("checkpointable", true); + + defaultValues.put("auths", Sets.newHashSet()); + updatedValues.put("auths", Sets.newHashSet("FOO", "BAR")); + + defaultValues.put("queries", Collections.emptyList()); + updatedValues.put("queries", Lists.newArrayList(new QueryImpl())); + + defaultValues.put("bloom", null); + updatedValues.put("bloom", null); + alreadySet.add("bloom"); + + defaultValues.put("activeQueryLogName", ""); + updatedValues.put("activeQueryLogName", "ShardQueryConfiguration"); + alreadySet.add("activeQueryLogName"); + + defaultValues.put("limitTermExpansionToModel", false); + updatedValues.put("limitTermExpansionToModel", true); + + defaultValues.put("shardDateFormat", "yyyyMMdd"); + updatedValues.put("shardDateFormat", "yyyyMMddHHmmss"); + defaultValues.put("authorizations", Collections.singleton(Authorizations.EMPTY)); updatedValues.put("authorizations", Collections.singleton(new Authorizations("FOO", "BAR"))); @@ -109,6 +134,8 @@ public void setUp() throws Exception { updatedValues.put("bypassAccumulo", true); defaultValues.put("accumuloPassword", ""); updatedValues.put("accumuloPassword", "secret"); + defaultValues.put("reduceResults", false); + updatedValues.put("reduceResults", true); defaultValues.put("tldQuery", false); updatedValues.put("tldQuery", true); defaultValues.put("filterOptions", Maps.newHashMap()); @@ -468,8 +495,6 @@ public void setUp() throws Exception { updatedValues.put("pruneQueryByIngestTypes", true); defaultValues.put("numIndexLookupThreads", 8); updatedValues.put("numIndexLookupThreads", 18); - defaultValues.put("expansionLimitedToModelContents", false); - updatedValues.put("expansionLimitedToModelContents", true); defaultValues.put("accrueStats", false); updatedValues.put("accrueStats", true); defaultValues.put("dataTypes", HashMultimap.create()); @@ -478,15 +503,9 @@ public void setUp() throws Exception { defaultValues.put("enricherClassNames", null); 
updatedValues.put("enricherClassNames", Lists.newArrayList("proj.datawave.query.enricher.someEnricherClass")); - defaultValues.put("enricherClassNamesAsString", null); - updatedValues.put("enricherClassNamesAsString", "proj.datawave.query.enricher.someEnricherClass"); - alreadySet.add("enricherClassNamesAsString"); - defaultValues.put("filterClassNames", null); + defaultValues.put("filterClassNames", Collections.emptyList()); updatedValues.put("filterClassNames", Lists.newArrayList("proj.datawave.query.filter.someFilterClass")); - defaultValues.put("filterClassNamesAsString", null); - updatedValues.put("filterClassNamesAsString", "proj.datawave.query.filter.someFilterClass"); - alreadySet.add("filterClassNamesAsString"); defaultValues.put("nonEventKeyPrefixes", Sets.newHashSet("d", "tf")); updatedValues.put("nonEventKeyPrefixes", Sets.newHashSet("d", "tf", "fi")); @@ -496,9 +515,6 @@ public void setUp() throws Exception { defaultValues.put("unevaluatedFields", Sets.newHashSet()); updatedValues.put("unevaluatedFields", Sets.newHashSet("FIELD_U", "FIELD_V")); - defaultValues.put("unevaluatedFieldsAsString", ""); - updatedValues.put("unevaluatedFieldsAsString", "FIELD_U,FIELD_V"); - alreadySet.add("unevaluatedFieldsAsString"); defaultValues.put("datatypeFilter", Sets.newHashSet()); updatedValues.put("datatypeFilter", Sets.newHashSet("TYPE_A", "TYPE_B")); @@ -589,7 +605,7 @@ private ArrayListMultimap createArrayListMultimap(Multimap m } private void testValues(ShardQueryConfiguration config, Map values, Map predicates) throws Exception { - ObjectMapper mapper = new ObjectMapper(); + ObjectMapper mapper = JsonMapper.builder().enable(MapperFeature.PROPAGATE_TRANSIENT_MARKER).build(); JsonNode root = mapper.readTree(mapper.writeValueAsString(config)); Set fieldsFound = new HashSet<>(); for (Iterator it = root.fieldNames(); it.hasNext();) { diff --git a/warehouse/query-core/src/test/java/datawave/query/discovery/DiscoveryLogicTest.java 
b/warehouse/query-core/src/test/java/datawave/query/discovery/DiscoveryLogicTest.java index f41f1613353..b8d3c226d0a 100644 --- a/warehouse/query-core/src/test/java/datawave/query/discovery/DiscoveryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/discovery/DiscoveryLogicTest.java @@ -30,16 +30,16 @@ import com.google.common.collect.ImmutableSet; import com.google.common.collect.Sets; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.data.type.LcNoDiacriticsType; import datawave.ingest.protobuf.Uid; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.query.MockAccumuloRecordWriter; import datawave.query.QueryTestTableHelper; import datawave.query.util.MetadataHelperFactory; import datawave.util.TableName; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; public class DiscoveryLogicTest { private static Logger log = Logger.getLogger(DiscoveryLogicTest.class); @@ -93,6 +93,7 @@ public void setup() throws Throwable { logic.setIndexTableName(TableName.SHARD_INDEX); logic.setReverseIndexTableName(TableName.SHARD_RINDEX); logic.setModelTableName(QueryTestTableHelper.METADATA_TABLE_NAME); + logic.setMetadataTableName(QueryTestTableHelper.METADATA_TABLE_NAME); logic.setModelName("DATAWAVE"); logic.setFullTableScanEnabled(false); logic.setMaxResults(-1); diff --git a/warehouse/query-core/src/test/java/datawave/query/edge/ExtendedEdgeQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/edge/ExtendedEdgeQueryLogicTest.java index b84c41eb4a5..702e4210079 100644 --- a/warehouse/query-core/src/test/java/datawave/query/edge/ExtendedEdgeQueryLogicTest.java +++ 
b/warehouse/query-core/src/test/java/datawave/query/edge/ExtendedEdgeQueryLogicTest.java @@ -6,26 +6,25 @@ import java.util.Map.Entry; import java.util.Set; -import javax.inject.Inject; - import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.security.Authorizations; import org.junit.Assert; import org.junit.Test; -import datawave.configuration.spring.SpringBean; import datawave.core.iterators.ColumnRangeIterator; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.query.QueryImpl; import datawave.query.tables.edge.EdgeQueryFunctionalTest; import datawave.query.tables.edge.EdgeQueryLogic; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public class ExtendedEdgeQueryLogicTest extends EdgeQueryFunctionalTest { - @Inject - @SpringBean(name = "ExtendedEdgeQuery") - private DefaultExtendedEdgeQueryLogic logic; + @Override + public QueryLogic createLogic() throws Exception { + return factory.getQueryLogic("ExtendedEdgeQuery"); + } @Override public DefaultExtendedEdgeQueryLogic runLogic(QueryImpl q, Set auths) throws Exception { @@ -33,10 +32,10 @@ public DefaultExtendedEdgeQueryLogic runLogic(QueryImpl q, Set a } public DefaultExtendedEdgeQueryLogic runLogic(QueryImpl q, Set auths, long scanLimit) throws Exception { - GenericQueryConfiguration config = logic.initialize(client, q, auths); logic.setDateFilterScanLimit(scanLimit); + GenericQueryConfiguration config = logic.initialize(client, q, auths); logic.setupQuery(config); - return logic; + return (DefaultExtendedEdgeQueryLogic) logic; } @Test @@ -51,7 +50,7 @@ public void testEdgeQuerySyntax() throws Exception { expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + 
compareResults(logic, factory, expected); } @Test @@ -67,7 +66,7 @@ public void testEdgeQuerySyntax_WithQueryModel() throws Exception { expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -83,7 +82,7 @@ public void testEdgeQuerySyntaxLuceneWithQueryModel() throws Exception { expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test(expected = UnsupportedOperationException.class) @@ -95,7 +94,7 @@ public void testUnknownFunction() throws Exception { List expected = new ArrayList<>(); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -114,7 +113,7 @@ public void testEdgeSummaryQuerySyntax() throws Exception { expected.add("mars%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -206,11 +205,11 @@ public void testEdgeQueryWithScanLimit() throws Exception { expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); try { logic = runLogic(q, auths, 1); - compareResults(logic, expected); + compareResults(logic, factory, expected); Assert.fail("Expected to fail because the scan limit was reached"); } catch (ColumnRangeIterator.ScanLimitReached e) { // expected diff --git a/warehouse/query-core/src/test/java/datawave/query/function/HitsAreAlwaysIncludedTest.java 
b/warehouse/query-core/src/test/java/datawave/query/function/HitsAreAlwaysIncludedTest.java index d51665f769c..fdf940651e5 100644 --- a/warehouse/query-core/src/test/java/datawave/query/function/HitsAreAlwaysIncludedTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/function/HitsAreAlwaysIncludedTest.java @@ -43,9 +43,11 @@ import com.google.common.io.Files; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.data.type.DateType; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryTestTableHelper; import datawave.query.attributes.Attribute; import datawave.query.attributes.Attributes; @@ -58,8 +60,6 @@ import datawave.query.util.LimitFieldsTestingIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests the limit.fields feature to ensure that hit terms are always included and that associated fields at the same grouping context are included along with @@ -138,9 +138,9 @@ public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), 
"beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/index/lookup/EntryParserTest.java b/warehouse/query-core/src/test/java/datawave/query/index/lookup/EntryParserTest.java index c70f95dbe5f..addf06ff5ee 100644 --- a/warehouse/query-core/src/test/java/datawave/query/index/lookup/EntryParserTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/index/lookup/EntryParserTest.java @@ -10,7 +10,6 @@ import java.util.Collections; import java.util.LinkedList; import java.util.List; -import java.util.Map; import java.util.TreeMap; import org.apache.accumulo.core.data.Key; @@ -20,8 +19,7 @@ import org.apache.commons.jexl3.parser.JexlNode; import org.junit.Test; -import com.google.common.collect.Maps; - +import datawave.core.query.configuration.Result; import datawave.ingest.protobuf.Uid; import datawave.query.jexl.JexlNodeFactory; import datawave.query.jexl.visitors.JexlStringBuildingVisitor; @@ -57,7 +55,7 @@ public void testParse_skipNodeDelay() throws IOException { iterator.seek(new Range(), Collections.emptySet(), false); EntryParser parser = new EntryParser("hello", "world", true); - Map.Entry top = Maps.immutableEntry(iterator.getTopKey(), iterator.getTopValue()); + Result top = new Result(iterator.getTopKey(), iterator.getTopValue()); Tuple2 tuple = parser.apply(top); assertTrue(iterator.hasTop()); @@ -92,7 +90,7 @@ public void testParse_NoDocIds_isDayRange() throws IOException { assertTrue(iterator.hasTop()); EntryParser parser = new EntryParser("hello", "world", false); - Map.Entry top = Maps.immutableEntry(iterator.getTopKey(), iterator.getTopValue()); + Result top = new Result(iterator.getTopKey(), iterator.getTopValue()); Tuple2 tuple = parser.apply(top); assertNotNull(tuple); @@ -124,7 +122,7 @@ public void testParse_NoDocIds_isShardRange() throws IOException { assertTrue(iterator.hasTop()); EntryParser parser = new EntryParser("hello", "world", false); - Map.Entry top = Maps.immutableEntry(iterator.getTopKey(), 
iterator.getTopValue()); + Result top = new Result(iterator.getTopKey(), iterator.getTopValue()); Tuple2 tuple = parser.apply(top); assertNotNull(tuple); diff --git a/warehouse/query-core/src/test/java/datawave/query/index/lookup/TupleToRangeTest.java b/warehouse/query-core/src/test/java/datawave/query/index/lookup/TupleToRangeTest.java index b44d11c50ed..e068319f626 100644 --- a/warehouse/query-core/src/test/java/datawave/query/index/lookup/TupleToRangeTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/index/lookup/TupleToRangeTest.java @@ -25,6 +25,7 @@ import datawave.query.jexl.JexlASTHelper; import datawave.query.planner.QueryPlan; import datawave.query.util.Tuple2; +import datawave.util.TableName; public class TupleToRangeTest { @@ -70,7 +71,7 @@ public void testGenerateDocumentRanges() { expectedRanges.add(makeTestRange(shard, "docId2")); // Create the ranges - Iterator ranges = TupleToRange.createDocumentRanges(queryNode, shard, indexInfo, config.isTldQuery()); + Iterator ranges = TupleToRange.createDocumentRanges(TableName.SHARD, queryNode, shard, indexInfo, config.isTldQuery()); // Assert ranges against expected ranges eval(expectedRanges, ranges); @@ -91,7 +92,7 @@ public void testGenerateTldDocumentRanges() { // Create the ranges config.setTldQuery(true); - Iterator ranges = TupleToRange.createDocumentRanges(queryNode, shard, indexInfo, config.isTldQuery()); + Iterator ranges = TupleToRange.createDocumentRanges(TableName.SHARD, queryNode, shard, indexInfo, config.isTldQuery()); // Assert ranges against expected ranges eval(expectedRanges, ranges); @@ -108,7 +109,7 @@ public void testGenerateShardRange() { expectedRanges.add(makeShardedRange(shard)); // Create the ranges - Iterator ranges = TupleToRange.createShardRange(queryNode, shard, indexInfo); + Iterator ranges = TupleToRange.createShardRange(TableName.SHARD, queryNode, shard, indexInfo); // Assert ranges against expected ranges eval(expectedRanges, ranges); @@ -125,7 +126,7 @@ 
public void testGenerateDayRange() { expectedRanges.add(makeDayRange(shard)); // Create the ranges - Iterator ranges = TupleToRange.createDayRange(queryNode, shard, indexInfo); + Iterator ranges = TupleToRange.createDayRange(TableName.SHARD, queryNode, shard, indexInfo); // Assert ranges against expected ranges eval(expectedRanges, ranges); @@ -146,7 +147,7 @@ public void testApplyWithDocumentRange() { expectedRanges.add(makeTestRange(shard, "docId2")); // Create the ranges - TupleToRange tupleToRange = new TupleToRange(queryNode, config); + TupleToRange tupleToRange = new TupleToRange(TableName.SHARD, queryNode, config); Iterator ranges = tupleToRange.apply(tuple); // Assert ranges against expected ranges @@ -169,7 +170,7 @@ public void testApplyWithTldDocumentRange() { // Create the ranges config.setTldQuery(true); - TupleToRange tupleToRange = new TupleToRange(queryNode, config); + TupleToRange tupleToRange = new TupleToRange(TableName.SHARD, queryNode, config); Iterator ranges = tupleToRange.apply(tuple); // Assert ranges against expected ranges @@ -188,7 +189,7 @@ public void testApplyWithShardRange() { expectedRanges.add(makeShardedRange(shard)); // Create the ranges - TupleToRange tupleToRange = new TupleToRange(queryNode, config); + TupleToRange tupleToRange = new TupleToRange(TableName.SHARD, queryNode, config); Iterator ranges = tupleToRange.apply(tuple); // Assert ranges against expected ranges @@ -207,7 +208,7 @@ public void testApplyWithDayRange() { expectedRanges.add(makeDayRange(shard)); // Create the ranges - TupleToRange tupleToRange = new TupleToRange(queryNode, config); + TupleToRange tupleToRange = new TupleToRange(TableName.SHARD, queryNode, config); Iterator ranges = tupleToRange.apply(tuple); // Assert ranges against expected ranges diff --git a/warehouse/query-core/src/test/java/datawave/query/jexl/functions/ContentFunctionQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/jexl/functions/ContentFunctionQueryTest.java index 
901972a250a..50c1781a4fd 100644 --- a/warehouse/query-core/src/test/java/datawave/query/jexl/functions/ContentFunctionQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/jexl/functions/ContentFunctionQueryTest.java @@ -1,14 +1,14 @@ package datawave.query.jexl.functions; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import static datawave.query.QueryParameters.DATE_RANGE_TYPE; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -81,6 +81,10 @@ import datawave.ingest.protobuf.Uid; import datawave.ingest.table.config.ShardTableConfigHelper; import datawave.ingest.table.config.TableConfigHelper; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import 
datawave.policy.IngestPolicyEnforcer; import datawave.query.config.ShardQueryConfiguration; import datawave.query.iterator.ivarator.IvaratorCacheDirConfig; @@ -91,10 +95,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; @@ -133,9 +133,10 @@ public class ContentFunctionQueryTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -340,7 +341,7 @@ private Iterator getResultsIterator(String queryString, ShardQueryLogic logic, M params.set(QUERY_BEGIN, BEGIN_DATE); params.set(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); diff --git 
a/warehouse/query-core/src/test/java/datawave/query/jexl/nodes/ExceededOrThresholdMarkerJexlNodeTest.java b/warehouse/query-core/src/test/java/datawave/query/jexl/nodes/ExceededOrThresholdMarkerJexlNodeTest.java index 43080626572..1cbaa29f623 100644 --- a/warehouse/query-core/src/test/java/datawave/query/jexl/nodes/ExceededOrThresholdMarkerJexlNodeTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/jexl/nodes/ExceededOrThresholdMarkerJexlNodeTest.java @@ -1,13 +1,13 @@ package datawave.query.jexl.nodes; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import java.io.IOException; import java.net.URL; @@ -72,6 +72,10 @@ import datawave.ingest.mapreduce.partition.BalancedShardPartitioner; import datawave.ingest.table.config.ShardTableConfigHelper; import datawave.ingest.table.config.TableConfigHelper; +import 
datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import datawave.policy.IngestPolicyEnforcer; import datawave.query.config.ShardQueryConfiguration; import datawave.query.iterator.ivarator.IvaratorCacheDirConfig; @@ -84,10 +88,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; @@ -184,9 +184,10 @@ public class ExceededOrThresholdMarkerJexlNodeTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -628,7 +629,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + 
QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); @@ -643,7 +644,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi logic.setupQuery(config); - return Iterators.transform(config.getQueries(), queryData -> { + return Iterators.transform(config.getQueriesIter(), queryData -> { try { return JexlStringBuildingVisitor .buildQuery(PushdownLargeFieldedListsVisitor.pushdown(config, JexlASTHelper.parseJexlQuery(queryData.getQuery()), null, null)); @@ -664,7 +665,7 @@ private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) t params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); diff --git a/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitorTest.java b/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitorTest.java index a333654128e..19b15912cc9 100644 --- a/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitorTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/EdgeTableRangeBuildingVisitorTest.java @@ -12,6 +12,8 @@ import org.junit.Before; import org.junit.Test; +import datawave.edge.model.DefaultEdgeModelFieldsFactory; +import datawave.edge.model.EdgeModelFields; import datawave.query.tables.edge.EdgeQueryLogic; public class EdgeTableRangeBuildingVisitorTest { @@ -25,7 +27,7 @@ public class EdgeTableRangeBuildingVisitorTest { public void setup() { parser = new Parser(new StringProvider(";")); - visitor = new EdgeTableRangeBuildingVisitor(false, emptyList(), termLimit, emptyList()); + visitor = new EdgeTableRangeBuildingVisitor(false, emptyList(), termLimit, emptyList(), new 
DefaultEdgeModelFieldsFactory().createFields()); } @Test diff --git a/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/ExecutableExpansionVisitorTest.java b/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/ExecutableExpansionVisitorTest.java index 693b2a753ee..c50cc0ae384 100644 --- a/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/ExecutableExpansionVisitorTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/jexl/visitors/ExecutableExpansionVisitorTest.java @@ -50,10 +50,12 @@ import com.google.common.collect.Multimap; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.data.type.GeometryType; import datawave.data.type.Type; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.CompositeFunctionsTest; import datawave.query.QueryTestTableHelper; import datawave.query.attributes.Attribute; @@ -74,8 +76,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public abstract class ExecutableExpansionVisitorTest { @RunWith(Arquillian.class) @@ -153,7 +153,8 @@ public static JavaArchive createDeployment() throws Exception { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) + 
.deleteClass(datawave.query.tables.edge.DefaultEdgeEventQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/planner/CompositeIndexTest.java b/warehouse/query-core/src/test/java/datawave/query/planner/CompositeIndexTest.java index c0fe6282a1b..5494e493191 100644 --- a/warehouse/query-core/src/test/java/datawave/query/planner/CompositeIndexTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/planner/CompositeIndexTest.java @@ -1,15 +1,15 @@ package datawave.query.planner; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import static datawave.query.testframework.RawDataManager.JEXL_AND_OP; import static datawave.query.testframework.RawDataManager.JEXL_OR_OP; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; import java.net.URL; import 
java.text.SimpleDateFormat; @@ -57,6 +57,7 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.QueryData; import datawave.data.ColumnFamilyConstants; import datawave.data.type.GeometryType; import datawave.data.type.NumberType; @@ -76,6 +77,10 @@ import datawave.ingest.mapreduce.partition.BalancedShardPartitioner; import datawave.ingest.table.config.ShardTableConfigHelper; import datawave.ingest.table.config.TableConfigHelper; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import datawave.policy.IngestPolicyEnforcer; import datawave.query.composite.CompositeMetadataHelper; import datawave.query.config.ShardQueryConfiguration; @@ -85,11 +90,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.configuration.QueryData; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; @@ -195,9 +195,10 @@ public class CompositeIndexTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") 
.deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -520,7 +521,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); @@ -535,7 +536,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi logic.setupQuery(config); - return config.getQueries(); + return config.getQueriesIter(); } private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) throws Exception { @@ -549,7 +550,7 @@ private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) t params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); diff --git a/warehouse/query-core/src/test/java/datawave/query/planner/GeoSortedQueryDataTest.java b/warehouse/query-core/src/test/java/datawave/query/planner/GeoSortedQueryDataTest.java index 706b76712ba..5d61fd9577c 100644 --- a/warehouse/query-core/src/test/java/datawave/query/planner/GeoSortedQueryDataTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/planner/GeoSortedQueryDataTest.java @@ -1,13 +1,13 @@ package datawave.query.planner; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static 
datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; @@ -56,6 +56,7 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.QueryData; import datawave.ingest.config.RawRecordContainerImpl; import datawave.ingest.data.RawRecordContainer; import datawave.ingest.data.Type; @@ -65,6 +66,10 @@ import datawave.ingest.data.config.ingest.ContentBaseIngestHelper; import datawave.ingest.mapreduce.handler.shard.AbstractColumnBasedHandler; import datawave.ingest.mapreduce.job.BulkIngestKey; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import 
datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.jexl.JexlASTHelper; @@ -74,11 +79,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.configuration.QueryData; @RunWith(Arquillian.class) public class GeoSortedQueryDataTest { @@ -148,9 +148,10 @@ public class GeoSortedQueryDataTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -338,7 +339,7 @@ private Iterator initializeGeoQuery() throws Exception { params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); @@ -353,7 +354,7 @@ private Iterator initializeGeoQuery() throws Exception { logic.setupQuery(config); - return 
config.getQueries(); + return config.getQueriesIter(); } public static class TestIngestHelper extends ContentBaseIngestHelper { diff --git a/warehouse/query-core/src/test/java/datawave/query/planner/MultiValueCompositeIndexTest.java b/warehouse/query-core/src/test/java/datawave/query/planner/MultiValueCompositeIndexTest.java index 012331c871e..f217938d8e1 100644 --- a/warehouse/query-core/src/test/java/datawave/query/planner/MultiValueCompositeIndexTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/planner/MultiValueCompositeIndexTest.java @@ -1,15 +1,15 @@ package datawave.query.planner; +import static datawave.microservice.query.QueryParameters.QUERY_AUTHORIZATIONS; +import static datawave.microservice.query.QueryParameters.QUERY_BEGIN; +import static datawave.microservice.query.QueryParameters.QUERY_END; +import static datawave.microservice.query.QueryParameters.QUERY_EXPIRATION; +import static datawave.microservice.query.QueryParameters.QUERY_LOGIC_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_NAME; +import static datawave.microservice.query.QueryParameters.QUERY_PERSISTENCE; +import static datawave.microservice.query.QueryParameters.QUERY_STRING; import static datawave.query.testframework.RawDataManager.JEXL_AND_OP; import static datawave.query.testframework.RawDataManager.JEXL_OR_OP; -import static datawave.webservice.query.QueryParameters.QUERY_AUTHORIZATIONS; -import static datawave.webservice.query.QueryParameters.QUERY_BEGIN; -import static datawave.webservice.query.QueryParameters.QUERY_END; -import static datawave.webservice.query.QueryParameters.QUERY_EXPIRATION; -import static datawave.webservice.query.QueryParameters.QUERY_LOGIC_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_NAME; -import static datawave.webservice.query.QueryParameters.QUERY_PERSISTENCE; -import static datawave.webservice.query.QueryParameters.QUERY_STRING; import java.io.UnsupportedEncodingException; import java.net.URL; 
@@ -57,6 +57,7 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.QueryData; import datawave.data.type.GeometryType; import datawave.data.type.NumberType; import datawave.ingest.config.RawRecordContainerImpl; @@ -74,6 +75,10 @@ import datawave.ingest.mapreduce.partition.BalancedShardPartitioner; import datawave.ingest.table.config.ShardTableConfigHelper; import datawave.ingest.table.config.TableConfigHelper; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import datawave.policy.IngestPolicyEnforcer; import datawave.query.config.ShardQueryConfiguration; import datawave.query.iterator.ivarator.IvaratorCacheDirConfig; @@ -82,11 +87,6 @@ import datawave.query.testframework.MockStatusReporter; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.configuration.QueryData; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; @@ -178,9 +178,10 @@ public int hashCode() { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") 
.deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -410,7 +411,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); @@ -425,7 +426,7 @@ private Iterator getQueryRangesIterator(String queryString, ShardQueryLogic logi logic.setupQuery(config); - return config.getQueries(); + return config.getQueriesIter(); } private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) throws Exception { @@ -439,7 +440,7 @@ private Iterator getResultsIterator(String queryString, ShardQueryLogic logic) t params.putSingle(QUERY_BEGIN, BEGIN_DATE); params.putSingle(QUERY_END, END_DATE); - QueryParameters queryParams = new QueryParametersImpl(); + QueryParameters queryParams = new DefaultQueryParameters(); queryParams.validate(params); Set auths = new HashSet<>(); diff --git a/warehouse/query-core/src/test/java/datawave/query/planner/ThreadedRangeBundlerTest.java b/warehouse/query-core/src/test/java/datawave/query/planner/ThreadedRangeBundlerTest.java index 97f3086b3f2..ef494e34ada 100644 --- a/warehouse/query-core/src/test/java/datawave/query/planner/ThreadedRangeBundlerTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/planner/ThreadedRangeBundlerTest.java @@ -16,9 +16,9 @@ import org.junit.Test; import org.junit.rules.ExpectedException; +import datawave.core.query.configuration.QueryData; +import 
datawave.microservice.query.Query; import datawave.query.CloseableIterable; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.QueryData; public class ThreadedRangeBundlerTest { diff --git a/warehouse/query-core/src/test/java/datawave/query/predicate/ValueToAttributesTest.java b/warehouse/query-core/src/test/java/datawave/query/predicate/ValueToAttributesTest.java index 11aaea1e9c2..0dd9877d65e 100644 --- a/warehouse/query-core/src/test/java/datawave/query/predicate/ValueToAttributesTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/predicate/ValueToAttributesTest.java @@ -33,9 +33,11 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryTestTableHelper; import datawave.query.attributes.Attribute; import datawave.query.attributes.Document; @@ -51,8 +53,6 @@ import datawave.query.util.TypeMetadata; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** */ @@ -125,9 +125,9 @@ public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", - "datawave.webservice.query.result.event") + "datawave.webservice.query.result.event", "datawave.core.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + 
.deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/DedupingIteratorTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/DedupingIteratorTest.java index 17a8fa14019..a00e3876679 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/DedupingIteratorTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/DedupingIteratorTest.java @@ -15,13 +15,14 @@ import org.junit.BeforeClass; import org.junit.Test; +import datawave.core.query.configuration.Result; import datawave.query.Constants; public class DedupingIteratorTest { public static final int DUPS_LIST_SZ = 2160; public static final int DEDUPED_LIST_SZ = 1500; - private static List> DUPS_LIST; + private static List DUPS_LIST; private int bloomExpected; private double bloomFpp; @@ -59,7 +60,7 @@ public void before() { public void test_nodups() { assertEquals(DUPS_LIST_SZ, DUPS_LIST.size()); - Iterable> input = () -> new DedupingIterator(DUPS_LIST.iterator(), bloomExpected, bloomFpp); + Iterable input = () -> new DedupingIterator(DUPS_LIST.iterator(), bloomExpected, bloomFpp); List> output = new ArrayList<>(); input.forEach(output::add); @@ -76,36 +77,19 @@ public void test_bloomExpectedTooSmall() { assertEquals(DUPS_LIST_SZ, DUPS_LIST.size()); - Iterable> input = () -> new DedupingIterator(DUPS_LIST.iterator(), bloomExpected, bloomFpp); + Iterable input = () -> new DedupingIterator(DUPS_LIST.iterator(), bloomExpected, bloomFpp); - List> output = new ArrayList<>(); + List output = new ArrayList<>(); input.forEach(output::add); // False positives should've prevented some entries from being included assertTrue(output.size() < DEDUPED_LIST_SZ); } - private static class TestEntry implements Map.Entry { - - private Key key; + private static class TestEntry extends Result { TestEntry(Key 
key) { - this.key = key; - } - - @Override - public Key getKey() { - return this.key; - } - - @Override - public Value getValue() { - return null; - } - - @Override - public Value setValue(Value val) { - return null; + super(key, null); } @Override @@ -115,12 +99,12 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; TestEntry testEntry = (TestEntry) o; - return key.equals(testEntry.key); + return getKey().equals(testEntry.getKey()); } @Override public int hashCode() { - return key.hashCode(); + return getKey().hashCode(); } } } diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/IndexQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/IndexQueryLogicTest.java index d3d95dfb83a..23920ed8f71 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/IndexQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/IndexQueryLogicTest.java @@ -20,6 +20,7 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.marking.MarkingFunctions; import datawave.query.Constants; import datawave.query.QueryTestTableHelper; @@ -38,7 +39,6 @@ import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; /** * See {@link GenericCarFields#index} for which fields are indexed in the data set used by this test. 
diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/RangeStreamScannerTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/RangeStreamScannerTest.java index a2866dcb2dc..b16f60393b9 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/RangeStreamScannerTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/RangeStreamScannerTest.java @@ -6,7 +6,6 @@ import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; -import java.util.AbstractMap; import java.util.ArrayList; import java.util.Collections; import java.util.Date; @@ -46,6 +45,7 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.query.configuration.Result; import datawave.data.type.LcNoDiacriticsType; import datawave.data.type.NumberType; import datawave.data.type.Type; @@ -389,9 +389,9 @@ public void testAdvanceQueueToShard() throws Exception { sortedDatas.put(entry.getKey(), entry.getValue()); } - Queue> datas = Queues.newArrayDeque(); + Queue datas = Queues.newArrayDeque(); for (Key key : sortedDatas.keySet()) { - datas.add(new AbstractMap.SimpleEntry<>(key, sortedDatas.get(key))); + datas.add(new Result<>(key, sortedDatas.get(key))); } RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); @@ -428,12 +428,12 @@ public void testTrimTrailingUnderscoreFromKey() { @Test public void testTrimTrailingUnderscoreFromEntry() { // Expected case. - Entry underscored = new AbstractMap.SimpleEntry<>(new Key("bar", "FOO", "20190314_"), new Value()); - Entry expected = new AbstractMap.SimpleEntry<>(new Key("bar", "FOO", "20190314"), new Value()); + Result underscored = new Result<>(new Key("bar", "FOO", "20190314_"), new Value()); + Result expected = new Result<>(new Key("bar", "FOO", "20190314"), new Value()); assertEquals(expected, RangeStreamScanner.trimTrailingUnderscore(underscored)); // Ensure shard ranges are not affected. 
- Entry shard = new AbstractMap.SimpleEntry<>(new Key("bar", "FOO", "20190314_0"), new Value()); + Result shard = new Result<>(new Key("bar", "FOO", "20190314_0"), new Value()); assertEquals(shard, RangeStreamScanner.trimTrailingUnderscore(shard)); } @@ -442,12 +442,12 @@ public void testCurrentEntryMatchesShard_exactMatch() throws Exception { RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); // Top value is a day - Entry topDay = buildEntry("20190314", "FOO", "bar"); + Result topDay = buildEntry("20190314", "FOO", "bar"); scanner.currentEntry = topDay; assertEquals("20190314", scanner.currentEntryMatchesShard("20190314")); // Top value is a shard - Entry topShard = buildEntry("20190314_0", "FOO", "bar"); + Result topShard = buildEntry("20190314_0", "FOO", "bar"); scanner.currentEntry = topShard; assertEquals("20190314_0", scanner.currentEntryMatchesShard("20190314_0")); } @@ -456,11 +456,11 @@ public void testCurrentEntryMatchesShard_exactMatch() throws Exception { public void testCurrentEntryMatchesShard_topShardBeyondSeekShard() throws Exception { RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); - Entry topDay = buildEntry("20190314", "FOO", "bar"); + Result topDay = buildEntry("20190314", "FOO", "bar"); scanner.currentEntry = topDay; assertEquals("20190314", scanner.currentEntryMatchesShard("20190310")); - Entry topShard = buildEntry("20190314_0", "FOO", "bar"); + Result topShard = buildEntry("20190314_0", "FOO", "bar"); scanner.currentEntry = topShard; assertEquals("20190314_0", scanner.currentEntryMatchesShard("20190310_0")); } @@ -469,7 +469,7 @@ public void testCurrentEntryMatchesShard_topShardBeyondSeekShard() throws Except public void testCurrentEntryMatchesShard_topShardMatchesDay() throws Exception { RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); - Entry topShard = buildEntry("20190314_0", "FOO", "bar"); + Result topShard = buildEntry("20190314_0", "FOO", "bar"); scanner.currentEntry = topShard; 
assertEquals("20190314_0", scanner.currentEntryMatchesShard("20190310")); } @@ -478,7 +478,7 @@ public void testCurrentEntryMatchesShard_topShardMatchesDay() throws Exception { public void testCurrentEntryMatchesShard_topDayMatchesShard() throws Exception { RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); - Entry topDay = buildEntry("20190314", "FOO", "bar"); + Result topDay = buildEntry("20190314", "FOO", "bar"); scanner.currentEntry = topDay; assertEquals("20190314", scanner.currentEntryMatchesShard("20190310_0")); } @@ -487,17 +487,17 @@ public void testCurrentEntryMatchesShard_topDayMatchesShard() throws Exception { public void testCurrentEntryMatchesShard_noMatch() throws Exception { RangeStreamScanner scanner = buildRangeStreamScanner("FOO", "bar"); - Entry topDay = buildEntry("20190314", "FOO", "bar"); + Result topDay = buildEntry("20190314", "FOO", "bar"); scanner.currentEntry = topDay; assertNull(scanner.currentEntryMatchesShard("20190315")); - Entry topShard = buildEntry("20190314_0", "FOO", "bar"); + Result topShard = buildEntry("20190314_0", "FOO", "bar"); scanner.currentEntry = topShard; assertNull("20190314_0", scanner.currentEntryMatchesShard("20190315_0")); } // Assumes entries have the datatype stripped off, per the CreateUidsIterator contract. 
- private Entry buildEntry(String shard, String field, String value) { + private Result buildEntry(String shard, String field, String value) { Uid.List.Builder builder = Uid.List.newBuilder(); builder.addAllUID(Collections.singletonList("uid0")); builder.setCOUNT(1); @@ -507,6 +507,6 @@ private Entry buildEntry(String shard, String field, String value) { Value uids = new Value(list.toByteArray()); Key key = new Key(value, field, shard); - return new AbstractMap.SimpleEntry<>(key, uids); + return new Result<>(key, uids); } } diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicHttpTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicHttpTest.java index 825b5ab1360..a54a35803bf 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicHttpTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicHttpTest.java @@ -47,17 +47,17 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.result.event.DefaultResponseObjectFactory; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.DnUtils; import datawave.webservice.common.json.DefaultMapperDecorator; import datawave.webservice.common.remote.TestJSSESecurityDomain; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.remote.RemoteQueryServiceImpl; import datawave.webservice.query.result.edge.DefaultEdge; import datawave.webservice.query.result.edge.EdgeBase; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import 
datawave.webservice.result.DefaultEdgeQueryResponse; import datawave.webservice.result.GenericResponse; import datawave.webservice.result.VoidResponse; @@ -233,7 +233,7 @@ public void after() { @Test public void testRemoteQuery() throws Exception { - logic.setPrincipal(new DatawavePrincipal(commonName)); + logic.setCurrentUser(new DatawavePrincipal(commonName)); QueryImpl settings = new QueryImpl(); settings.setQuery(query); GenericQueryConfiguration config = logic.initialize(null, settings, null); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicTest.java index 7326badcf01..bdbe0fd28d9 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEdgeQueryLogicTest.java @@ -19,13 +19,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.remote.RemoteQueryService; +import datawave.microservice.query.QueryImpl; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.webservice.common.remote.RemoteHttpService; -import datawave.webservice.common.remote.RemoteQueryService; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.result.edge.DefaultEdge; import datawave.webservice.query.result.edge.EdgeBase; import datawave.webservice.result.BaseQueryResponse; @@ -136,27 +137,27 @@ public TestRemoteQueryService(GenericResponse createResponse, BaseQueryR } @Override - public GenericResponse createQuery(String queryLogicName, Map> 
queryParameters, Object callerObject) { + public GenericResponse createQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { return createResponse; } @Override - public BaseQueryResponse next(String id, Object callerObject) { + public BaseQueryResponse next(String id, ProxiedUserDetails callerObject) { return nextResponses.poll(); } @Override - public VoidResponse close(String id, Object callerObject) { + public VoidResponse close(String id, ProxiedUserDetails callerObject) { return new VoidResponse(); } @Override - public GenericResponse planQuery(String queryLogicName, Map> queryParameters, Object callerObject) { + public GenericResponse planQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { throw new UnsupportedOperationException(); } @Override - public GenericResponse planQuery(String id, Object callerObject) { + public GenericResponse planQuery(String id, ProxiedUserDetails callerObject) { throw new UnsupportedOperationException(); } diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicHttpTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicHttpTest.java index ce34f2c2463..630bbd2b11b 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicHttpTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicHttpTest.java @@ -48,17 +48,17 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.result.event.DefaultResponseObjectFactory; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.DnUtils; import datawave.webservice.common.json.DefaultMapperDecorator; import 
datawave.webservice.common.remote.TestJSSESecurityDomain; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.remote.RemoteQueryServiceImpl; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.DefaultEventQueryResponse; import datawave.webservice.result.GenericResponse; @@ -214,7 +214,7 @@ public void after() { @Test public void testRemoteQuery() throws Exception { - logic.setPrincipal(new DatawavePrincipal(commonName)); + logic.setCurrentUser(new DatawavePrincipal(commonName)); QueryImpl settings = new QueryImpl(); settings.setQuery(query); GenericQueryConfiguration config = logic.initialize(null, settings, null); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicTest.java index e25c6682166..f7000e34f5e 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/RemoteEventQueryLogicTest.java @@ -20,13 +20,14 @@ import com.google.common.collect.Lists; import com.google.common.collect.Sets; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.remote.RemoteQueryService; +import datawave.microservice.query.QueryImpl; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.webservice.common.remote.RemoteHttpService; -import 
datawave.webservice.common.remote.RemoteQueryService; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; import datawave.webservice.query.result.event.EventBase; @@ -116,27 +117,27 @@ public TestRemoteQueryService(GenericResponse createResponse, BaseQueryR } @Override - public GenericResponse createQuery(String queryLogicName, Map> queryParameters, Object callerObject) { + public GenericResponse createQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { return createResponse; } @Override - public BaseQueryResponse next(String id, Object callerObject) { + public BaseQueryResponse next(String id, ProxiedUserDetails callerObject) { return nextResponses.poll(); } @Override - public VoidResponse close(String id, Object callerObject) { + public VoidResponse close(String id, ProxiedUserDetails callerObject) { return new VoidResponse(); } @Override - public GenericResponse planQuery(String queryLogicName, Map> queryParameters, Object callerObject) { + public GenericResponse planQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { throw new UnsupportedOperationException(); } @Override - public GenericResponse planQuery(String id, Object callerObject) { + public GenericResponse planQuery(String id, ProxiedUserDetails callerObject) { throw new UnsupportedOperationException(); } diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/ShardQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/ShardQueryLogicTest.java index 541c85301f9..8a0be8427ed 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/ShardQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/ShardQueryLogicTest.java @@ -35,8 +35,11 @@ import com.google.common.collect.Sets; 
import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryTestTableHelper; import datawave.query.RebuildingScannerTestHelper; import datawave.query.function.deserializer.KryoDocumentDeserializer; @@ -45,9 +48,6 @@ import datawave.query.util.WiseGuysIngest; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.iterator.DatawaveTransformIterator; import datawave.webservice.query.result.event.DefaultField; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.BaseQueryResponse; @@ -135,7 +135,7 @@ public static JavaArchive createDeployment() throws Exception { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/async/event/VisitorFunctionTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/async/event/VisitorFunctionTest.java index b2a2f1a5520..73111fcdc88 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/async/event/VisitorFunctionTest.java +++ 
b/warehouse/query-core/src/test/java/datawave/query/tables/async/event/VisitorFunctionTest.java @@ -22,6 +22,8 @@ import org.junit.Before; import org.junit.Test; +import datawave.core.query.configuration.QueryData; +import datawave.microservice.query.Query; import datawave.query.config.ShardQueryConfiguration; import datawave.query.exceptions.DatawaveFatalQueryException; import datawave.query.iterator.QueryIterator; @@ -32,7 +34,7 @@ import datawave.query.tables.async.ScannerChunk; import datawave.query.util.MetadataHelper; import datawave.query.util.MockMetadataHelper; -import datawave.webservice.query.Query; +import datawave.util.TableName; public class VisitorFunctionTest extends EasyMockSupport { private VisitorFunction function; @@ -98,7 +100,14 @@ public void underTermThresholdTest() throws IOException, TableNotFoundException, iteratorSetting.addOption(QueryOptions.QUERY, "FIELD1 == 'a'"); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery("FIELD1 == 'a'") + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // @formatter:on + ScannerChunk chunk = new ScannerChunk(options, qd.getRanges(), qd); replayAll(); @@ -136,7 +145,15 @@ public void overTermThresholdTest() throws IOException, TableNotFoundException, iteratorSetting.addOption(QueryOptions.QUERY, query); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery(query) + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // @formatter:on + + 
ScannerChunk chunk = new ScannerChunk(options, qd.getRanges(), qd); replayAll(); @@ -182,7 +199,14 @@ public void overIvaratorTermThresholdTest() throws IOException, TableNotFoundExc iteratorSetting.addOption(QueryOptions.QUERY, query); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery(query) + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // @formatter:on + ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0")), qd); replayAll(); @@ -230,7 +254,14 @@ public void overTermThresholdCantReduceTest() throws IOException, TableNotFoundE iteratorSetting.addOption(QueryOptions.QUERY, query); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery(query) + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // @formatter:on + ScannerChunk chunk = new ScannerChunk(options, qd.getRanges(), qd); replayAll(); @@ -267,7 +298,14 @@ public void overTermThresholdAfterFirstReductionOverrideSecondTest() throws IOEx iteratorSetting.addOption(QueryOptions.QUERY, query); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery(query) + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // 
@formatter:on + ScannerChunk chunk = new ScannerChunk(options, qd.getRanges(), qd); replayAll(); @@ -314,7 +352,14 @@ public void rangeOverTermThresholdTest() throws IOException, TableNotFoundExcept iteratorSetting.addOption(QueryOptions.QUERY, query); options.addScanIterator(iteratorSetting); - ScannerChunk chunk = new ScannerChunk(options, Collections.singleton(new Range("20210101_0", "20210101_0"))); + // @formatter:off + QueryData qd = new QueryData() + .withTableName(TableName.SHARD) + .withQuery(query) + .withRanges(Collections.singleton(new Range("20210101_0", "20210101_0"))) + .withSettings(Collections.singletonList(iteratorSetting)); + // @formatter:on + ScannerChunk chunk = new ScannerChunk(options, qd.getRanges(), qd); replayAll(); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryTableTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryLogicTest.java similarity index 82% rename from warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryTableTest.java rename to warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryLogicTest.java index 44ae4a511ec..d6d461c50e4 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryTableTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/content/ContentQueryLogicTest.java @@ -21,23 +21,21 @@ import org.apache.commons.collections4.iterators.TransformIterator; import org.junit.Before; import org.junit.Test; -import org.mockito.Mock; import org.powermock.api.easymock.PowerMock; import com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import 
datawave.query.config.ContentQueryConfiguration; import datawave.query.tables.ScannerFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; -public class ContentQueryTableTest { - private ContentQueryTable contentQueryTable; +public class ContentQueryLogicTest { + private ContentQueryLogic contentQueryLogic; private ScannerFactory mockScannerFactory; private BatchScanner mockScanner; private GenericQueryConfiguration mockGenericConfig; @@ -47,58 +45,58 @@ public class ContentQueryTableTest { @Before public void setup() throws TableNotFoundException { - contentQueryTable = new ContentQueryTable(); + contentQueryLogic = new ContentQueryLogic(); mockScannerFactory = mock(ScannerFactory.class); mockScanner = mock(BatchScanner.class); mockGenericConfig = mock(GenericQueryConfiguration.class); mockContentConfig = mock(ContentQueryConfiguration.class); - contentQueryTable.scannerFactory = mockScannerFactory; + contentQueryLogic.scannerFactory = mockScannerFactory; when(mockScannerFactory.newScanner(any(), any(), anyInt(), any())).thenReturn(mockScanner); } @Test public void setupQueryInvalidConfigurationThrowsException() { - assertThrows(QueryException.class, () -> contentQueryTable.setupQuery(mockGenericConfig)); + assertThrows(QueryException.class, () -> contentQueryLogic.setupQuery(mockGenericConfig)); } @Test public void setupQueryValidConfigurationSetsUpScanner() throws Exception { - contentQueryTable.setupQuery(mockContentConfig); + contentQueryLogic.setupQuery(mockContentConfig); verify(mockScanner).setRanges(any()); } @Test public void setupQueryWithViewNameSetsIteratorSetting() throws Exception 
{ - contentQueryTable.viewName = "FOO"; - contentQueryTable.setupQuery(mockContentConfig); + contentQueryLogic.viewName = "FOO"; + contentQueryLogic.setupQuery(mockContentConfig); verify(mockScanner).addScanIterator(any()); } @Test public void setupQueryWithViewNameSetsIteratorSetting2() throws Exception { - contentQueryTable.viewName = "BAR"; - contentQueryTable.setupQuery(mockContentConfig); + contentQueryLogic.viewName = "BAR"; + contentQueryLogic.setupQuery(mockContentConfig); verify(mockScanner).addScanIterator(any()); } @Test public void setupQueryWithViewNameSetsIteratorSetting3() throws Exception { - contentQueryTable.viewName = "BAZ"; - contentQueryTable.setupQuery(mockContentConfig); + contentQueryLogic.viewName = "BAZ"; + contentQueryLogic.setupQuery(mockContentConfig); verify(mockScanner).addScanIterator(any()); } @Test public void setupQueryTableNotFoundThrowsRuntimeException() throws Exception { when(mockScannerFactory.newScanner(any(), any(), anyInt(), any())).thenThrow(TableNotFoundException.class); - assertThrows(RuntimeException.class, () -> contentQueryTable.setupQuery(mockContentConfig)); + assertThrows(RuntimeException.class, () -> contentQueryLogic.setupQuery(mockContentConfig)); } @Test public void testConstructorCopy() throws Exception { // borrowed from TestBaseQueryLogic.java - ContentQueryTable subject = new TestContentQuery(); + ContentQueryLogic subject = new TestContentQuery(); int result1 = subject.getMaxPageSize(); long result2 = subject.getPageByteTrigger(); TransformIterator result3 = subject.getTransformIterator(this.query); @@ -114,7 +112,7 @@ public void testConstructorCopy() throws Exception { public void testContainsDnWithAccess() { // borrowed from TestBaseQueryLogic.java Set dns = Sets.newHashSet("dn=user", "dn=user chain 1", "dn=user chain 2"); - ContentQueryTable logic = new TestContentQuery(); + ContentQueryLogic logic = new TestContentQuery(); // Assert cases given allowedDNs == null. Access should not be blocked at all. 
assertTrue(logic.containsDNWithAccess(dns)); @@ -140,7 +138,7 @@ public void testContainsDnWithAccess() { assertFalse(logic.containsDNWithAccess(Collections.emptySet())); } - private class TestContentQuery extends ContentQueryTable { + private class TestContentQuery extends ContentQueryLogic { // borrowed from TestBaseQueryLogic.java public TestContentQuery() { super(); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/edge/BaseEdgeQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/edge/BaseEdgeQueryTest.java index 6eb55c0e351..3bc60eae1ea 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/edge/BaseEdgeQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/edge/BaseEdgeQueryTest.java @@ -7,8 +7,11 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; +import java.util.Queue; import java.util.Set; import java.util.TimeZone; import java.util.UUID; @@ -16,7 +19,6 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.BatchWriterConfig; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.core.data.Value; @@ -26,21 +28,30 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; +import org.apache.log4j.Logger; import org.junit.Assert; import org.junit.BeforeClass; import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import 
datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicFactory; import datawave.data.normalizer.Normalizer; +import datawave.microservice.query.QueryImpl; import datawave.query.MockAccumuloRecordWriter; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.BaseQueryLogic; +import datawave.webservice.query.exception.QueryException; /** * A base test class to encapsulate everything needed to run query tests against an edge query logic. */ public abstract class BaseEdgeQueryTest { + public static final Logger log = Logger.getLogger(BaseEdgeQueryTest.class); + protected static final boolean protobufEdgeFormat = true; public static final String EDGE_TABLE_NAME = "edge"; @@ -135,15 +146,50 @@ public static List createEdges(String yyyyMMdd) { return retVal; } - public void compareResults(BaseQueryLogic> logic, List expected) { + public void compareResults(BaseQueryLogic> logic, QueryLogicFactory factory, List expected) { int recordsFound = 0; List foundKeys = new ArrayList<>(); - for (Map.Entry entry : logic) { - foundKeys.add(entry.getKey()); - Key k = entry.getKey(); - System.out.println("key = " + k.toStringNoTime()); - Assert.assertTrue(UNEXPECTED_RECORD + " : " + k.toStringNoTime(), expected.contains(k.toStringNoTime())); - recordsFound++; + boolean disableCheckpoint = false; + if (!disableCheckpoint && logic instanceof CheckpointableQueryLogic && ((CheckpointableQueryLogic) logic).isCheckpointable() && factory != null) { + Queue cps = new LinkedList<>(); + GenericQueryConfiguration config = logic.getConfig(); + AccumuloClient client = config.getClient(); + QueryKey queryKey = new QueryKey("default", logic.getConfig().getQuery().getId().toString(), logic.getLogicName()); + cps.addAll(((CheckpointableQueryLogic) logic).checkpoint(queryKey)); + while (!cps.isEmpty()) { + QueryCheckpoint cp = cps.remove(); + // create a new instance of the logic + try { + + logic = 
(BaseQueryLogic>) factory.getQueryLogic(logic.getLogicName()); + } catch (CloneNotSupportedException | QueryException e) { + Assert.fail("Failed to recreate checkpointable query logic for " + logic.getLogicName() + ": " + e.getMessage()); + } + // now reset the logic given the checkpoint + try { + ((CheckpointableQueryLogic) logic).setupQuery(client, config, cp); + } catch (Exception e) { + log.error("Failed to setup query given last checkpoint", e); + Assert.fail("Failed to setup query given last checkpoint: " + e.getMessage()); + } + Iterator> iter = logic.iterator(); + if (iter.hasNext()) { + Map.Entry next = iter.next(); + Key k = next.getKey(); + System.out.println("key = " + k.toStringNoTime()); + Assert.assertTrue(UNEXPECTED_RECORD + " : " + k.toStringNoTime(), expected.contains(k.toStringNoTime())); + recordsFound++; + cps.addAll(((CheckpointableQueryLogic) logic).checkpoint(queryKey)); + } + } + } else { + for (Map.Entry entry : logic) { + foundKeys.add(entry.getKey()); + Key k = entry.getKey(); + System.out.println("key = " + k.toStringNoTime()); + Assert.assertTrue(UNEXPECTED_RECORD + " : " + k.toStringNoTime(), expected.contains(k.toStringNoTime())); + recordsFound++; + } } Assert.assertEquals(UNEXPECTED_NUM_RECORDS, expected.size(), recordsFound); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableEdgeQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableEdgeQueryTest.java new file mode 100644 index 00000000000..c2f977a6e9d --- /dev/null +++ b/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableEdgeQueryTest.java @@ -0,0 +1,17 @@ +package datawave.query.tables.edge; + +import java.util.Set; + +import org.apache.accumulo.core.security.Authorizations; + +import datawave.microservice.query.QueryImpl; + +public class CheckpointableEdgeQueryTest extends EdgeQueryFunctionalTest { + + @Override + public EdgeQueryLogic runLogic(QueryImpl q, Set auths) throws 
Exception { + logic.setCheckpointable(true); + return super.runLogic(q, auths); + } + +} diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableExtendedEdgeQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableExtendedEdgeQueryTest.java new file mode 100644 index 00000000000..98c4324c89f --- /dev/null +++ b/warehouse/query-core/src/test/java/datawave/query/tables/edge/CheckpointableExtendedEdgeQueryTest.java @@ -0,0 +1,19 @@ +package datawave.query.tables.edge; + +import java.util.Set; + +import org.apache.accumulo.core.security.Authorizations; + +import datawave.microservice.query.QueryImpl; +import datawave.query.edge.DefaultExtendedEdgeQueryLogic; +import datawave.query.edge.ExtendedEdgeQueryLogicTest; + +public class CheckpointableExtendedEdgeQueryTest extends ExtendedEdgeQueryLogicTest { + + @Override + public DefaultExtendedEdgeQueryLogic runLogic(QueryImpl q, Set auths) throws Exception { + logic.setCheckpointable(true); + return super.runLogic(q, auths); + } + +} diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogicTest.java index 89fe6346e9d..fbaafdd80aa 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/edge/DefaultEdgeEventQueryLogicTest.java @@ -11,6 +11,9 @@ import org.junit.Before; import org.junit.Test; +import datawave.edge.model.DefaultEdgeModelFieldsFactory; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryParameters; import datawave.query.language.parser.QueryParser; import datawave.query.language.parser.jexl.LuceneToJexlQueryParser; @@ -18,8 +21,6 @@ import datawave.webservice.dictionary.edge.DefaultEdgeDictionary; import 
datawave.webservice.dictionary.edge.DefaultMetadata; import datawave.webservice.dictionary.edge.EventField; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; public class DefaultEdgeEventQueryLogicTest { @@ -29,7 +30,8 @@ public class DefaultEdgeEventQueryLogicTest { @Before public void setUp() throws Exception { - logic.setEdgeQueryModel(EdgeQueryModel.loadModel("/DATAWAVE_EDGE.xml")); + logic.setEdgeModelFieldsFactory(new DefaultEdgeModelFieldsFactory()); + logic.setEdgeQueryModel(EdgeQueryModel.loadModel("/DATAWAVE_EDGE.xml", logic.getEdgeFields())); // Create the results of the DatawaveMetadata table scan for edge data metadata = new LinkedList<>(); diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/edge/EdgeQueryFunctionalTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/edge/EdgeQueryFunctionalTest.java index 7b7dc5ad0ee..5f810580812 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/edge/EdgeQueryFunctionalTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/edge/EdgeQueryFunctionalTest.java @@ -1,33 +1,53 @@ package datawave.query.tables.edge; import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; import java.util.List; +import java.util.Map; import java.util.Set; import javax.inject.Inject; import org.apache.accumulo.core.security.Authorizations; -import org.apache.log4j.Logger; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.asset.StringAsset; import org.jboss.shrinkwrap.api.spec.JavaArchive; +import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; - -import datawave.configuration.spring.SpringBean; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; +import org.springframework.context.ApplicationContext; + +import 
datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.microservice.query.QueryImpl; +import datawave.security.authorization.ProxiedUserDetails; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import datawave.webservice.query.exception.DatawaveErrorCode; +import datawave.webservice.query.exception.QueryException; +import datawave.webservice.query.exception.UnauthorizedQueryException; @RunWith(Arquillian.class) public class EdgeQueryFunctionalTest extends BaseEdgeQueryTest { - private static final Logger log = Logger.getLogger(EdgeQueryFunctionalTest.class); + protected EdgeQueryLogic logic; @Inject - @SpringBean(name = "RewriteEdgeQuery") - EdgeQueryLogic logic; + protected ApplicationContext applicationContext; + + protected QueryLogicFactory factory = new TestQueryLogicFactory(); + + @Before + public void setup() throws Exception { + logic = (EdgeQueryLogic) createLogic(); + } + + public QueryLogic createLogic() throws Exception { + return factory.getQueryLogic("RewriteEdgeQuery"); + } /* * NOTE: If you're trying to debug within your IDE's debugger and you're getting Spring errors related to EdgeModelContext.xml or NoClassDefFound related to @@ -42,9 +62,10 @@ public class EdgeQueryFunctionalTest extends BaseEdgeQueryTest { @Deployment public static JavaArchive createDeployment() throws Exception { return ShrinkWrap.create(JavaArchive.class) - .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event") + .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "datawave.webservice.query.result.event", + "datawave.core.query.result.event") 
.deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); @@ -67,7 +88,7 @@ public void testSingleQuery() throws Exception { expected.add("pluto%00;neptune AdjacentPlanets/TO-FROM:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -81,7 +102,7 @@ public void testSingleQueryMixedCase() throws Exception { expected.add("pluto%00;neptune AdjacentPlanets/TO-FROM:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -95,7 +116,7 @@ public void testSingleQueryLowerCase() throws Exception { expected.add("pluto%00;neptune AdjacentPlanets/TO-FROM:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -111,7 +132,7 @@ public void testSinglePatternQuery() throws Exception { expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -122,7 +143,7 @@ public void testANDQuery() throws Exception { List expected = new ArrayList<>(); expected.add("earth%00;moon AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA 
[B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -135,7 +156,7 @@ public void testANDQueryWithPatterns() throws Exception { expected.add("earth%00;venus AdjacentPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -154,7 +175,7 @@ public void testORQuery() throws Exception { expected.add("pluto%00;neptune AdjacentPlanets/TO-FROM:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -174,7 +195,7 @@ public void testORQueryWithPattern() throws Exception { expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); expected.add("eris%00;dysnomia AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -187,7 +208,7 @@ public void testCombinationQuery() throws Exception { expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("asteroid_belt%00;mars AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -201,7 +222,7 @@ public void testCombinationQueryWithPattern() throws Exception { expected.add("asteroid_belt%00;mars AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -221,7 +242,7 @@ public void testComplexQuery1() throws Exception { expected.add("asteroid_belt%00;ceres 
AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("asteroid_belt%00;jupiter AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -248,7 +269,7 @@ public void testComplexQueryWithPatterns() throws Exception { expected.add("mars%00;jupiter AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -264,7 +285,7 @@ public void testSourceAndSinkPatterns() throws Exception { expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -275,7 +296,7 @@ public void testTypeQuery() throws Exception { List expected = new ArrayList<>(); expected.add("jupiter%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -287,7 +308,7 @@ public void testPatternWithType() throws Exception { expected.add("mars%00;earth AdjacentPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars%00;jupiter AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -298,7 +319,7 @@ public void testPatternWithTypeAndRelation() throws Exception { List expected = new ArrayList<>(); expected.add("mars%00;earth AdjacentPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -314,7 +335,7 @@ 
public void testPatternWithRelation() throws Exception { expected.add("mars%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -328,7 +349,7 @@ public void testPatternWithLeadingWildcard() throws Exception { expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -345,7 +366,7 @@ public void testLiteralWithPatternAndRelation() throws Exception { expected.add("ceres STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("eris STATS/ACTIVITY/DwarfPlanets/TO:20150713/COSMOS_DATA [D]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -359,7 +380,7 @@ public void testNOT() throws Exception { expected.add("mars%00;earth AdjacentPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mars%00;asteroid_belt AdjacentCelestialBodies/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -377,7 +398,7 @@ public void testPatternWithNot() throws Exception { expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("moon%00;earth AdjacentCelestialBodies/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - 
compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -397,7 +418,7 @@ public void testRegExQueryStatsOn() throws Exception { expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars%00;ceres AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [B]"); expected.add("mars%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -415,7 +436,7 @@ public void testRegExQueryStatsOff() throws Exception { expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars%00;ceres AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [B]"); expected.add("mars%00;ceres AdjacentDwarfPlanets/TO-FROM:20150713/COSMOS_DATA-COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -428,8 +449,7 @@ public void testRelationStatsOn() throws Exception { expected.add("mercury%00;venus AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("mercury STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); - + compareResults(logic, factory, expected); } @Test(expected = UnsupportedOperationException.class) @@ -441,7 +461,7 @@ public void testUnknownFunction() throws Exception { List expected = new ArrayList<>(); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -450,7 +470,6 @@ public void testComplexQuery2() throws Exception { QueryImpl q = configQuery( "(SOURCE == 'EARTH' || SOURCE == 'SUN' || SOURCE == 'ASTEROID_BELT') &&" + " (SINK == 'MARS' || SINK == 'MOON' || SINK == 'JUPITER')", auths); - EdgeQueryLogic logic = runLogic(q, auths); List expected = new ArrayList<>(); @@ -461,7 +480,7 @@ public void 
testComplexQuery2() throws Exception { expected.add("earth STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); expected.add("sun STATS/ACTIVITY/Stars/TO:20150713/COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -476,7 +495,7 @@ public void testComplexQuery3() throws Exception { expected.add("asteroid_belt%00;mars AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("asteroid_belt%00;jupiter AdjacentCelestialBodies/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); expected.add("mars STATS/ACTIVITY/Planets/TO:20150713/COSMOS_DATA [B]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -490,7 +509,7 @@ public void testComplexQuery4() throws Exception { List expected = new ArrayList<>(); expected.add("mars%00;jupiter AdjacentPlanets/FROM-TO:20150713/COSMOS_DATA-COSMOS_DATA [A]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); } @Test @@ -504,7 +523,53 @@ public void testAttribute1() throws Exception { expected.add("pluto%00;neptune AdjacentPlanets/TO-FROM:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); expected.add("pluto STATS/ACTIVITY/DwarfPlanets/TO:20150713/NEW_HORIZONS [D]"); expected.add("pluto%00;charon AdjacentCelestialBodies/FROM-TO:20150713/NEW_HORIZONS-NEW_HORIZONS [C]"); - compareResults(logic, expected); + compareResults(logic, factory, expected); + } + + public class TestQueryLogicFactory implements QueryLogicFactory { + + @Override + public QueryLogic getQueryLogic(String name, ProxiedUserDetails currentUser) throws QueryException { + Set userRoles = new HashSet<>(currentUser.getPrimaryUser().getRoles()); + return getQueryLogic(name, userRoles, true); + } + + @Override + public QueryLogic getQueryLogic(String name) throws QueryException { + return getQueryLogic(name, null, false); + } + + private QueryLogic getQueryLogic(String name, Collection userRoles, boolean checkRoles) throws QueryException { 
+ QueryLogic logic; + try { + logic = (QueryLogic) applicationContext.getBean(name); + } catch (ClassCastException | NoSuchBeanDefinitionException cce) { + throw new IllegalArgumentException("Logic name '" + name + "' does not exist in the configuration"); + } + + if (checkRoles && !logic.canRunQuery(userRoles)) { + throw new UnauthorizedQueryException(DatawaveErrorCode.MISSING_REQUIRED_ROLES, + new IllegalAccessException("User does not have required role(s): " + logic.getRequiredRoles())); + } + + logic.setLogicName(name); + return logic; + } + + @Override + public List> getQueryLogicList() { + Map logicMap = applicationContext.getBeansOfType(QueryLogic.class); + + List> logicList = new ArrayList<>(); + + for (Map.Entry entry : logicMap.entrySet()) { + QueryLogic logic = entry.getValue(); + logic.setLogicName(entry.getKey()); + logicList.add(logic); + } + return logicList; + + } } } diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/facets/FacetedQueryLogicTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/facets/FacetedQueryLogicTest.java index de2827d6019..257bfeefc45 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/facets/FacetedQueryLogicTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/facets/FacetedQueryLogicTest.java @@ -25,6 +25,7 @@ import com.google.common.collect.Sets; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.helpers.PrintUtility; import datawave.marking.MarkingFunctions; import datawave.query.QueryTestTableHelper; @@ -48,7 +49,6 @@ import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; public class FacetedQueryLogicTest extends AbstractFunctionalQuery { diff --git 
a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIndexQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIndexQueryTest.java index 28f13d5ed96..d295bac7a3e 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIndexQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIndexQueryTest.java @@ -37,8 +37,11 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.ingest.mapreduce.handler.ssdeep.SSDeepIndexHandler; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.query.tables.ssdeep.util.SSDeepTestUtil; import datawave.query.testframework.AbstractDataTypeConfig; @@ -49,9 +52,6 @@ import datawave.util.ssdeep.NGramByteHashGenerator; import datawave.util.ssdeep.NGramGenerator; import datawave.util.ssdeep.NGramTuple; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.runner.RunningQuery; diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIngestQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIngestQueryTest.java index 636210683d0..e79b8bc90e0 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIngestQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepIngestQueryTest.java @@ -23,9 +23,14 @@ import 
com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.logic.AbstractQueryLogicTransformer; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.helpers.PrintUtility; import datawave.ingest.mapreduce.handler.ssdeep.SSDeepIndexHandler; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.query.RebuildingScannerTestHelper; import datawave.query.tables.ssdeep.testframework.SSDeepDataType; @@ -42,11 +47,6 @@ import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.AbstractQueryLogicTransformer; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.ResponseObjectFactory; @@ -103,7 +103,8 @@ public void setupQuery() { discoveryQueryLogic.setTableName("shardIndex"); discoveryQueryLogic.setIndexTableName("shardIndex"); discoveryQueryLogic.setReverseIndexTableName("shardReverseIndex"); - discoveryQueryLogic.setModelTableName("metadata"); + discoveryQueryLogic.setMetadataTableName("metadata"); + discoveryQueryLogic.setModelName("DATAWAVE"); discoveryQueryLogic.setMarkingFunctions(markingFunctions); discoveryQueryLogic.setMetadataHelperFactory(metadataHelperFactory); discoveryQueryLogic.setResponseObjectFactory(responseFactory); diff --git 
a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTest.java index 6ed3580612c..fdc85c32fc2 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTest.java @@ -32,17 +32,17 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.ingest.mapreduce.handler.ssdeep.SSDeepIndexHandler; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.query.tables.ssdeep.util.SSDeepTestUtil; import datawave.query.testframework.AbstractDataTypeConfig; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.runner.RunningQuery; import datawave.webservice.result.EventQueryResponseBase; diff --git a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformerTest.java b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformerTest.java index b8f7bf1d671..fe52feb5749 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformerTest.java +++ 
b/warehouse/query-core/src/test/java/datawave/query/tables/ssdeep/SSDeepSimilarityQueryTransformerTest.java @@ -21,6 +21,7 @@ import org.powermock.modules.junit4.PowerMockRunner; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; import datawave.query.config.SSDeepSimilarityQueryConfiguration; import datawave.util.ssdeep.NGramGenerator; import datawave.util.ssdeep.NGramTuple; @@ -28,7 +29,6 @@ import datawave.util.ssdeep.SSDeepHashEditDistanceScorer; import datawave.util.ssdeep.SSDeepHashScorer; import datawave.util.ssdeep.SSDeepNGramOverlapScorer; -import datawave.webservice.query.Query; import datawave.webservice.query.result.event.DefaultEvent; import datawave.webservice.query.result.event.DefaultField; import datawave.webservice.query.result.event.EventBase; diff --git a/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractDataTypeConfig.java b/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractDataTypeConfig.java index 5a4e4db1e00..f040c6d548a 100644 --- a/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractDataTypeConfig.java +++ b/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractDataTypeConfig.java @@ -2,7 +2,9 @@ import static datawave.query.testframework.FileType.CSV; +import java.io.File; import java.io.IOException; +import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; @@ -17,6 +19,7 @@ import org.apache.accumulo.core.security.Authorizations; import org.apache.accumulo.core.security.ColumnVisibility; +import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; import org.apache.log4j.Logger; import org.junit.Assert; @@ -144,6 +147,14 @@ protected AbstractDataTypeConfig(final String dt, final String ingestFile, final // RawDataManager manager = mgr; URL url = this.getClass().getClassLoader().getResource(ingestFile); Assert.assertNotNull("unable to resolve ingest 
file(" + ingestFile + ")", url); + // if this url is not a file (e.g. nested in a jar), then make it a file + if (url.getProtocol().startsWith("jar")) { + InputStream in = this.getClass().getClassLoader().getResourceAsStream(ingestFile); + File tempFile = File.createTempFile(String.valueOf(in.hashCode()), ".tmp"); + tempFile.deleteOnExit(); + FileUtils.copyInputStreamToFile(in, tempFile); + url = tempFile.toURI().toURL(); + } this.ingestPath = url.toURI(); this.dataType = dt; this.fieldConfig = config; diff --git a/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractFunctionalQuery.java b/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractFunctionalQuery.java index a6ac089850a..46a2c1a8384 100644 --- a/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractFunctionalQuery.java +++ b/warehouse/query-core/src/test/java/datawave/query/testframework/AbstractFunctionalQuery.java @@ -51,8 +51,14 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.result.event.DefaultResponseObjectFactory; import datawave.data.type.Type; import datawave.marking.MarkingFunctions.Default; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.QueryMetricFactory; import datawave.microservice.querymetric.QueryMetricFactoryImpl; @@ -73,12 +79,10 @@ import datawave.query.util.MetadataHelperFactory; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import 
datawave.security.util.DnUtils; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.result.event.DefaultResponseObjectFactory; +import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.runner.RunningQuery; @@ -144,8 +148,9 @@ public enum TestCities { protected final RawDataManager dataManager; protected Authorizations auths; protected String documentKey; + protected QueryLogicFactory logicFactory; protected ShardQueryLogic logic; - private CountingShardQueryLogic countLogic = new CountingShardQueryLogic(); + private CountingShardQueryLogic countLogic; protected QueryLogicTestHarness testHarness; protected DatawavePrincipal principal; @@ -155,31 +160,37 @@ protected AbstractFunctionalQuery(final RawDataManager mgr) { this.dataManager = mgr; } - protected ShardQueryLogic createQueryLogic() { + protected ShardQueryLogic createShardQueryLogic() { return new ShardQueryLogic(); } - @Before - public void querySetUp() throws IOException { - log.debug("--------- querySetUp ---------"); + private ShardQueryLogic createQueryLogic() { + ShardQueryLogic logic = createShardQueryLogic(); + QueryTestTableHelper.configureLogicToScanTables(logic); - this.logic = createQueryLogic(); - QueryTestTableHelper.configureLogicToScanTables(this.logic); + logic.setFullTableScanEnabled(false); + logic.setIncludeDataTypeAsField(true); - this.logic.setFullTableScanEnabled(false); - this.logic.setIncludeDataTypeAsField(true); + logic.setDateIndexHelperFactory(new DateIndexHelperFactory()); + logic.setMarkingFunctions(new Default()); + logic.setMetadataHelperFactory(new MetadataHelperFactory()); + logic.setQueryPlanner(new DefaultQueryPlanner()); + 
logic.setResponseObjectFactory(new DefaultResponseObjectFactory()); - this.logic.setDateIndexHelperFactory(new DateIndexHelperFactory()); - this.logic.setMarkingFunctions(new Default()); - this.logic.setMetadataHelperFactory(new MetadataHelperFactory()); - this.logic.setResponseObjectFactory(new DefaultResponseObjectFactory()); + logic.setCollectTimingDetails(true); + logic.setLogTimingDetails(true); + logic.setMinimumSelectivity(0.03D); + logic.setMaxIndexScanTimeMillis(5000); - this.logic.setCollectTimingDetails(true); - this.logic.setLogTimingDetails(true); - this.logic.setMinimumSelectivity(0.03D); - this.logic.setMaxIndexScanTimeMillis(5000); + return logic; + } + + protected CountingShardQueryLogic createCountingShardQueryLogic() { + return new CountingShardQueryLogic(); + } - // count logic + private CountingShardQueryLogic createCountingQueryLogic() { + CountingShardQueryLogic countLogic = createCountingShardQueryLogic(); countLogic.setIncludeDataTypeAsField(true); countLogic.setFullTableScanEnabled(false); @@ -190,6 +201,61 @@ public void querySetUp() throws IOException { countLogic.setResponseObjectFactory(new DefaultResponseObjectFactory()); QueryTestTableHelper.configureLogicToScanTables(countLogic); + return countLogic; + } + + private class TestQueryLogicFactory implements QueryLogicFactory { + + /** + * @param name + * name of query logic + * @param currentUser + * @return new instance of QueryLogic class + * @throws IllegalArgumentException + * if query logic name does not exist + */ + @Override + public QueryLogic getQueryLogic(String name, ProxiedUserDetails currentUser) throws IllegalArgumentException, CloneNotSupportedException { + QueryLogic logic = null; + if (name.equals("EventQuery")) { + logic = createQueryLogic(); + } else if (name.equals("CountQuery")) { + logic = createCountingQueryLogic(); + } else { + throw new IllegalArgumentException("Unknown query logic " + name); + } + logic.setLogicName(name); + return logic; + } + + /** + * 
@param name + * name of query logic + * @return new instance of QueryLogic class + * @throws IllegalArgumentException + * if query logic name does not exist + */ + @Override + public QueryLogic getQueryLogic(String name) throws IllegalArgumentException, CloneNotSupportedException { + return getQueryLogic(name, null); + } + + @Override + public List> getQueryLogicList() { + try { + List> list = new ArrayList<>(); + list.add(getQueryLogic("EventQuery", null)); + list.add(getQueryLogic("CountQuery", null)); + return list; + } catch (Exception e) { + throw new RuntimeException("Failed to create query logic list"); + } + } + } + + @Before + public void querySetUp() throws IOException { + log.debug("--------- querySetUp ---------"); // init must set auths testInit(); @@ -201,7 +267,16 @@ public void querySetUp() throws IOException { SubjectIssuerDNPair dn = SubjectIssuerDNPair.of("userDn", "issuerDn"); DatawaveUser user = new DatawaveUser(dn, DatawaveUser.UserType.USER, Sets.newHashSet(this.auths.toString().split(",")), null, null, -1L); this.principal = new DatawavePrincipal(Collections.singleton(user)); + this.testHarness = new QueryLogicTestHarness(this); + + this.logicFactory = new TestQueryLogicFactory(); + try { + this.logic = (ShardQueryLogic) (logicFactory.getQueryLogic("EventQuery", principal)); + this.countLogic = (CountingShardQueryLogic) (logicFactory.getQueryLogic("CountQuery", principal)); + } catch (CloneNotSupportedException | QueryException e) { + throw new RuntimeException("Unable to create query logics", e); + } } // ============================================ @@ -441,7 +516,7 @@ protected void runTestQuery(Collection expected, String queryStr, Date s log.debug("Plan: " + config.getQueryString()); } } - testHarness.assertLogicResults(this.logic, expected, checkers); + testHarness.assertLogicResults(this.logic, this.logicFactory, expected, checkers); } /** diff --git a/warehouse/query-core/src/test/java/datawave/query/testframework/BaseRawData.java 
b/warehouse/query-core/src/test/java/datawave/query/testframework/BaseRawData.java index a825c44dd74..eac259a7a22 100644 --- a/warehouse/query-core/src/test/java/datawave/query/testframework/BaseRawData.java +++ b/warehouse/query-core/src/test/java/datawave/query/testframework/BaseRawData.java @@ -10,7 +10,6 @@ import java.util.Set; import org.apache.log4j.Logger; -import org.bouncycastle.util.Strings; import org.junit.Assert; import datawave.data.normalizer.Normalizer; @@ -89,7 +88,7 @@ public void processFields(final String datatype, final String[] fields) { final Set values = new HashSet<>(); // convert multi-value fields into a set of values if (isMultiValueField(header)) { - String[] multi = Strings.split(fields[n], RawDataManager.MULTIVALUE_SEP_CHAR); + String[] multi = fields[n].split(RawDataManager.MULTIVALUE_SEP); for (String s : multi) { if (norm instanceof NumberNormalizer) { values.add(s); @@ -99,7 +98,7 @@ public void processFields(final String datatype, final String[] fields) { } } else if (isTokenizedField(header)) { // convert field to a list of tokens that include the complete field - String[] multi = Strings.split(fields[n], ' '); + String[] multi = fields[n].split(" "); // add full field as an event values.add(fields[n]); for (String s : multi) { diff --git a/warehouse/query-core/src/test/java/datawave/query/testframework/QueryLogicTestHarness.java b/warehouse/query-core/src/test/java/datawave/query/testframework/QueryLogicTestHarness.java index 3cdf08174bc..ddabf27f003 100644 --- a/warehouse/query-core/src/test/java/datawave/query/testframework/QueryLogicTestHarness.java +++ b/warehouse/query-core/src/test/java/datawave/query/testframework/QueryLogicTestHarness.java @@ -1,17 +1,30 @@ package datawave.query.testframework; +import java.io.IOException; import java.util.Collection; import java.util.HashSet; +import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; +import 
java.util.Queue; import java.util.Set; +import org.apache.accumulo.core.client.AccumuloClient; +import org.apache.accumulo.core.client.Connector; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.log4j.Logger; import org.junit.Assert; +import datawave.core.query.configuration.CheckpointableQueryConfiguration; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.CheckpointableQueryLogic; +import datawave.core.query.logic.QueryCheckpoint; +import datawave.core.query.logic.QueryKey; +import datawave.core.query.logic.QueryLogicFactory; import datawave.data.type.Type; import datawave.query.attributes.Attribute; import datawave.query.attributes.AttributeFactory; @@ -21,7 +34,7 @@ import datawave.query.attributes.TypeAttribute; import datawave.query.function.deserializer.KryoDocumentDeserializer; import datawave.query.iterator.profile.FinalDocumentTrackingIterator; -import datawave.webservice.query.logic.BaseQueryLogic; +import datawave.webservice.query.exception.QueryException; public class QueryLogicTestHarness { @@ -58,17 +71,26 @@ public interface TestResultParser { // ============================================= // assert methods + private void dumpCp(String start, QueryCheckpoint cp) { + cp.getQueries().iterator().forEachRemaining(qd -> { + System.out.println(">>>> " + start + ": " + qd.getRanges() + " -> " + qd.getLastResult()); + }); + } + /** * Determines if the correct results were obtained for a query. 
* * @param logic * key/value response data + * @param factory + * a logic factory for teardown/rebuilds if the logic is checkpointable * @param expected * list of key values expected within response data * @param checkers * list of additional validation methods */ - public void assertLogicResults(BaseQueryLogic> logic, Collection expected, List checkers) { + public void assertLogicResults(BaseQueryLogic> logic, QueryLogicFactory factory, Collection expected, + List checkers) throws IOException, ClassNotFoundException { Set actualResults = new HashSet<>(); if (log.isDebugEnabled()) { @@ -78,54 +100,43 @@ public void assertLogicResults(BaseQueryLogic> logic, Colle } } - for (Map.Entry entry : logic) { - if (FinalDocumentTrackingIterator.isFinalDocumentKey(entry.getKey())) { - continue; + boolean disableCheckpoint = false; + if (!disableCheckpoint && logic instanceof CheckpointableQueryLogic && ((CheckpointableQueryLogic) logic).isCheckpointable() && factory != null) { + Queue cps = new LinkedList<>(); + GenericQueryConfiguration config = logic.getConfig(); + AccumuloClient client = config.getClient(); + QueryKey queryKey = new QueryKey("default", logic.getConfig().getQuery().getId().toString(), logic.getLogicName()); + // replace the config with that which would have been stored + if (config instanceof CheckpointableQueryConfiguration && ((CheckpointableQueryLogic) logic).isCheckpointable()) { + config = ((CheckpointableQueryConfiguration) config).checkpoint(); } - final Document document = this.deserializer.apply(entry).getValue(); - - // check all of the types to ensure that all are keepers as defined in the - // AttributeFactory class - int count = 0; - for (Attribute> attribute : document.getAttributes()) { - if (attribute instanceof TimingMetadata) { - // ignore - } else if (attribute instanceof Attributes) { - Attributes attrs = (Attributes) attribute; - Collection> types = new HashSet<>(); - for (Attribute> attr : attrs.getAttributes()) { - count++; - if (attr 
instanceof TypeAttribute) { - Type> type = ((TypeAttribute) attr).getType(); - if (Objects.nonNull(type)) { - types.add(type.getClass()); - } - } - } - Assert.assertEquals(AttributeFactory.getKeepers(types), types); - } else { - count++; + cps.addAll(((CheckpointableQueryLogic) logic).checkpoint(queryKey)); + while (!cps.isEmpty()) { + QueryCheckpoint cp = cps.remove(); + // create a new instance of the logic + try { + logic = (BaseQueryLogic>) factory.getQueryLogic(logic.getLogicName()); + } catch (CloneNotSupportedException | QueryException e) { + Assert.fail("Failed to recreate checkpointable query logic for " + logic.getLogicName() + ": " + e.getMessage()); + } + // now reset the logic given the checkpoint + try { + ((CheckpointableQueryLogic) logic).setupQuery(client, config, cp); + } catch (Exception e) { + log.error("Failed to setup query given last checkpoint", e); + Assert.fail("Failed to setup query given last checkpoint: " + e.getMessage()); + } + Iterator> iter = logic.iterator(); + if (iter.hasNext()) { + Map.Entry next = iter.next(); + actualResults = processResult(actualResults, next, checkers); + cps.addAll(((CheckpointableQueryLogic) logic).checkpoint(queryKey)); } } - - // ignore empty documents (possible when only passing FinalDocument back) - if (count == 0) { - continue; - } - - // parse the document - String extractedResult = this.parser.parse(entry.getKey(), document); - log.debug("result(" + extractedResult + ") key(" + entry.getKey() + ") document(" + document + ")"); - - // verify expected results - Assert.assertNotNull("extracted result", extractedResult); - Assert.assertFalse("duplicate result(" + extractedResult + ") key(" + entry.getKey() + ")", actualResults.contains(extractedResult)); - actualResults.add(extractedResult); - - // perform any custom assert checks on document - for (final DocumentChecker check : checkers) { - check.assertValid(document); + } else { + for (Map.Entry entry : logic) { + actualResults = 
processResult(actualResults, entry, checkers); } } @@ -150,4 +161,65 @@ public void assertLogicResults(BaseQueryLogic> logic, Colle Assert.assertTrue("expected and actual values do not match", expected.containsAll(actualResults)); Assert.assertTrue("expected and actual values do not match", actualResults.containsAll(expected)); } + + /** + * Given an entry off of the logic iterator, deserialize and check its validity and add to the actualResults + * + * @param actualResults + * @param entry + * @param checkers + */ + private Set processResult(Set actualResults, Map.Entry entry, List checkers) { + if (FinalDocumentTrackingIterator.isFinalDocumentKey(entry.getKey())) { + return actualResults; + } + + final Document document = this.deserializer.apply(entry).getValue(); + + // check all of the types to ensure that all are keepers as defined in the + // AttributeFactory class + int count = 0; + for (Attribute> attribute : document.getAttributes()) { + if (attribute instanceof TimingMetadata) { + // ignore + } else if (attribute instanceof Attributes) { + Attributes attrs = (Attributes) attribute; + Collection> types = new HashSet<>(); + for (Attribute> attr : attrs.getAttributes()) { + count++; + if (attr instanceof TypeAttribute) { + Type> type = ((TypeAttribute) attr).getType(); + if (Objects.nonNull(type)) { + types.add(type.getClass()); + } + } + } + Assert.assertEquals(AttributeFactory.getKeepers(types), types); + } else { + count++; + } + } + + // ignore empty documents (possible when only passing FinalDocument back) + if (count == 0) { + return actualResults; + } + + // parse the document + String extractedResult = this.parser.parse(entry.getKey(), document); + log.debug("result(" + extractedResult + ") key(" + entry.getKey() + ") document(" + document + ")"); + + // verify expected results + Assert.assertNotNull("extracted result", extractedResult); + Assert.assertFalse("duplicate result(" + extractedResult + ") key(" + entry.getKey() + ")", 
actualResults.contains(extractedResult)); + + // perform any custom assert checks on document + for (final DocumentChecker check : checkers) { + check.assertValid(document); + } + + actualResults.add(extractedResult); + return actualResults; + } + } diff --git a/warehouse/query-core/src/test/java/datawave/query/tld/TLDNormalFlattenQueryTest.java b/warehouse/query-core/src/test/java/datawave/query/tld/TLDNormalFlattenQueryTest.java index 3e450c2fb9a..63bc4cf096b 100644 --- a/warehouse/query-core/src/test/java/datawave/query/tld/TLDNormalFlattenQueryTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/tld/TLDNormalFlattenQueryTest.java @@ -14,7 +14,7 @@ public class TLDNormalFlattenQueryTest extends NormalFlattenQueryTest { private static final Logger log = Logger.getLogger(TLDNormalFlattenQueryTest.class); - protected ShardQueryLogic createQueryLogic() { + protected ShardQueryLogic createShardQueryLogic() { return new TLDQueryLogic(); } diff --git a/warehouse/query-core/src/test/java/datawave/query/transformer/DocumentTransformerTest.java b/warehouse/query-core/src/test/java/datawave/query/transformer/DocumentTransformerTest.java index de04e117fe7..0f4b82ed190 100644 --- a/warehouse/query-core/src/test/java/datawave/query/transformer/DocumentTransformerTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/transformer/DocumentTransformerTest.java @@ -19,15 +19,15 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import datawave.core.query.logic.BaseQueryLogic; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.Constants; import datawave.query.DocumentSerialization; import datawave.query.attributes.Document; import datawave.query.attributes.Numeric; import datawave.query.function.deserializer.KryoDocumentDeserializer; -import datawave.webservice.query.Query; -import 
datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.query.result.event.SimpleEvent; import datawave.webservice.query.result.event.SimpleField; diff --git a/warehouse/query-core/src/test/java/datawave/query/transformer/GroupingTest.java b/warehouse/query-core/src/test/java/datawave/query/transformer/GroupingTest.java index 53d74a35ea0..4945b5994b5 100644 --- a/warehouse/query-core/src/test/java/datawave/query/transformer/GroupingTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/transformer/GroupingTest.java @@ -51,8 +51,11 @@ import com.google.common.collect.Lists; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.iterator.DatawaveTransformIterator; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryParameters; import datawave.query.QueryTestTableHelper; import datawave.query.RebuildingScannerTestHelper; @@ -67,9 +70,6 @@ import datawave.query.util.VisibilityWiseGuysIngestWithModel; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.iterator.DatawaveTransformIterator; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.result.DefaultEventQueryResponse; @@ -267,7 +267,7 @@ public static JavaArchive createDeployment() { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") 
.deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/transformer/NoExpansionTests.java b/warehouse/query-core/src/test/java/datawave/query/transformer/NoExpansionTests.java index da5fb7abfd1..340379642d6 100644 --- a/warehouse/query-core/src/test/java/datawave/query/transformer/NoExpansionTests.java +++ b/warehouse/query-core/src/test/java/datawave/query/transformer/NoExpansionTests.java @@ -31,8 +31,10 @@ import org.junit.runner.RunWith; import datawave.configuration.spring.SpringBean; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.helpers.PrintUtility; import datawave.ingest.data.TypeRegistry; +import datawave.microservice.query.QueryImpl; import datawave.query.QueryParameters; import datawave.query.QueryTestTableHelper; import datawave.query.RebuildingScannerTestHelper; @@ -44,8 +46,6 @@ import datawave.test.JexlNodeAssert; import datawave.util.TableName; import datawave.webservice.edgedictionary.RemoteEdgeDictionary; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; /** * Tests for usage of #NO_EXPANSION in queries. 
@@ -95,7 +95,7 @@ public static JavaArchive createDeployment() { .addPackages(true, "org.apache.deltaspike", "io.astefanutti.metrics.cdi", "datawave.query", "org.jboss.logging", "datawave.webservice.query.result.event") .deleteClass(DefaultEdgeEventQueryLogic.class).deleteClass(RemoteEdgeDictionary.class) - .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class).deleteClass(datawave.query.metrics.ShardTableQueryMetricHandler.class) + .deleteClass(datawave.query.metrics.QueryMetricQueryLogic.class) .addAsManifestResource(new StringAsset( "" + "datawave.query.tables.edge.MockAlternative" + ""), "beans.xml"); diff --git a/warehouse/query-core/src/test/java/datawave/query/transformer/UniqueTransformMostRecentTest.java b/warehouse/query-core/src/test/java/datawave/query/transformer/UniqueTransformMostRecentTest.java index 13817ab280d..5487ae7162c 100644 --- a/warehouse/query-core/src/test/java/datawave/query/transformer/UniqueTransformMostRecentTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/transformer/UniqueTransformMostRecentTest.java @@ -11,11 +11,11 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; +import datawave.microservice.query.QueryImpl; import datawave.query.attributes.UniqueGranularity; import datawave.query.iterator.ivarator.IvaratorCacheDirConfig; import datawave.query.tables.ShardQueryLogic; import datawave.query.util.sortedset.FileSortedSet; -import datawave.webservice.query.QueryImpl; public class UniqueTransformMostRecentTest extends UniqueTransformTest { diff --git a/warehouse/query-core/src/test/resources/DATAWAVE_EDGE.xml b/warehouse/query-core/src/test/resources/DATAWAVE_EDGE.xml index 9a19b0294c3..31d6cedfd7f 100644 --- a/warehouse/query-core/src/test/resources/DATAWAVE_EDGE.xml +++ b/warehouse/query-core/src/test/resources/DATAWAVE_EDGE.xml @@ -9,26 +9,26 @@ name representation as needed for the deployment environment's default query syntax. 
--> - - + + - - + + - - + + - - + + - - + + - - + + - - + + diff --git a/warehouse/query-core/src/test/resources/datawave/query/EventQueryLogicFactory.xml b/warehouse/query-core/src/test/resources/datawave/query/EventQueryLogicFactory.xml index 7ac7099c927..ffa8088b8e0 100644 --- a/warehouse/query-core/src/test/resources/datawave/query/EventQueryLogicFactory.xml +++ b/warehouse/query-core/src/test/resources/datawave/query/EventQueryLogicFactory.xml @@ -13,7 +13,7 @@ isn't defined in Accumulo for whatever reason... --> - + @@ -45,11 +45,10 @@ - - - - - + + + + @@ -176,11 +175,5 @@ - - - - - - diff --git a/warehouse/query-core/src/test/resources/datawave/query/QueryLogicFactory.xml b/warehouse/query-core/src/test/resources/datawave/query/QueryLogicFactory.xml index fdfced0fba9..fe08b21c663 100644 --- a/warehouse/query-core/src/test/resources/datawave/query/QueryLogicFactory.xml +++ b/warehouse/query-core/src/test/resources/datawave/query/QueryLogicFactory.xml @@ -6,7 +6,7 @@ xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans-4.0.xsd http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.0.xsd"> - + @@ -99,6 +99,7 @@ + + - - + + + + + + + @@ -158,11 +165,10 @@ - - - + + - + @@ -367,11 +373,6 @@ - - - - - - + diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index 37295d04b9c..63d7f06694b 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index a22534ffe28..c26ad6862b5 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT 
datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index 0c0d1f629eb..39c90d5ba5b 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-accumulo ejb @@ -25,7 +25,18 @@ dnsjava dnsjava - 2.1.8 + + + gov.nsa.datawave.core + datawave-core-common-util + ${project.version} + jboss + + + * + * + + gov.nsa.datawave.microservice diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index b1e744a06ac..240a4eaf46f 100644 --- a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-atom ejb @@ -67,6 +67,11 @@ cdi-api provided + + org.apache.hadoop.thirdparty + hadoop-shaded-guava + provided + org.jboss.logging jboss-logging diff --git a/web-services/atom/src/main/java/datawave/webservice/atom/AtomServiceBean.java b/web-services/atom/src/main/java/datawave/webservice/atom/AtomServiceBean.java index b6f8dd04643..9cc2fc468f2 100644 --- a/web-services/atom/src/main/java/datawave/webservice/atom/AtomServiceBean.java +++ b/web-services/atom/src/main/java/datawave/webservice/atom/AtomServiceBean.java @@ -54,11 +54,11 @@ import org.jboss.resteasy.util.Base64; import datawave.annotation.Required; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.ScannerHelper; import datawave.webservice.accumulo.iterator.MatchingKeySkippingIterator; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; import datawave.webservice.common.exception.DatawaveWebApplicationException; import 
datawave.webservice.common.exception.NoResultsException; import datawave.webservice.query.exception.DatawaveErrorCode; @@ -120,8 +120,12 @@ public void setup() {} public Categories getCategories() { Principal p = ctx.getCallerPrincipal(); Set auths = new HashSet<>(); + String userDN = null; + Collection proxyServers = null; if (p instanceof DatawavePrincipal) { DatawavePrincipal dp = (DatawavePrincipal) p; + userDN = dp.getUserDN().subjectDN(); + proxyServers = dp.getProxyServers(); for (Collection cbAuths : dp.getAuthorizations()) auths.add(new Authorizations(cbAuths.toArray(new String[cbAuths.size()]))); } @@ -131,7 +135,7 @@ public Categories getCategories() { result = abdera.newCategories(); Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(poolName, Priority.NORMAL, trackingMap); + client = connectionFactory.getClient(userDN, proxyServers, poolName, Priority.NORMAL, trackingMap); try (Scanner scanner = ScannerHelper.createScanner(client, tableName + "Categories", auths)) { Map props = new HashMap<>(); props.put(MatchingKeySkippingIterator.ROW_DELIMITER_OPTION, "\0"); @@ -196,8 +200,12 @@ public Feed getFeed(@Required("category") @PathParam("category") String category Principal p = ctx.getCallerPrincipal(); Set auths = new HashSet<>(); + String userDN = null; + Collection proxyServers = null; if (p instanceof DatawavePrincipal) { DatawavePrincipal dp = (DatawavePrincipal) p; + userDN = dp.getUserDN().subjectDN(); + proxyServers = dp.getProxyServers(); for (Collection cbAuths : dp.getAuthorizations()) auths.add(new Authorizations(cbAuths.toArray(new String[cbAuths.size()]))); } @@ -207,7 +215,7 @@ public Feed getFeed(@Required("category") @PathParam("category") String category Date maxDate = new Date(0); try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(poolName, Priority.NORMAL, trackingMap); + 
client = connectionFactory.getClient(userDN, proxyServers, poolName, Priority.NORMAL, trackingMap); result = abdera.newFeed(); result.addAuthor(clustername); @@ -286,8 +294,12 @@ public Entry getEntry(@Required("category") @PathParam("category") String catego // Find out who/what called this method Principal p = ctx.getCallerPrincipal(); Set auths = new HashSet<>(); + String userDN = null; + Collection proxyServers = null; if (p instanceof DatawavePrincipal) { DatawavePrincipal dp = (DatawavePrincipal) p; + userDN = dp.getUserDN().subjectDN(); + proxyServers = dp.getProxyServers(); for (Collection cbAuths : dp.getAuthorizations()) auths.add(new Authorizations(cbAuths.toArray(new String[cbAuths.size()]))); } @@ -296,7 +308,7 @@ public Entry getEntry(@Required("category") @PathParam("category") String catego AccumuloClient client = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(poolName, Priority.NORMAL, trackingMap); + client = connectionFactory.getClient(userDN, proxyServers, poolName, Priority.NORMAL, trackingMap); try (Scanner scanner = ScannerHelper.createScanner(client, tableName, auths)) { scanner.setRange(new Range(category, true, category + "\1", false)); diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index e090a9f5a53..2f162079a8a 100644 --- a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-cached-results ejb @@ -26,6 +26,21 @@ gov.nsa.datawave datawave-core + + gov.nsa.datawave.core + datawave-core-cached-results + ${project.version} + + + log4j + log4j + + + org.slf4j + slf4j-log4j12 + + + gov.nsa.datawave.microservice query-metric-api diff --git a/web-services/cached-results/src/main/java/datawave/webservice/query/database/CachedResultsCleanupBean.java 
b/web-services/cached-results/src/main/java/datawave/webservice/query/database/CachedResultsCleanupBean.java index 85a4625780a..732fb764988 100644 --- a/web-services/cached-results/src/main/java/datawave/webservice/query/database/CachedResultsCleanupBean.java +++ b/web-services/cached-results/src/main/java/datawave/webservice/query/database/CachedResultsCleanupBean.java @@ -28,7 +28,7 @@ import org.apache.log4j.Logger; import datawave.configuration.spring.SpringBean; -import datawave.webservice.results.cached.CachedResultsParameters; +import datawave.core.query.cachedresults.CachedResultsQueryParameters; /** * Removes tables and views from the MySQL database that have been there for 24 hours so that we don't have to purge data from them. @@ -88,7 +88,7 @@ public void cleanup() { ResultSet rs = s.executeQuery(GET_TABLES_TO_REMOVE.replace("?", schema).replace("XYZ", Integer.toString(cachedResultsCleanupConfiguration.getDaysToLive())))) { while (rs.next()) { - String objectName = CachedResultsParameters.validate(rs.getString(1)); + String objectName = CachedResultsQueryParameters.validate(rs.getString(1)); // Drop the table String dropTable = String.format("DROP TABLE %s", objectName); try (Statement statement = con.createStatement()) { @@ -96,7 +96,7 @@ public void cleanup() { } removeCrqRow(objectName); - String viewName = CachedResultsParameters.validate(objectName.replaceFirst("t", "v")); + String viewName = CachedResultsQueryParameters.validate(objectName.replaceFirst("t", "v")); // Drop the associated view String dropView = String.format("DROP VIEW %s", viewName); try (Statement statement = con.createStatement()) { diff --git a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsBean.java b/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsBean.java index 6246258eca2..f9324e1f756 100644 --- 
a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsBean.java +++ b/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedResultsBean.java @@ -20,7 +20,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; -import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -45,7 +44,6 @@ import javax.ejb.TransactionAttributeType; import javax.ejb.TransactionManagement; import javax.ejb.TransactionManagementType; -import javax.enterprise.concurrent.ManagedExecutorService; import javax.inject.Inject; import javax.interceptor.Interceptors; import javax.sql.DataSource; @@ -75,6 +73,8 @@ import org.apache.log4j.Logger; import org.jboss.resteasy.annotations.GZIP; import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; import com.codahale.metrics.annotation.Timed; import com.google.common.base.Preconditions; @@ -83,10 +83,21 @@ import datawave.annotation.GenerateQuerySessionId; import datawave.annotation.Required; import datawave.configuration.spring.SpringBean; +import datawave.core.common.audit.PrivateAuditConstants; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.cachedresults.CachedResultsQueryParameters; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.predict.QueryPredictor; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; import datawave.marking.MarkingFunctions; import datawave.marking.SecurityMarking; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryParameters; +import 
datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.QueryMetric; import datawave.microservice.querymetric.QueryMetricFactory; @@ -96,23 +107,16 @@ import datawave.webservice.common.audit.AuditBean; import datawave.webservice.common.audit.AuditParameters; import datawave.webservice.common.audit.Auditor.AuditType; -import datawave.webservice.common.audit.PrivateAuditConstants; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.common.exception.NoResultsException; import datawave.webservice.common.exception.NotFoundException; import datawave.webservice.common.exception.PreConditionFailedException; import datawave.webservice.common.exception.QueryCanceledException; import datawave.webservice.common.exception.UnauthorizedException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryParameters; import datawave.webservice.query.cache.CachedResultsQueryCache; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean; import datawave.webservice.query.cache.QueryCache; -import datawave.webservice.query.cache.QueryExpirationConfiguration; -import datawave.webservice.query.cache.ResultsPage; import datawave.webservice.query.cache.RunningQueryTimingImpl; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; @@ -123,12 +127,9 @@ import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.exception.UnauthorizedQueryException; import datawave.webservice.query.factory.Persister; -import datawave.webservice.query.logic.QueryLogic; -import 
datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.query.runner.AccumuloConnectionRequestBean; -import datawave.webservice.query.runner.QueryPredictor; import datawave.webservice.query.runner.RunningQuery; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; import datawave.webservice.result.BaseQueryResponse; @@ -208,7 +209,6 @@ public class CachedResultsBean { protected static final String BASE_COLUMNS = StringUtils.join(CacheableQueryRow.getFixedColumnSet(), ","); @Inject - @SpringBean(name = "ResponseObjectFactory") private ResponseObjectFactory responseObjectFactory; // reference "datawave/query/CachedResults.xml" @@ -224,7 +224,7 @@ public class CachedResultsBean { @Inject @SpringBean(refreshable = true) - private QueryExpirationConfiguration queryExpirationConf; + private QueryExpirationProperties queryExpirationConf; @Inject private QueryMetricFactory metricFactory; @@ -241,7 +241,7 @@ public class CachedResultsBean { private static Map loadingQueryMap = Collections.synchronizedMap(new HashMap<>()); private static Set loadingQueries = Collections.synchronizedSet(new HashSet<>()); private URL importFileUrl = null; - private CachedResultsParameters cp = new CachedResultsParameters(); + private CachedResultsQueryParameters cp = new CachedResultsQueryParameters(); @PostConstruct public void init() { @@ -341,9 +341,11 @@ protected GenericResponse load(@Required("queryId") String queryId, Stri Principal p = ctx.getCallerPrincipal(); String owner = getOwnerFromPrincipal(p); String userDn = getDNFromPrincipal(p); + Collection proxyServers = null; Collection> cbAuths = new HashSet<>(); if (p instanceof DatawavePrincipal) { DatawavePrincipal dp = (DatawavePrincipal) p; + proxyServers = dp.getProxyServers(); cbAuths.addAll(dp.getAuthorizations()); } else { QueryException qe = new 
QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName())); @@ -427,9 +429,9 @@ protected GenericResponse load(@Required("queryId") String queryId, Stri priority = logic.getConnectionPriority(); Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); q.populateTrackingMap(trackingMap); - accumuloConnectionRequestBean.requestBegin(queryId); + accumuloConnectionRequestBean.requestBegin(queryId, userDn, trackingMap); try { - client = connectionFactory.getClient(priority, trackingMap); + client = connectionFactory.getClient(userDn, proxyServers, priority, trackingMap); } finally { accumuloConnectionRequestBean.requestEnd(queryId); } @@ -442,13 +444,12 @@ protected GenericResponse load(@Required("queryId") String queryId, Stri AuditType auditType = logic.getAuditType(q); if (!auditType.equals(AuditType.NONE)) { try { - MultivaluedMap queryMap = new MultivaluedMapImpl<>(); - queryMap.putAll(q.toMap()); + MultiValueMap queryMap = new LinkedMultiValueMap<>(q.toMap()); marking.validate(queryMap); - queryMap.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, marking.toColumnVisibilityString()); - queryMap.putSingle(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); - queryMap.putSingle(PrivateAuditConstants.USER_DN, q.getUserDN()); - queryMap.putSingle(PrivateAuditConstants.LOGIC_CLASS, logic.getLogicName()); + queryMap.set(PrivateAuditConstants.COLUMN_VISIBILITY, marking.toColumnVisibilityString()); + queryMap.set(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); + queryMap.set(PrivateAuditConstants.USER_DN, q.getUserDN()); + queryMap.set(PrivateAuditConstants.LOGIC_CLASS, logic.getLogicName()); try { List selectors = logic.getSelectors(q); if (selectors != null && !selectors.isEmpty()) { @@ -459,7 +460,7 @@ protected GenericResponse load(@Required("queryId") String queryId, Stri } // if the user didn't set an audit id, use the query id if 
(!queryMap.containsKey(AuditParameters.AUDIT_ID)) { - queryMap.putSingle(AuditParameters.AUDIT_ID, q.getId().toString()); + queryMap.set(AuditParameters.AUDIT_ID, q.getId().toString()); } auditor.audit(queryMap); } catch (Exception e) { @@ -533,57 +534,53 @@ protected GenericResponse load(@Required("queryId") String queryId, Stri int maxLength = 0; for (Object o : results.getResults()) { + CacheableQueryRow cacheableQueryRow = cacheableLogic.writeToCache(o); - List cacheableQueryRowList = cacheableLogic.writeToCache(o); - - for (CacheableQueryRow cacheableQueryObject : cacheableQueryRowList) { - - Collection values = ((CacheableQueryRow) cacheableQueryObject).getColumnValues().values(); - int maxValueLength = 0; - for (String s : values) { - if (s.length() > maxValueLength) { - maxValueLength = s.length(); - } + Collection values = cacheableQueryRow.getColumnValues().values(); + int maxValueLength = 0; + for (String s : values) { + if (s.length() > maxValueLength) { + maxValueLength = s.length(); } + } - boolean dataWritten = false; - // If a successful maxLength has been determined, then don't change it. - if (maxLength == 0) - maxLength = maxValueLength + 1; - else if (maxValueLength > maxLength) { - maxLength = maxValueLength; - } + boolean dataWritten = false; + // If a successful maxLength has been determined, then don't change it. 
+ if (maxLength == 0) + maxLength = maxValueLength + 1; + else if (maxValueLength > maxLength) { + maxLength = maxValueLength; + } - int attempt = 0; - SQLException loadBatchException = null; // exception; - while (dataWritten == false && attempt < 10) { - try { - loadBatch(ps, owner, queryId, logic.getLogicName(), fieldMap, cacheableQueryObject, maxLength); - dataWritten = true; - rowsWritten++; - } catch (SQLException e) { - loadBatchException = e; - String msg = e.getMessage(); - if (msg.startsWith("Table") && msg.endsWith("doesn't exist")) { - throw new QueryException(DatawaveErrorCode.CACHE_TABLE_MISSING, MessageFormat.format("message: {0}", msg)); - } else { - log.info("Caught other SQLException:" + msg + " writing batch with maxLength:" + maxLength); - maxLength = maxLength / 2; - } + int attempt = 0; + SQLException loadBatchException = null; // exception; + while (dataWritten == false && attempt < 10) { + try { + loadBatch(ps, owner, queryId, logic.getLogicName(), fieldMap, cacheableQueryRow, maxLength); + dataWritten = true; + rowsWritten++; + } catch (SQLException e) { + loadBatchException = e; + String msg = e.getMessage(); + if (msg.startsWith("Table") && msg.endsWith("doesn't exist")) { + throw new QueryException(DatawaveErrorCode.CACHE_TABLE_MISSING, MessageFormat.format("message: {0}", msg)); + } else { + log.info("Caught other SQLException:" + msg + " writing batch with maxLength:" + maxLength); + maxLength = maxLength / 2; } - attempt++; } + attempt++; + } - if (dataWritten == false) { - String message = (loadBatchException == null) ? "unknown" : loadBatchException.getMessage(); + if (dataWritten == false) { + String message = (loadBatchException == null) ? 
"unknown" : loadBatchException.getMessage(); - log.error("Batch write FAILED - last exception = " + message + "record = " + cacheableQueryObject.getColumnValues().entrySet(), - loadBatchException); - } else if (rowsWritten >= rowsPerBatch) { - persistBatch(ps); - ps.clearBatch(); - rowsWritten = 0; - } + log.error("Batch write FAILED - last exception = " + message + "record = " + cacheableQueryRow.getColumnValues().entrySet(), + loadBatchException); + } else if (rowsWritten >= rowsPerBatch) { + persistBatch(ps); + ps.clearBatch(); + rowsWritten = 0; } } } // End of inserts into table @@ -931,9 +928,9 @@ public CachedResultsResponse loadAndCreate(@Required("queryId") @PathParam("quer Preconditions.checkNotNull(newQueryId, "newQueryId cannot be null"); Preconditions.checkNotNull(queryId, "queryId cannot be null"); - queryParameters.putSingle(CachedResultsParameters.QUERY_ID, queryId); + queryParameters.putSingle(CachedResultsQueryParameters.QUERY_ID, queryId); - String alias = queryParameters.getFirst(CachedResultsParameters.ALIAS); + String alias = queryParameters.getFirst(CachedResultsQueryParameters.ALIAS); // Find out who/what called this method Principal p = ctx.getCallerPrincipal(); @@ -985,9 +982,9 @@ public CachedResultsResponse loadAndCreate(@Required("queryId") @PathParam("quer // pagesize validated in create CreateQuerySessionIDFilter.QUERY_ID.set(newQueryId); - queryParameters.remove(CachedResultsParameters.QUERY_ID); - queryParameters.remove(CachedResultsParameters.VIEW); - queryParameters.putSingle(CachedResultsParameters.VIEW, view); + queryParameters.remove(CachedResultsQueryParameters.QUERY_ID); + queryParameters.remove(CachedResultsQueryParameters.VIEW); + queryParameters.putSingle(CachedResultsQueryParameters.VIEW, view); CachedResultsResponse response = create(newQueryId, queryParameters); try { persistByQueryId(newQueryId, alias, owner, CachedRunningQuery.Status.AVAILABLE, "", true); @@ -1007,16 +1004,16 @@ public Future 
loadAndCreateAsync(@Required("newQueryId") @DefaultValue("-1") Integer pagesize, String fixedFieldsInEvent) { MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putSingle(CachedResultsParameters.QUERY_ID, queryId); + queryParameters.putSingle(CachedResultsQueryParameters.QUERY_ID, queryId); queryParameters.putSingle("newQueryId", newQueryId); - queryParameters.putSingle(CachedResultsParameters.ALIAS, alias); - queryParameters.putSingle(CachedResultsParameters.FIELDS, fields); - queryParameters.putSingle(CachedResultsParameters.CONDITIONS, conditions); - queryParameters.putSingle(CachedResultsParameters.GROUPING, grouping); - queryParameters.putSingle(CachedResultsParameters.ORDER, order); + queryParameters.putSingle(CachedResultsQueryParameters.ALIAS, alias); + queryParameters.putSingle(CachedResultsQueryParameters.FIELDS, fields); + queryParameters.putSingle(CachedResultsQueryParameters.CONDITIONS, conditions); + queryParameters.putSingle(CachedResultsQueryParameters.GROUPING, grouping); + queryParameters.putSingle(CachedResultsQueryParameters.ORDER, order); queryParameters.putSingle("columnVisibility", columnVisibility); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, Integer.toString(pagesize)); - queryParameters.putSingle(CachedResultsParameters.FIXED_FIELDS_IN_EVENT, fixedFieldsInEvent); + queryParameters.putSingle(CachedResultsQueryParameters.FIXED_FIELDS_IN_EVENT, fixedFieldsInEvent); return loadAndCreateAsync(queryParameters); } @@ -1029,7 +1026,7 @@ public Future loadAndCreateAsync(MultivaluedMap queryParameters) { CreateQuerySessionIDFilter.QUERY_ID.set(null); - queryParameters.putSingle(CachedResultsParameters.QUERY_ID, queryId); + queryParameters.putSingle(CachedResultsQueryParameters.QUERY_ID, queryId); cp.clear(); cp.validate(queryParameters); @@ -1270,20 +1267,19 @@ public CachedResultsResponse create(@Required("queryId") @PathParam("queryId") S auditMessage.append("User running secondary query on cached results 
of original query,"); auditMessage.append(" original query: ").append(query.getQuery()); auditMessage.append(", secondary query: ").append(sqlQuery); - MultivaluedMap params = new MultivaluedMapImpl<>(); - params.putAll(query.toMap()); + MultiValueMap params = new LinkedMultiValueMap<>(query.toMap()); marking.validate(params); PrivateAuditConstants.stripPrivateParameters(queryParameters); - params.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, marking.toColumnVisibilityString()); - params.putSingle(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); - params.putSingle(PrivateAuditConstants.USER_DN, query.getUserDN()); - params.putSingle(PrivateAuditConstants.LOGIC_CLASS, crq.getQueryLogic().getLogicName()); + params.set(PrivateAuditConstants.COLUMN_VISIBILITY, marking.toColumnVisibilityString()); + params.set(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); + params.set(PrivateAuditConstants.USER_DN, query.getUserDN()); + params.set(PrivateAuditConstants.LOGIC_CLASS, crq.getQueryLogic().getLogicName()); params.remove(QueryParameters.QUERY_STRING); - params.putSingle(QueryParameters.QUERY_STRING, auditMessage.toString()); + params.set(QueryParameters.QUERY_STRING, auditMessage.toString()); params.putAll(queryParameters); // if the user didn't set an audit id, use the query id if (!params.containsKey(AuditParameters.AUDIT_ID)) { - params.putSingle(AuditParameters.AUDIT_ID, queryId); + params.set(AuditParameters.AUDIT_ID, queryId); } auditor.audit(params); } @@ -1413,7 +1409,7 @@ public CachedResultsResponse update(@PathParam("queryId") @Required("queryId") S Connection connection = ds.getConnection(); String logicName = crq.getQueryLogicName(); if (logicName != null) { - QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, p); + QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, (DatawavePrincipal) p); crq.activate(connection, queryLogic); } else { DbUtils.closeQuietly(connection); @@ -1519,7 +1515,7 @@ public BaseQueryResponse 
previous(@PathParam("queryId") @Required("queryId") Str if (crq.getShouldAutoActivate()) { Connection connection = ds.getConnection(); String logicName = crq.getQueryLogicName(); - QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, p); + QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, (DatawavePrincipal) p); crq.activate(connection, queryLogic); } else { throw new PreConditionFailedQueryException(DatawaveErrorCode.QUERY_TIMEOUT_FOR_RESOURCES); @@ -1633,7 +1629,7 @@ public CachedResultsResponse reset(@PathParam("queryId") @Required("queryId") St Connection connection = ds.getConnection(); String logicName = crq.getQueryLogicName(); - QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, p); + QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, (DatawavePrincipal) p); crq.activate(connection, queryLogic); response.setQueryId(crq.getQueryId()); @@ -1729,7 +1725,7 @@ public BaseQueryResponse next(@PathParam("queryId") @Required("queryId") String if (crq.getShouldAutoActivate()) { Connection connection = ds.getConnection(); String logicName = crq.getQueryLogicName(); - QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, p); + QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, (DatawavePrincipal) p); crq.activate(connection, queryLogic); } else { throw new PreConditionFailedQueryException(DatawaveErrorCode.QUERY_TIMEOUT_FOR_RESOURCES); @@ -1872,7 +1868,7 @@ public BaseQueryResponse getRows(@PathParam("queryId") @Required("queryId") Stri if (crq.isActivated() == false) { Connection connection = ds.getConnection(); String logicName = crq.getQueryLogicName(); - QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, p); + QueryLogic queryLogic = queryFactory.getQueryLogic(logicName, (DatawavePrincipal) p); crq.activate(connection, queryLogic); } @@ -2142,7 +2138,7 @@ private RunningQuery getQueryById(String id) throws Exception { Query q = queries.get(0); // will throw IllegalArgumentException if not 
defined - QueryLogic logic = queryFactory.getQueryLogic(q.getQueryLogicName(), p); + QueryLogic logic = queryFactory.getQueryLogic(q.getQueryLogicName(), (DatawavePrincipal) p); AccumuloConnectionFactory.Priority priority = logic.getConnectionPriority(); query = new RunningQuery(metrics, null, priority, logic, q, q.getQueryAuthorizations(), p, new RunningQueryTimingImpl(queryExpirationConf, q.getPageTimeout()), predictor, userOperationsBean, metricFactory); @@ -2447,8 +2443,8 @@ public void persistByQueryId(String queryId, String alias, String owner, CachedR } protected boolean createView(String tableName, String viewName, Connection con, boolean viewCreated, Map fieldMap) throws SQLException { - CachedResultsParameters.validate(tableName); - CachedResultsParameters.validate(viewName); + CachedResultsQueryParameters.validate(tableName); + CachedResultsQueryParameters.validate(viewName); StringBuilder viewCols = new StringBuilder(); StringBuilder tableCols = new StringBuilder(); viewCols.append(BASE_COLUMNS); diff --git a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedRunningQuery.java b/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedRunningQuery.java index 1b526db4fcb..da9e2cd1ffe 100644 --- a/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedRunningQuery.java +++ b/web-services/cached-results/src/main/java/datawave/webservice/results/cached/CachedRunningQuery.java @@ -29,23 +29,27 @@ import org.apache.commons.dbutils.DbUtils; import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; +import org.jboss.resteasy.specimpl.MultivaluedMapImpl; import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.cachedresults.CacheableLogic; +import datawave.core.query.cachedresults.CacheableQueryRowReader; +import 
datawave.core.query.cachedresults.CachedResultsQueryParameters; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.QueryMetricFactory; -import datawave.webservice.query.Query; +import datawave.security.authorization.DatawavePrincipal; import datawave.webservice.query.cache.AbstractRunningQuery; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.cachedresults.CacheableLogic; import datawave.webservice.query.cachedresults.CacheableQueryRow; -import datawave.webservice.query.cachedresults.CacheableQueryRowReader; import datawave.webservice.query.data.ObjectSizeOf; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.result.event.ResponseObjectFactory; +import datawave.webservice.query.util.MapUtils; @SuppressWarnings("restriction") public class CachedRunningQuery extends AbstractRunningQuery { @@ -394,7 +398,7 @@ private boolean isFunction(String field) { public String generateSql(String view, String fields, String conditions, String grouping, String order, String user, Connection connection) throws SQLException { - CachedResultsParameters.validate(view); + CachedResultsQueryParameters.validate(view); StringBuilder buf = new StringBuilder(); if (StringUtils.isEmpty(StringUtils.trimToNull(fields))) fields = "*"; @@ -554,7 +558,7 @@ public boolean isActivated() { } private List getViewColumnNames(Connection connection, String view) throws SQLException { - CachedResultsParameters.validate(view); + CachedResultsQueryParameters.validate(view); List columns = new ArrayList<>(); try 
(Statement s = connection.createStatement(); ResultSet rs = s.executeQuery(String.format("show columns from %s", view))) { Set fixedColumns = CacheableQueryRow.getFixedColumnSet(); @@ -826,13 +830,14 @@ public long getLastPageNumber() { */ private ResultsPage convert(CachedRowSet cachedRowSet, long pageByteTrigger) { boolean hitPageByteTrigger = false; - List cacheableQueryRowList = new ArrayList<>(); + List results = new ArrayList<>(); try { cachedRowSet.beforeFirst(); long resultBytes = 0; while (cachedRowSet.next() && !hitPageByteTrigger) { - CacheableQueryRow row = CacheableQueryRowReader.createRow(cachedRowSet, this.fixedFieldsInEvent, this.responseObjectFactory); - cacheableQueryRowList.add(row); + CacheableQueryRow row = CacheableQueryRowReader.createRow(cachedRowSet, this.fixedFieldsInEvent, this.responseObjectFactory, + queryLogic.getMarkingFunctions()); + results.add(this.cacheableLogic.readFromCache(row)); if (pageByteTrigger != 0) { resultBytes += ObjectSizeOf.Sizer.getObjectSize(row); if (resultBytes >= pageByteTrigger) { @@ -847,21 +852,21 @@ private ResultsPage convert(CachedRowSet cachedRowSet, long pageByteTrigger) { if (this.cacheableLogic == null) { return new ResultsPage(); } else { - return new ResultsPage(this.cacheableLogic.readFromCache(cacheableQueryRowList), - (hitPageByteTrigger ? ResultsPage.Status.PARTIAL : ResultsPage.Status.COMPLETE)); + return new ResultsPage(results, (hitPageByteTrigger ? 
ResultsPage.Status.PARTIAL : ResultsPage.Status.COMPLETE)); } } private ResultsPage convert(CachedRowSet cachedRowSet, Integer rowBegin, Integer rowEnd, long pageByteTrigger) { boolean hitPageByteTrigger = false; - List cacheableQueryRowList = new ArrayList<>(); + List results = new ArrayList<>(); try { long resultBytes = 0; while (cachedRowSet.next() && cachedRowSet.getRow() <= rowEnd && !hitPageByteTrigger) { if (log.isTraceEnabled()) log.trace("CRS.position: " + cachedRowSet.getRow() + ", size: " + cachedRowSet.size()); - CacheableQueryRow row = CacheableQueryRowReader.createRow(cachedRowSet, this.fixedFieldsInEvent, this.responseObjectFactory); - cacheableQueryRowList.add(row); + CacheableQueryRow row = CacheableQueryRowReader.createRow(cachedRowSet, this.fixedFieldsInEvent, this.responseObjectFactory, + queryLogic.getMarkingFunctions()); + results.add(this.cacheableLogic.readFromCache(row)); if (pageByteTrigger != 0) { resultBytes += ObjectSizeOf.Sizer.getObjectSize(row); if (resultBytes >= pageByteTrigger) { @@ -877,8 +882,7 @@ private ResultsPage convert(CachedRowSet cachedRowSet, Integer rowBegin, Integer if (this.cacheableLogic == null) { return new ResultsPage(); } else { - return new ResultsPage(this.cacheableLogic.readFromCache(cacheableQueryRowList), - (hitPageByteTrigger ? ResultsPage.Status.PARTIAL : ResultsPage.Status.COMPLETE)); + return new ResultsPage(results, (hitPageByteTrigger ? 
ResultsPage.Status.PARTIAL : ResultsPage.Status.COMPLETE)); } } @@ -1145,7 +1149,7 @@ public static CachedRunningQuery retrieveFromDatabase(String id, Principal princ resultSet.getTimestamp(x++); crq.pagesize = resultSet.getInt(x++); crq.user = resultSet.getString(x++); - crq.view = CachedResultsParameters.validate(resultSet.getString(x++), true); + crq.view = CachedResultsQueryParameters.validate(resultSet.getString(x++), true); crq.tableName = resultSet.getString(x++); crq.getMetric().setQueryType(CachedRunningQuery.class); @@ -1203,9 +1207,16 @@ public static CachedRunningQuery retrieveFromDatabase(String id, Principal princ InputStream istream = optionalQueryParametersBlob.getBinaryStream(); ObjectInputStream oistream = new ObjectInputStream(istream); Object optionalQueryParametersObject = oistream.readObject(); - if (optionalQueryParametersObject != null && optionalQueryParametersObject instanceof MultiValueMap) { - MultiValueMap optionalQueryParameters = (MultiValueMap) optionalQueryParametersObject; - query.setOptionalQueryParameters(optionalQueryParameters); + if (optionalQueryParametersObject != null) { + if (optionalQueryParametersObject instanceof MultivaluedMapImpl) { + MultivaluedMapImpl optionalQueryParameters = (MultivaluedMapImpl) optionalQueryParametersObject; + query.setOptionalQueryParameters(MapUtils.toMultiValueMap(optionalQueryParameters)); + } else if (optionalQueryParametersObject instanceof MultiValueMap) { + query.setOptionalQueryParameters((MultiValueMap) optionalQueryParametersObject); + } else { + throw new IllegalArgumentException( + "Failed to convert " + optionalQueryParametersObject.getClass() + " to a " + MultiValueMap.class); + } } } catch (IOException e) { log.error(e.getMessage(), e); @@ -1222,14 +1233,14 @@ public static CachedRunningQuery retrieveFromDatabase(String id, Principal princ } if (crq.queryLogicName != null) { try { - crq.queryLogic = queryFactory.getQueryLogic(crq.queryLogicName, principal); + crq.queryLogic = 
queryFactory.getQueryLogic(crq.queryLogicName, (DatawavePrincipal) principal); } catch (IllegalArgumentException | CloneNotSupportedException e) { log.error(e.getMessage(), e); } } } } - } catch (SQLException e) { + } catch (SQLException | QueryException e) { log.error(e.getMessage(), e); } return crq; @@ -1260,7 +1271,7 @@ public static void setQueryFactory(QueryLogicFactory queryFactory) { } public void setView(String view) { - this.view = CachedResultsParameters.validate(view); + this.view = CachedResultsQueryParameters.validate(view); } public String getTableName() { diff --git a/web-services/cached-results/src/test/java/datawave/webservice/results/cached/CachedRunningQueryTest.java b/web-services/cached-results/src/test/java/datawave/webservice/results/cached/CachedRunningQueryTest.java index fdad0238e8d..dfd70a07887 100644 --- a/web-services/cached-results/src/test/java/datawave/webservice/results/cached/CachedRunningQueryTest.java +++ b/web-services/cached-results/src/test/java/datawave/webservice/results/cached/CachedRunningQueryTest.java @@ -19,7 +19,7 @@ import org.powermock.modules.junit4.PowerMockRunner; import org.powermock.reflect.Whitebox; -import datawave.webservice.query.cachedresults.CacheableQueryRowImpl; +import datawave.core.query.cachedresults.CacheableQueryRowImpl; @RunWith(PowerMockRunner.class) @PrepareForTest(CachedRunningQuery.class) diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index d4443660355..a60f8005a36 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-client jar @@ -55,6 +55,11 @@ gov.nsa.datawave.microservice dictionary-api + + gov.nsa.datawave.microservice + query-api + jboss + gov.nsa.datawave.microservice type-utils diff --git a/web-services/client/src/main/java/datawave/audit/SelectorExtractor.java 
b/web-services/client/src/main/java/datawave/audit/SelectorExtractor.java index 87ac5cd4892..9dcb478377b 100644 --- a/web-services/client/src/main/java/datawave/audit/SelectorExtractor.java +++ b/web-services/client/src/main/java/datawave/audit/SelectorExtractor.java @@ -2,7 +2,7 @@ import java.util.List; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; public interface SelectorExtractor { diff --git a/web-services/client/src/main/java/datawave/webservice/query/Query.java b/web-services/client/src/main/java/datawave/webservice/query/Query.java deleted file mode 100644 index 8f4bf870a51..00000000000 --- a/web-services/client/src/main/java/datawave/webservice/query/Query.java +++ /dev/null @@ -1,118 +0,0 @@ -package datawave.webservice.query; - -import java.util.Date; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.UUID; - -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlSeeAlso; - -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.util.QueryUncaughtExceptionHandler; - -@XmlAccessorType(XmlAccessType.NONE) -@XmlSeeAlso(QueryImpl.class) -public abstract class Query { - - public abstract void initialize(String userDN, List dnList, String queryLogicName, QueryParameters qp, - Map> optionalQueryParameters); - - public abstract String getQueryLogicName(); - - public abstract UUID getId(); - - public abstract void setId(UUID id); - - public abstract String getQueryName(); - - public abstract void setQueryName(String queryName); - - public abstract String getUserDN(); - - public abstract void setUserDN(String userDN); - - public abstract String getQuery(); - - public abstract void setQuery(String query); - - public abstract String getQueryAuthorizations(); - - public abstract void setQueryAuthorizations(String authorizations); - - public abstract Date getExpirationDate(); - - 
public abstract void setExpirationDate(Date expirationDate); - - public abstract int getPagesize(); - - public abstract void setPagesize(int pagesize); - - public abstract int getPageTimeout(); - - public abstract void setPageTimeout(int pageTimeout); - - public abstract long getMaxResultsOverride(); - - public abstract void setMaxResultsOverride(long maxResults); - - public abstract boolean isMaxResultsOverridden(); - - public abstract Set getParameters(); - - public abstract void setParameters(Set params); - - public abstract void setQueryLogicName(String name); - - public abstract Date getBeginDate(); - - public abstract void setBeginDate(Date beginDate); - - public abstract Date getEndDate(); - - public abstract void setEndDate(Date endDate); - - public abstract String getSystemFrom(); - - public abstract void setSystemFrom(String systemFrom); - - public abstract Query duplicate(String newQueryName); - - public abstract Parameter findParameter(String parameter); - - public abstract void setParameters(Map parameters); - - public abstract void addParameter(String key, String val); - - public abstract void addParameters(Map parameters); - - public abstract void setDnList(List dnList); - - public abstract List getDnList(); - - public abstract QueryUncaughtExceptionHandler getUncaughtExceptionHandler(); - - public abstract void setUncaughtExceptionHandler(QueryUncaughtExceptionHandler uncaughtExceptionHandler); - - public abstract void setOwner(String owner); - - public abstract String getOwner(); - - public abstract void setColumnVisibility(String colviz); - - public abstract String getColumnVisibility(); - - public abstract Map> toMap(); - - public abstract Map getCardinalityFields(); - - public abstract void setOptionalQueryParameters(Map> optionalQueryParameters); - - public abstract Map> getOptionalQueryParameters(); - - public abstract void removeParameter(String key); - - public abstract void populateTrackingMap(Map trackingMap); -} diff --git 
a/web-services/client/src/main/java/datawave/webservice/query/QueryImpl.java b/web-services/client/src/main/java/datawave/webservice/query/QueryImpl.java deleted file mode 100644 index d7afae71766..00000000000 --- a/web-services/client/src/main/java/datawave/webservice/query/QueryImpl.java +++ /dev/null @@ -1,904 +0,0 @@ -package datawave.webservice.query; - -import java.io.IOException; -import java.io.Serializable; -import java.text.ParseException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; -import java.util.List; -import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; -import java.util.UUID; - -import javax.naming.InvalidNameException; -import javax.naming.ldap.LdapName; -import javax.naming.ldap.Rdn; -import javax.xml.bind.annotation.XmlAccessType; -import javax.xml.bind.annotation.XmlAccessorType; -import javax.xml.bind.annotation.XmlElement; -import javax.xml.bind.annotation.XmlRootElement; -import javax.xml.bind.annotation.XmlTransient; -import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; - -import org.apache.commons.collections4.CollectionUtils; -import org.apache.commons.collections4.MapUtils; -import org.apache.commons.lang.builder.EqualsBuilder; -import org.apache.commons.lang.builder.HashCodeBuilder; -import org.apache.commons.lang.builder.ToStringBuilder; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import datawave.webservice.query.util.OptionallyEncodedStringAdapter; -import datawave.webservice.query.util.QueryUncaughtExceptionHandler; -import io.protostuff.Input; -import io.protostuff.Message; -import io.protostuff.Output; -import io.protostuff.Schema; -import io.protostuff.UninitializedMessageException; - -@XmlRootElement(name = "QueryImpl") -@XmlAccessorType(XmlAccessType.NONE) -public class QueryImpl extends Query implements Serializable, Message { - - public static 
final String PARAMETER_SEPARATOR = ";"; - public static final String PARAMETER_NAME_VALUE_SEPARATOR = ":"; - - @XmlAccessorType(XmlAccessType.FIELD) - public static final class Parameter implements Serializable, Message { - - private static final long serialVersionUID = 2L; - - @XmlElement(name = "name") - private String parameterName; - @XmlElement(name = "value") - private String parameterValue; - - public Parameter() {} - - public Parameter(String name, String value) { - this.parameterName = name; - this.parameterValue = value; - } - - public String getParameterName() { - return parameterName; - } - - public void setParameterName(String parameterName) { - this.parameterName = parameterName; - } - - public String getParameterValue() { - return parameterValue; - } - - public void setParameterValue(String parameterValue) { - this.parameterValue = parameterValue; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(256); - sb.append("[name=").append(this.parameterName); - sb.append(",value=").append(this.parameterValue).append("]"); - return sb.toString(); - } - - @Override - public boolean equals(Object o) { - if (null == o) - return false; - if (!(o instanceof Parameter)) - return false; - if (this == o) - return true; - Parameter other = (Parameter) o; - if (this.getParameterName().equals(other.getParameterName()) && this.getParameterValue().equals(other.getParameterValue())) - return true; - else - return false; - } - - @Override - public int hashCode() { - return getParameterName() == null ? 
0 : getParameterName().hashCode(); - } - - @XmlTransient - public static final Schema SCHEMA = new Schema() { - public Parameter newMessage() { - return new Parameter(); - } - - public Class typeClass() { - return Parameter.class; - } - - public String messageName() { - return Parameter.class.getSimpleName(); - } - - public String messageFullName() { - return Parameter.class.getName(); - } - - public boolean isInitialized(Parameter message) { - return message.parameterName != null && message.parameterValue != null; - } - - public void writeTo(Output output, Parameter message) throws IOException { - if (message.parameterName == null) - throw new UninitializedMessageException(message); - output.writeString(1, message.parameterName, false); - - if (message.parameterValue == null) - throw new UninitializedMessageException(message); - output.writeString(2, message.parameterValue, false); - } - - public void mergeFrom(Input input, Parameter message) throws IOException { - int number; - while ((number = input.readFieldNumber(this)) != 0) { - switch (number) { - case 1: - message.parameterName = input.readString(); - break; - case 2: - message.parameterValue = input.readString(); - break; - default: - input.handleUnknownField(number, this); - break; - } - } - } - - public String getFieldName(int number) { - switch (number) { - case 1: - return "parameterName"; - case 2: - return "parameterValue"; - default: - return null; - } - } - - public int getFieldNumber(String name) { - final Integer number = fieldMap.get(name); - return number == null ? 
0 : number.intValue(); - } - - final java.util.HashMap fieldMap = new java.util.HashMap(); - { - fieldMap.put("parameterName", 1); - fieldMap.put("parameterValue", 2); - } - }; - - public static Schema getSchema() { - return SCHEMA; - } - - @Override - public Schema cachedSchema() { - return SCHEMA; - } - - } - - private static final long serialVersionUID = 2L; - - @XmlElement - protected String queryLogicName; - @XmlElement - protected String id; - @XmlElement - protected String queryName; - @XmlElement - protected String userDN; - @XmlElement - @XmlJavaTypeAdapter(OptionallyEncodedStringAdapter.class) - protected String query; - @XmlElement - protected Date beginDate; - @XmlElement - protected Date endDate; - @XmlElement - protected String queryAuthorizations; - @XmlElement - protected Date expirationDate; - @XmlElement - protected int pagesize; - @XmlElement - protected int pageTimeout; - @XmlElement - protected boolean isMaxResultsOverridden; - @XmlElement - protected long maxResultsOverride; - @XmlElement - protected HashSet parameters = new HashSet(); - @XmlElement - protected List dnList; - @XmlElement - protected String owner; - @XmlElement - protected String columnVisibility; - @XmlElement - protected String systemFrom; - - @XmlTransient - protected Map> optionalQueryParameters; - - protected transient QueryUncaughtExceptionHandler uncaughtExceptionHandler; - - protected transient HashMap paramLookup = new HashMap(); - - public String getQueryLogicName() { - return queryLogicName; - } - - public UUID getId() { - if (null == id) - return null; - return UUID.fromString(id); - } - - public String getQueryName() { - return queryName; - } - - public String getUserDN() { - return userDN; - } - - public String getQuery() { - return query; - } - - public String getQueryAuthorizations() { - return queryAuthorizations; - } - - public Date getExpirationDate() { - return expirationDate; - } - - public int getPagesize() { - return pagesize; - } - - public int 
getPageTimeout() { - return pageTimeout; - } - - public long getMaxResultsOverride() { - return maxResultsOverride; - } - - public boolean isMaxResultsOverridden() { - return isMaxResultsOverridden; - } - - public Set getParameters() { - return parameters == null ? null : Collections.unmodifiableSet(parameters); - } - - public void setQueryLogicName(String name) { - this.queryLogicName = name; - } - - public void setId(UUID id) { - this.id = id.toString(); - } - - public void setQueryName(String queryName) { - this.queryName = queryName; - } - - public void setUserDN(String userDN) { - this.userDN = userDN; - } - - public void setQuery(String query) { - this.query = query; - } - - public void setQueryAuthorizations(String queryAuthorizations) { - this.queryAuthorizations = queryAuthorizations; - } - - public void setExpirationDate(Date expirationDate) { - this.expirationDate = expirationDate; - } - - public void setMaxResultsOverride(long maxResults) { - this.maxResultsOverride = maxResults; - this.isMaxResultsOverridden = true; - } - - public void setPagesize(int pagesize) { - this.pagesize = pagesize; - } - - public void setPageTimeout(int pageTimeout) { - this.pageTimeout = pageTimeout; - } - - public void setParameters(Set parameters) { - this.parameters.clear(); - this.parameters.addAll(parameters); - this.paramLookup.clear(); - for (Parameter p : this.parameters) { - this.paramLookup.put(p.getParameterName(), p); - } - } - - public void addParameter(String key, String val) { - Parameter p = new Parameter(key, val); - this.parameters.add(p); - this.paramLookup.put(p.getParameterName(), p); - } - - public void addParameters(Map parameters) { - for (Entry p : parameters.entrySet()) { - addParameter(p.getKey(), p.getValue()); - } - } - - public void setParameters(Map parameters) { - HashSet paramObjs = new HashSet(parameters.size()); - for (Entry param : parameters.entrySet()) { - Parameter p = new Parameter(param.getKey(), param.getValue()); - paramObjs.add(p); 
- } - this.setParameters(paramObjs); - } - - public List getDnList() { - return dnList; - } - - public void setDnList(List dnList) { - this.dnList = dnList; - } - - public String getColumnVisibility() { - return columnVisibility; - } - - public void setColumnVisibility(String columnVisibility) { - this.columnVisibility = columnVisibility; - } - - public Date getBeginDate() { - return beginDate; - } - - public void setBeginDate(Date beginDate) { - this.beginDate = beginDate; - } - - public Date getEndDate() { - return endDate; - } - - public void setEndDate(Date endDate) { - this.endDate = endDate; - } - - @Override - public String getSystemFrom() { - return systemFrom; - } - - @Override - public void setSystemFrom(String systemFrom) { - this.systemFrom = systemFrom; - } - - public Map> getOptionalQueryParameters() { - return optionalQueryParameters; - } - - public void setOptionalQueryParameters(Map> optionalQueryParameters) { - this.optionalQueryParameters = optionalQueryParameters; - } - - @Override - public QueryImpl duplicate(String newQueryName) { - QueryImpl query = new QueryImpl(); - query.setQueryLogicName(this.getQueryLogicName()); - query.setQueryName(newQueryName); - query.setExpirationDate(this.getExpirationDate()); - query.setId(UUID.randomUUID()); - query.setPagesize(this.getPagesize()); - query.setPageTimeout(this.getPageTimeout()); - if (query.isMaxResultsOverridden()) { - query.setMaxResultsOverride(this.getMaxResultsOverride()); - } - query.setQuery(this.getQuery()); - query.setQueryAuthorizations(this.getQueryAuthorizations()); - query.setUserDN(this.getUserDN()); - query.setOwner(this.getOwner()); - query.setColumnVisibility(this.getColumnVisibility()); - query.setBeginDate(this.getBeginDate()); - query.setEndDate(this.getEndDate()); - query.setSystemFrom(this.getSystemFrom()); - if (CollectionUtils.isNotEmpty(this.parameters)) - query.setParameters(new HashSet(this.parameters)); - query.setDnList(this.dnList); - if 
(MapUtils.isNotEmpty(this.optionalQueryParameters)) { - Map> optionalDuplicate = new HashMap<>(); - this.optionalQueryParameters.entrySet().stream().forEach(e -> optionalDuplicate.put(e.getKey(), new ArrayList(e.getValue()))); - query.setOptionalQueryParameters(optionalDuplicate); - } - query.setUncaughtExceptionHandler(this.getUncaughtExceptionHandler()); - return query; - } - - @Override - public int hashCode() { - return new HashCodeBuilder(17, 37).append(this.getQueryLogicName()).append(this.getQueryName()).append(this.getExpirationDate()) - .append(UUID.randomUUID()).append(this.getPagesize()).append(this.getPageTimeout()) - .append(this.isMaxResultsOverridden() ? this.getMaxResultsOverride() : 0).append(this.getQuery()).append(this.getQueryAuthorizations()) - .append(this.getUserDN()).append(this.getOwner()).append(this.getParameters()).append(this.getDnList()) - .append(this.getColumnVisibility()).append(this.getBeginDate()).append(this.getEndDate()).append(this.getSystemFrom()).toHashCode(); - } - - @Override - public String toString() { - ToStringBuilder tsb = new ToStringBuilder(this); - tsb.append("queryLogicName", this.getQueryLogicName()); - tsb.append("queryName", this.getQueryName()); - tsb.append("expirationDate", this.getExpirationDate()); - tsb.append("uuid", this.getId()); - tsb.append("pagesize", this.getPagesize()); - tsb.append("pageTimeout", this.getPageTimeout()); - tsb.append("maxResultsOverride", (this.isMaxResultsOverridden() ? 
this.getMaxResultsOverride() : "NA")); - tsb.append("query", this.getQuery()); - tsb.append("queryAuthorizations", this.getQueryAuthorizations()); - tsb.append("userDN", this.getUserDN()); - tsb.append("owner", this.getOwner()); - tsb.append("parameters", this.getParameters()); - tsb.append("dnList", this.getDnList()); - tsb.append("columnVisibility", this.getColumnVisibility()); - tsb.append("beginDate", this.getBeginDate()); - tsb.append("endDate", this.getEndDate()); - tsb.append("systemFrom", this.getSystemFrom()); - - return tsb.toString(); - } - - @Override - public boolean equals(Object o) { - if (null == o) - return false; - if (!(o instanceof QueryImpl)) - return false; - if (this == o) - return true; - QueryImpl other = (QueryImpl) o; - EqualsBuilder eb = new EqualsBuilder(); - eb.append(this.getQueryLogicName(), other.getQueryLogicName()); - eb.append(this.getId(), other.getId()); - eb.append(this.getQueryName(), other.getQueryName()); - eb.append(this.getUserDN(), other.getUserDN()); - eb.append(this.getOwner(), other.getOwner()); - eb.append(this.getQuery(), other.getQuery()); - eb.append(this.getQueryAuthorizations(), other.getQueryAuthorizations()); - eb.append(this.getExpirationDate(), other.getExpirationDate()); - eb.append(this.getPagesize(), other.getPagesize()); - eb.append(this.getPageTimeout(), other.getPageTimeout()); - eb.append(this.isMaxResultsOverridden(), other.isMaxResultsOverridden()); - if (this.isMaxResultsOverridden()) { - eb.append(this.getMaxResultsOverride(), other.getMaxResultsOverride()); - } - eb.append(this.getColumnVisibility(), other.getColumnVisibility()); - eb.append(this.getBeginDate(), other.getBeginDate()); - eb.append(this.getEndDate(), other.getEndDate()); - eb.append(this.getSystemFrom(), other.getSystemFrom()); - eb.append(this.getDnList(), other.getDnList()); - eb.append(this.getParameters(), other.getParameters()); - return eb.isEquals(); - } - - public Parameter findParameter(String parameter) { - if 
(!paramLookup.containsKey(parameter)) { - return new Parameter(parameter, ""); - } else { - return paramLookup.get(parameter); - } - } - - @XmlTransient - private static final Schema SCHEMA = new Schema() { - public QueryImpl newMessage() { - return new QueryImpl(); - } - - public Class typeClass() { - return QueryImpl.class; - } - - public String messageName() { - return QueryImpl.class.getSimpleName(); - } - - public String messageFullName() { - return QueryImpl.class.getName(); - } - - public boolean isInitialized(QueryImpl message) { - return message.queryLogicName != null && message.id != null && message.userDN != null && message.query != null - && message.queryAuthorizations != null && message.expirationDate != null && message.pagesize > 0 && message.pageTimeout != 0; - } - - public void writeTo(Output output, QueryImpl message) throws IOException { - if (message.queryLogicName == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(1, message.queryLogicName, false); - - if (message.id == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(2, message.id, false); - - if (message.queryName != null) - output.writeString(3, message.queryName, false); - - if (message.userDN == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(4, message.userDN, false); - - if (message.query == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(5, message.query, false); - - if (message.beginDate != null) - output.writeInt64(6, message.beginDate.getTime(), false); - - if (message.endDate != null) - output.writeInt64(7, message.endDate.getTime(), false); - - if (message.queryAuthorizations == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(8, message.queryAuthorizations, false); - - if (message.expirationDate == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeInt64(9, 
message.expirationDate.getTime(), false); - - if (message.pagesize <= 0) - throw new UninitializedMessageException(message, SCHEMA); - output.writeUInt32(10, message.pagesize, false); - - if (message.parameters != null) { - for (Parameter p : message.parameters) { - output.writeObject(11, p, Parameter.SCHEMA, true); - } - } - - if (message.owner == null) - throw new UninitializedMessageException(message, SCHEMA); - output.writeString(12, message.owner, false); - - if (null != message.dnList) { - for (String dn : message.dnList) - output.writeString(13, dn, true); - } - - if (message.columnVisibility != null) { - output.writeString(14, message.columnVisibility, false); - } - - if (message.pageTimeout == 0) - throw new UninitializedMessageException(message, SCHEMA); - output.writeUInt32(15, message.pageTimeout, false); - - if (message.systemFrom != null) - output.writeString(16, message.systemFrom, false); - } - - public void mergeFrom(Input input, QueryImpl message) throws IOException { - int number; - while ((number = input.readFieldNumber(this)) != 0) { - switch (number) { - case 1: - message.queryLogicName = input.readString(); - break; - case 2: - message.id = input.readString(); - break; - case 3: - message.queryName = input.readString(); - break; - case 4: - message.userDN = input.readString(); - break; - case 5: - message.query = input.readString(); - break; - - case 6: - message.beginDate = new Date(input.readInt64()); - break; - case 7: - message.endDate = new Date(input.readInt64()); - break; - case 8: - message.queryAuthorizations = input.readString(); - break; - case 9: - message.expirationDate = new Date(input.readInt64()); - break; - case 10: - message.pagesize = input.readUInt32(); - break; - case 11: - if (message.parameters == null) - message.parameters = new HashSet(); - Parameter p = input.mergeObject(null, Parameter.SCHEMA); - message.addParameter(p.getParameterName(), p.getParameterValue()); - break; - case 12: - message.owner = 
input.readString(); - break; - case 13: - if (null == message.dnList) - message.dnList = new ArrayList(); - message.dnList.add(input.readString()); - break; - case 14: - message.columnVisibility = input.readString(); - break; - case 15: - message.pageTimeout = input.readUInt32(); - break; - case 16: - message.systemFrom = input.readString(); - default: - input.handleUnknownField(number, this); - break; - } - } - } - - public String getFieldName(int number) { - switch (number) { - case 1: - return "queryLogicName"; - case 2: - return "id"; - case 3: - return "queryName"; - case 4: - return "userDN"; - case 5: - return "query"; - case 6: - return "beginDate"; - case 7: - return "endDate"; - case 8: - return "queryAuthorizations"; - case 9: - return "expirationDate"; - case 10: - return "pagesize"; - case 11: - return "parameters"; - case 12: - return "owner"; - case 13: - return "dnList"; - case 14: - return "columnVisibility"; - case 15: - return "pageTimeout"; - case 16: - return "systemFrom"; - default: - return null; - } - } - - public int getFieldNumber(String name) { - final Integer number = fieldMap.get(name); - return number == null ? 
0 : number.intValue(); - } - - final java.util.HashMap fieldMap = new java.util.HashMap(); - { - fieldMap.put("queryLogicName", 1); - fieldMap.put("id", 2); - fieldMap.put("queryName", 3); - fieldMap.put("userDN", 4); - fieldMap.put("query", 5); - fieldMap.put("beginDate", 6); - fieldMap.put("endDate", 7); - fieldMap.put("queryAuthorizations", 8); - fieldMap.put("expirationDate", 9); - fieldMap.put("pagesize", 10); - fieldMap.put("parameters", 11); - fieldMap.put("owner", 12); - fieldMap.put("dnList", 13); - fieldMap.put("columnVisibility", 14); - fieldMap.put("pageTimeout", 15); - fieldMap.put("systemFrom", 16); - } - }; - - public QueryUncaughtExceptionHandler getUncaughtExceptionHandler() { - return this.uncaughtExceptionHandler; - } - - public void setUncaughtExceptionHandler(QueryUncaughtExceptionHandler uncaughtExceptionHandler) { - this.uncaughtExceptionHandler = uncaughtExceptionHandler; - } - - public void initialize(String userDN, List dnList, String queryLogicName, QueryParameters qp, Map> optionalQueryParameters) { - this.dnList = dnList; - this.expirationDate = qp.getExpirationDate(); - this.id = UUID.randomUUID().toString(); - this.pagesize = qp.getPagesize(); - this.pageTimeout = qp.getPageTimeout(); - this.query = qp.getQuery(); - this.queryAuthorizations = qp.getAuths(); - this.queryLogicName = queryLogicName; - this.queryName = qp.getQueryName(); - this.userDN = userDN; - this.owner = getOwner(this.userDN); - this.beginDate = qp.getBeginDate(); - this.endDate = qp.getEndDate(); - this.systemFrom = qp.getSystemFrom(); - if (optionalQueryParameters != null) { - for (Entry> entry : optionalQueryParameters.entrySet()) { - if (entry.getValue().get(0) != null) { - this.addParameter(entry.getKey(), entry.getValue().get(0)); - } - } - } - } - - private static String getCommonName(String dn) { - String[] comps = getComponents(dn, "CN"); - return comps.length >= 1 ? 
comps[0] : null; - } - - private static String[] getComponents(String dn, String componentName) { - componentName = componentName.toUpperCase(); - ArrayList components = new ArrayList(); - try { - LdapName name = new LdapName(dn); - for (Rdn rdn : name.getRdns()) { - if (componentName.equals(rdn.getType().toUpperCase())) { - components.add(String.valueOf(rdn.getValue())); - } - } - } catch (InvalidNameException e) { - // ignore -- invalid name, so can't find components - } - return components.toArray(new String[0]); - } - - public static String getOwner(String dn) { - String sid = null; - if (dn != null) { - String cn = getCommonName(dn); - if (cn == null) - cn = dn; - sid = cn; - int idx = cn.lastIndexOf(' '); - if (idx >= 0) - sid = cn.substring(idx + 1); - } - return sid; - } - - public void setOwner(String owner) { - this.owner = owner; - } - - public String getOwner() { - return this.owner; - } - - public Map> toMap() { - // TODO: missing variables uuid and owner -- not going into map - MultiValueMap p = new LinkedMultiValueMap<>(); - if (this.queryAuthorizations != null) { - p.set(QueryParameters.QUERY_AUTHORIZATIONS, this.queryAuthorizations); - } - if (this.expirationDate != null) { - try { - p.set(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(this.expirationDate)); - } catch (ParseException e) { - throw new RuntimeException("Error formatting date", e); - } - } - if (this.queryName != null) { - p.set(QueryParameters.QUERY_NAME, this.queryName); - } - if (this.queryLogicName != null) { - p.set(QueryParameters.QUERY_LOGIC_NAME, this.queryLogicName); - } - // no null check on primitives - p.set(QueryParameters.QUERY_PAGESIZE, Integer.toString(this.pagesize)); - if (this.query != null) { - p.set(QueryParameters.QUERY_STRING, this.query); - } - if (this.userDN != null) { - p.set("userDN", this.userDN); - } - if (this.dnList != null) { - p.set("dnList", this.dnList.toString()); - } - if (this.columnVisibility != null) { - 
p.set("columnVisibility", this.columnVisibility); - } - if (this.beginDate != null) { - try { - p.set(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(this.beginDate)); - } catch (ParseException e) { - throw new RuntimeException("Error formatting date", e); - } - } - if (this.endDate != null) { - try { - p.set(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(this.endDate)); - } catch (ParseException e) { - throw new RuntimeException("Error formatting date", e); - } - } - p.set(QueryParameters.QUERY_PAGETIMEOUT, Integer.toString(this.pageTimeout)); - - if (this.systemFrom != null) { - p.set("systemFrom", this.systemFrom); - } - - if (this.parameters != null) { - for (Parameter parameter : parameters) { - p.set(parameter.getParameterName(), parameter.getParameterValue()); - } - } - return p; - } - - @Override - public Map getCardinalityFields() { - Map cardinalityFields = new HashMap(); - cardinalityFields.put("QUERY_USER", getOwner()); - cardinalityFields.put("QUERY_LOGIC_NAME", getQueryLogicName()); - return cardinalityFields; - } - - @Override - public Schema cachedSchema() { - return SCHEMA; - } - - @Override - public void removeParameter(String key) { - this.parameters.remove(paramLookup.get(key)); - this.paramLookup.remove(key); - } - - @Override - public void populateTrackingMap(Map trackingMap) { - if (trackingMap != null) { - if (this.owner != null) { - trackingMap.put("query.user", this.owner); - } - if (this.id != null) { - trackingMap.put("query.id", this.id); - } - if (this.query != null) { - trackingMap.put("query.query", this.query); - } - } - } -} diff --git a/web-services/client/src/main/java/datawave/webservice/query/QueryParameters.java b/web-services/client/src/main/java/datawave/webservice/query/QueryParameters.java deleted file mode 100644 index 7363ea6cb86..00000000000 --- a/web-services/client/src/main/java/datawave/webservice/query/QueryParameters.java +++ /dev/null @@ -1,97 +0,0 @@ -package datawave.webservice.query; - 
-import java.util.Date; -import java.util.List; -import java.util.Map; - -import datawave.validation.ParameterValidator; - -/** - * QueryParameters passed in from a client, they are validated and passed through to the iterator stack as QueryOptions. - * - */ -public interface QueryParameters extends ParameterValidator { - - String QUERY_STRING = "query"; - String QUERY_NAME = "queryName"; - String QUERY_PERSISTENCE = "persistence"; - String QUERY_PAGESIZE = "pagesize"; - String QUERY_PAGETIMEOUT = "pageTimeout"; - String QUERY_MAX_RESULTS_OVERRIDE = "max.results.override"; - String QUERY_AUTHORIZATIONS = "auths"; - String QUERY_EXPIRATION = "expiration"; - String QUERY_TRACE = "trace"; - String QUERY_BEGIN = "begin"; - String QUERY_END = "end"; - String QUERY_PARAMS = "params"; - String QUERY_VISIBILITY = "columnVisibility"; - String QUERY_LOGIC_NAME = "logicName"; - String QUERY_SYSTEM_FROM = "systemFrom"; - - String getQuery(); - - void setQuery(String query); - - String getQueryName(); - - void setQueryName(String queryName); - - QueryPersistence getPersistenceMode(); - - void setPersistenceMode(QueryPersistence persistenceMode); - - int getPagesize(); - - void setPagesize(int pagesize); - - int getPageTimeout(); - - void setPageTimeout(int pageTimeout); - - long getMaxResultsOverride(); - - void setMaxResultsOverride(long maxResults); - - boolean isMaxResultsOverridden(); - - String getAuths(); - - void setAuths(String auths); - - Date getExpirationDate(); - - void setExpirationDate(Date expirationDate); - - boolean isTrace(); - - void setTrace(boolean trace); - - Date getBeginDate(); - - Date getEndDate(); - - void setBeginDate(Date beginDate); - - void setEndDate(Date endDate); - - String getVisibility(); - - void setVisibility(String visibility); - - String getLogicName(); - - void setLogicName(String logicName); - - String getSystemFrom(); - - void setSystemFrom(String systemFrom); - - Map> getRequestHeaders(); - - void setRequestHeaders(Map> 
requestHeaders); - - Map> getUnknownParameters(Map> allQueryParameters); - - void clear(); - -} diff --git a/web-services/client/src/main/java/datawave/webservice/query/QueryParametersImpl.java b/web-services/client/src/main/java/datawave/webservice/query/QueryParametersImpl.java deleted file mode 100644 index 3895aa81e0d..00000000000 --- a/web-services/client/src/main/java/datawave/webservice/query/QueryParametersImpl.java +++ /dev/null @@ -1,552 +0,0 @@ -package datawave.webservice.query; - -import java.text.ParseException; -import java.text.SimpleDateFormat; -import java.util.Arrays; -import java.util.Calendar; -import java.util.Date; -import java.util.LinkedHashMap; -import java.util.List; -import java.util.Map; - -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.time.DateUtils; -import org.springframework.util.LinkedMultiValueMap; -import org.springframework.util.MultiValueMap; - -import com.google.common.base.Preconditions; -import com.google.common.base.Splitter; - -public class QueryParametersImpl implements QueryParameters { - - private static final List KNOWN_PARAMS = Arrays.asList(QUERY_STRING, QUERY_NAME, QUERY_PERSISTENCE, QUERY_PAGESIZE, QUERY_PAGETIMEOUT, - QUERY_AUTHORIZATIONS, QUERY_EXPIRATION, QUERY_TRACE, QUERY_BEGIN, QUERY_END, QUERY_VISIBILITY, QUERY_LOGIC_NAME, QUERY_MAX_RESULTS_OVERRIDE, - QUERY_SYSTEM_FROM); - - protected String query; - protected String queryName; - protected QueryPersistence persistenceMode; - protected int pagesize; - protected int pageTimeout; - protected boolean isMaxResultsOverridden; - protected long maxResultsOverride; - protected String auths; - protected Date expirationDate; - protected boolean trace; - protected Date beginDate; - protected Date endDate; - protected String visibility; - protected String logicName; - protected String systemFrom; - protected Map> requestHeaders; - - public QueryParametersImpl() { - clear(); - } - - /** - * Configure internal variables via the incoming 
parameter map, performing validation of values. - * - * QueryParameters are considered valid if the following required parameters are present. - *
      - *
    1. 'query'
    2. - *
    3. 'queryName'
    4. - *
    5. 'persistence'
    6. - *
    7. 'auths'
    8. - *
    9. 'expiration'
    10. - *
    11. 'queryLogicName'
    12. - *
    - * - * QueryParameters may also include the following optional parameters. - *
      - *
    1. 'pagesize'
    2. - *
    3. 'pageTimeout'
    4. - *
    5. 'begin'
    6. - *
    7. 'end'
    8. - *
    - * - * @param parameters - * - a Map of QueryParameters - * @throws IllegalArgumentException - * when a bad argument is encountered - */ - public void validate(Map> parameters) throws IllegalArgumentException { - for (String param : KNOWN_PARAMS) { - List values = parameters.get(param); - if (null == values) { - continue; - } - if (values.isEmpty() || values.size() > 1) { - throw new IllegalArgumentException("Known parameter [" + param + "] only accepts one value"); - } - if (QUERY_STRING.equals(param)) { - this.query = values.get(0); - } else if (QUERY_NAME.equals(param)) { - this.queryName = values.get(0); - } else if (QUERY_PERSISTENCE.equals(param)) { - this.persistenceMode = QueryPersistence.valueOf(values.get(0)); - } else if (QUERY_PAGESIZE.equals(param)) { - this.pagesize = Integer.parseInt(values.get(0)); - } else if (QUERY_PAGETIMEOUT.equals(param)) { - this.pageTimeout = Integer.parseInt(values.get(0)); - } else if (QUERY_MAX_RESULTS_OVERRIDE.equals(param)) { - this.maxResultsOverride = Long.parseLong(values.get(0)); - this.isMaxResultsOverridden = true; - } else if (QUERY_AUTHORIZATIONS.equals(param)) { - // ensure that auths are comma separated with no empty values or spaces - Splitter splitter = Splitter.on(',').omitEmptyStrings().trimResults(); - this.auths = StringUtils.join(splitter.splitToList(values.get(0)), ","); - } else if (QUERY_EXPIRATION.equals(param)) { - try { - this.expirationDate = parseEndDate(values.get(0)); - } catch (ParseException e) { - throw new IllegalArgumentException("Error parsing expiration date", e); - } - } else if (QUERY_TRACE.equals(param)) { - this.trace = Boolean.parseBoolean(values.get(0)); - } else if (QUERY_BEGIN.equals(param)) { - try { - this.beginDate = values.get(0) == null ? null : parseStartDate(values.get(0)); - } catch (ParseException e) { - throw new IllegalArgumentException("Error parsing begin date", e); - } - } else if (QUERY_END.equals(param)) { - try { - this.endDate = values.get(0) == null ? 
null : parseEndDate(values.get(0)); - } catch (ParseException e) { - throw new IllegalArgumentException("Error parsing end date", e); - } - } else if (QUERY_VISIBILITY.equals(param)) { - this.visibility = values.get(0); - } else if (QUERY_LOGIC_NAME.equals(param)) { - this.logicName = values.get(0); - } else if (QUERY_SYSTEM_FROM.equals(param)) { - this.systemFrom = values.get(0); - } else { - throw new IllegalArgumentException("Unknown condition."); - } - } - - try { - Preconditions.checkNotNull(this.query, "QueryParameter 'query' cannot be null"); - Preconditions.checkNotNull(this.queryName, "QueryParameter 'queryName' cannot be null"); - Preconditions.checkNotNull(this.persistenceMode, "QueryParameter 'persistence' mode cannot be null"); - Preconditions.checkNotNull(this.auths, "QueryParameter 'auths' cannot be null"); - Preconditions.checkNotNull(this.expirationDate, "QueryParameter 'expirationDate' cannot be null"); - Preconditions.checkNotNull(this.logicName, "QueryParameter 'logicName' cannot be null"); - } catch (NullPointerException e) { - throw new IllegalArgumentException("Missing one or more required QueryParameters", e); - } - } - - @Override - public boolean equals(Object o) { - if (this == o) - return true; - if (o == null || getClass() != o.getClass()) - return false; - - QueryParametersImpl that = (QueryParametersImpl) o; - - if (pagesize != that.pagesize) - return false; - if (pageTimeout != that.pageTimeout) - return false; - if (isMaxResultsOverridden != that.isMaxResultsOverridden) - return false; - if (isMaxResultsOverridden) { - if (maxResultsOverride != that.maxResultsOverride) - return false; - } - if (trace != that.trace) - return false; - if (!auths.equals(that.auths)) - return false; - if (beginDate != null ? !beginDate.equals(that.beginDate) : that.beginDate != null) - return false; - if (visibility != null ? !visibility.equals(that.visibility) : that.visibility != null) - return false; - if (endDate != null ? 
!endDate.equals(that.endDate) : that.endDate != null) - return false; - if (!expirationDate.equals(that.expirationDate)) - return false; - if (logicName != null ? !logicName.equals(that.logicName) : that.logicName != null) - return false; - if (persistenceMode != that.persistenceMode) - return false; - if (!query.equals(that.query)) - return false; - if (!queryName.equals(that.queryName)) - return false; - if (requestHeaders != null ? !requestHeaders.equals(that.requestHeaders) : that.requestHeaders != null) - return false; - if (systemFrom != null ? !systemFrom.equals(that.systemFrom) : that.systemFrom != null) - return false; - return true; - } - - @Override - public int hashCode() { - int result = query.hashCode(); - result = 31 * result + queryName.hashCode(); - result = 31 * result + persistenceMode.hashCode(); - result = 31 * result + pagesize; - result = 31 * result + pageTimeout; - if (isMaxResultsOverridden) { - result = 31 * result + (int) (maxResultsOverride); - } - result = 31 * result + auths.hashCode(); - result = 31 * result + expirationDate.hashCode(); - result = 31 * result + (trace ? 1 : 0); - result = 31 * result + (beginDate != null ? beginDate.hashCode() : 0); - result = 31 * result + (endDate != null ? endDate.hashCode() : 0); - result = 31 * result + (visibility != null ? visibility.hashCode() : 0); - result = 31 * result + (logicName != null ? logicName.hashCode() : 0); - result = 31 * result + (requestHeaders != null ? requestHeaders.hashCode() : 0); - result = 31 * result + (systemFrom != null ? 
systemFrom.hashCode() : 0); - return result; - } - - public static synchronized String formatDate(Date d) throws ParseException { - String formatPattern = "yyyyMMdd HHmmss.SSS"; - SimpleDateFormat formatter = new SimpleDateFormat(formatPattern); - formatter.setLenient(false); - return formatter.format(d); - } - - protected static final String defaultStartTime = "000000"; - protected static final String defaultStartMillisec = "000"; - protected static final String defaultEndTime = "235959"; - protected static final String defaultEndMillisec = "999"; - protected static final String formatPattern = "yyyyMMdd HHmmss.SSS"; - private static final SimpleDateFormat dateFormat; - - static { - dateFormat = new SimpleDateFormat(formatPattern); - dateFormat.setLenient(false); - } - - public static Date parseStartDate(String s) throws ParseException { - return parseDate(s, defaultStartTime, defaultStartMillisec); - } - - public static Date parseEndDate(String s) throws ParseException { - return parseDate(s, defaultEndTime, defaultEndMillisec); - } - - public static synchronized Date parseDate(String s, String defaultTime, String defaultMillisec) throws ParseException { - Date d; - ParseException e = null; - synchronized (QueryParametersImpl.dateFormat) { - String str = s; - if (str.equals("+24Hours")) { - d = DateUtils.addDays(new Date(), 1); - } else { - if (StringUtils.isNotBlank(defaultTime) && !str.contains(" ")) { - str = str + " " + defaultTime; - } - - if (StringUtils.isNotBlank(defaultMillisec) && !str.contains(".")) { - str = str + "." 
+ defaultMillisec; - } - - try { - d = QueryParametersImpl.dateFormat.parse(str); - // if any time value in HHmmss was set either by default or by the user - // then we want to include ALL of that second by setting the milliseconds to 999 - if (DateUtils.getFragmentInMilliseconds(d, Calendar.HOUR_OF_DAY) > 0) { - DateUtils.setMilliseconds(d, 999); - } - } catch (ParseException pe) { - throw new RuntimeException("Unable to parse date " + str + " with format " + formatPattern, e); - } - } - } - return d; - } - - /** - * Convenience method to generate a {@code Map>} from the specified arguments. If an argument is null, it's associated parameter name - * (key) will not be added to the map, which is why Integer and Boolean wrappers are used for greater flexibility. - * - * The 'parameters' argument will not be parsed, so its internal elements will not be placed into the map. If non-null, the 'parameters' value will be - * mapped directly to the QUERY_PARAMS key. - * - * No attempt is made to determine whether or not the given arguments constitute a valid query. 
If validation is desired, see the {@link #validate(Map)} - * method - * - * @param queryLogicName - * - name of QueryLogic to use - * @param query - * - the raw query string - * @param queryName - * - client-supplied name of query - * @param queryVisibility - * - query - * @param beginDate - * - start date - * @param endDate - * - end date - * @param queryAuthorizations - * - what auths the query should run with - * @param expirationDate - * - expiration date - * @param pagesize - * - page size - * @param pageTimeout - * - page timeout - * @param maxResultsOverride - * - max results override - * @param persistenceMode - * - persistence mode - * @param systemFrom - * - system from - * @param parameters - * - additional parameters passed in as map - * @param trace - * - trace flag - * @return parameter map - * @throws ParseException - * on date parse/format error - */ - public static Map> paramsToMap(String queryLogicName, String query, String queryName, String queryVisibility, Date beginDate, - Date endDate, String queryAuthorizations, Date expirationDate, Integer pagesize, Integer pageTimeout, Long maxResultsOverride, - QueryPersistence persistenceMode, String systemFrom, String parameters, Boolean trace) throws ParseException { - - MultiValueMap p = new LinkedMultiValueMap<>(); - if (queryLogicName != null) { - p.set(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - } - if (query != null) { - p.set(QueryParameters.QUERY_STRING, query); - } - if (queryName != null) { - p.set(QueryParameters.QUERY_NAME, queryName); - } - if (queryVisibility != null) { - p.set(QueryParameters.QUERY_VISIBILITY, queryVisibility); - } - if (beginDate != null) { - p.set(QueryParameters.QUERY_BEGIN, formatDate(beginDate)); - } - if (endDate != null) { - p.set(QueryParameters.QUERY_END, formatDate(endDate)); - } - if (queryAuthorizations != null) { - // ensure that auths are comma separated with no empty values or spaces - Splitter splitter = 
Splitter.on(',').omitEmptyStrings().trimResults(); - p.set(QueryParameters.QUERY_AUTHORIZATIONS, StringUtils.join(splitter.splitToList(queryAuthorizations), ",")); - } - if (expirationDate != null) { - p.set(QueryParameters.QUERY_EXPIRATION, formatDate(expirationDate)); - } - if (pagesize != null) { - p.set(QueryParameters.QUERY_PAGESIZE, pagesize.toString()); - } - if (pageTimeout != null) { - p.set(QueryParameters.QUERY_PAGETIMEOUT, pageTimeout.toString()); - } - if (maxResultsOverride != null) { - p.set(QueryParameters.QUERY_MAX_RESULTS_OVERRIDE, maxResultsOverride.toString()); - } - if (persistenceMode != null) { - p.set(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - } - if (trace != null) { - p.set(QueryParameters.QUERY_TRACE, trace.toString()); - } - if (systemFrom != null) { - p.set(QueryParameters.QUERY_SYSTEM_FROM, systemFrom); - } - if (parameters != null) { - p.set(QueryParameters.QUERY_PARAMS, parameters); - } - - return p; - } - - @Override - public String getQuery() { - return query; - } - - @Override - public void setQuery(String query) { - this.query = query; - } - - @Override - public String getQueryName() { - return queryName; - } - - @Override - public void setQueryName(String queryName) { - this.queryName = queryName; - } - - @Override - public QueryPersistence getPersistenceMode() { - return persistenceMode; - } - - @Override - public void setPersistenceMode(QueryPersistence persistenceMode) { - this.persistenceMode = persistenceMode; - } - - @Override - public int getPagesize() { - return pagesize; - } - - @Override - public void setPagesize(int pagesize) { - this.pagesize = pagesize; - } - - @Override - public int getPageTimeout() { - return pageTimeout; - } - - @Override - public void setPageTimeout(int pageTimeout) { - this.pageTimeout = pageTimeout; - } - - @Override - public long getMaxResultsOverride() { - return maxResultsOverride; - } - - @Override - public void setMaxResultsOverride(long maxResultsOverride) { - 
this.maxResultsOverride = maxResultsOverride; - } - - @Override - public boolean isMaxResultsOverridden() { - return this.isMaxResultsOverridden; - } - - @Override - public String getAuths() { - return auths; - } - - @Override - public void setAuths(String auths) { - this.auths = auths; - } - - @Override - public Date getExpirationDate() { - return expirationDate; - } - - @Override - public void setExpirationDate(Date expirationDate) { - this.expirationDate = expirationDate; - } - - @Override - public boolean isTrace() { - return trace; - } - - @Override - public void setTrace(boolean trace) { - this.trace = trace; - } - - @Override - public Date getBeginDate() { - return beginDate; - } - - @Override - public Date getEndDate() { - return endDate; - } - - @Override - public void setBeginDate(Date beginDate) { - this.beginDate = beginDate; - } - - @Override - public void setEndDate(Date endDate) { - this.endDate = endDate; - } - - @Override - public String getVisibility() { - return visibility; - } - - @Override - public void setVisibility(String visibility) { - this.visibility = visibility; - } - - @Override - public String getLogicName() { - return logicName; - } - - @Override - public void setLogicName(String logicName) { - this.logicName = logicName; - } - - @Override - public String getSystemFrom() { - return systemFrom; - } - - @Override - public void setSystemFrom(String systemFrom) { - this.systemFrom = systemFrom; - } - - @Override - public Map> getRequestHeaders() { - return requestHeaders; - } - - @Override - public void setRequestHeaders(Map> requestHeaders) { - this.requestHeaders = requestHeaders; - } - - @Override - public Map> getUnknownParameters(Map> allQueryParameters) { - Map> p = new LinkedHashMap<>(); - for (String key : allQueryParameters.keySet()) { - if (!KNOWN_PARAMS.contains(key)) { - p.put(key, allQueryParameters.get(key)); - } - } - return p; - } - - @Override - public void clear() { - this.query = null; - this.queryName = null; - 
this.persistenceMode = QueryPersistence.TRANSIENT; - this.pagesize = 10; - this.pageTimeout = -1; - this.isMaxResultsOverridden = false; - this.auths = null; - this.expirationDate = DateUtils.addDays(new Date(), 1); - this.trace = false; - this.beginDate = null; - this.endDate = null; - this.visibility = null; - this.logicName = null; - this.requestHeaders = null; - this.systemFrom = null; - } -} diff --git a/web-services/client/src/main/java/datawave/webservice/query/QueryPersistence.java b/web-services/client/src/main/java/datawave/webservice/query/QueryPersistence.java deleted file mode 100644 index 08506ec9f06..00000000000 --- a/web-services/client/src/main/java/datawave/webservice/query/QueryPersistence.java +++ /dev/null @@ -1,7 +0,0 @@ -package datawave.webservice.query; - -public enum QueryPersistence { - - PERSISTENT, TRANSIENT; - -} diff --git a/web-services/client/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRow.java b/web-services/client/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRow.java index 4139467b8e0..b3157341f62 100644 --- a/web-services/client/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRow.java +++ b/web-services/client/src/main/java/datawave/webservice/query/cachedresults/CacheableQueryRow.java @@ -7,6 +7,7 @@ import java.util.Map; import java.util.Set; +import datawave.marking.MarkingFunctions; import datawave.webservice.query.result.event.HasMarkings; import datawave.webservice.query.util.TypedValue; @@ -26,6 +27,8 @@ public abstract class CacheableQueryRow implements HasMarkings { fixedColumnSet.add("_column_timestamps_"); } + protected MarkingFunctions markingFunctions; + public static Set getFixedColumnSet() { return Collections.unmodifiableSet(fixedColumnSet); } @@ -100,4 +103,12 @@ protected static String createColumnList(Set columnNames, Map columnVisibilityMap); public abstract void setColumnTimestampMap(Map parseColumnTimestamps); + + public MarkingFunctions 
getMarkingFunctions() { + return markingFunctions; + } + + public void setMarkingFunctions(MarkingFunctions markingFunctions) { + this.markingFunctions = markingFunctions; + } } diff --git a/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultEvent.java b/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultEvent.java index a0419689d04..65fe18277d0 100644 --- a/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultEvent.java +++ b/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultEvent.java @@ -55,6 +55,15 @@ public String toString() { return getMarkings() + ": " + (this.fields != null ? this.fields.toString() : "fields are null"); } + @Override + public Map getMarkings() { + if (markings != null) { + return markings; + } else { + return super.getMarkings(); + } + } + public void setMarkings(Map markings) { if (null != markings) { this.markings = new HashMap<>(markings); diff --git a/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultField.java b/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultField.java index 704dcc2a6f8..e059c49e7e0 100644 --- a/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultField.java +++ b/web-services/client/src/main/java/datawave/webservice/query/result/event/DefaultField.java @@ -17,6 +17,9 @@ import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.HashCodeBuilder; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; + import datawave.data.type.Type; import datawave.webservice.query.util.TypedValue; import datawave.webservice.xml.util.StringMapAdapter; @@ -81,34 +84,63 @@ public Map getMarkings() { return markings; } + public void setColumnVisibility(String columnVisibility) { + this.columnVisibility = columnVisibility; + } + + public String 
getColumnVisibility() { + return columnVisibility; + } + + public void setTimestamp(Long timestamp) { + this.timestamp = timestamp; + } + public Long getTimestamp() { return timestamp; } - public String getValueString() { - if (value.getValue() instanceof Type) { - return ((Type) value.getValue()).getDelegate().toString(); - } else if (value.getValue() instanceof String) { - return (String) value.getValue(); - } else { - return value.getValue().toString(); - } + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setTypedValue(TypedValue value) { + this.value = value; } public TypedValue getTypedValue() { return this.value; } - public Object getValueOfTypedValue() { - return (null == value) ? null : value.getValue(); + @JsonIgnore + public void setValue(Object value) { + if (value instanceof TypedValue) { + this.value = (TypedValue) value; + } else { + this.value = new TypedValue(value); + } } - public void setTimestamp(Long timestamp) { - this.timestamp = timestamp; + @JsonIgnore + @XmlTransient + public Object getValueOfTypedValue() { + return (null == value) ? 
null : value.getValue(); } - public void setValue(Object value) { - this.value = new TypedValue(value); + @JsonIgnore + @XmlTransient + public String getValueString() { + if (value.getValue() instanceof Type) { + return ((Type) value.getValue()).getDelegate().toString(); + } else if (value.getValue() instanceof String) { + return (String) value.getValue(); + } else { + return value.getValue().toString(); + } } @Override @@ -149,14 +181,6 @@ public boolean equals(Object o) { return false; } - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - @Override public Schema cachedSchema() { return SCHEMA; @@ -264,12 +288,4 @@ public int getFieldNumber(String name) { fieldMap.put("value", 5); } }; - - public String getColumnVisibility() { - return columnVisibility; - } - - public void setColumnVisibility(String columnVisibility) { - this.columnVisibility = columnVisibility; - } } diff --git a/web-services/client/src/main/java/datawave/webservice/query/result/event/ResponseObjectFactory.java b/web-services/client/src/main/java/datawave/webservice/query/result/event/ResponseObjectFactory.java index 6facccda7de..13af445d055 100644 --- a/web-services/client/src/main/java/datawave/webservice/query/result/event/ResponseObjectFactory.java +++ b/web-services/client/src/main/java/datawave/webservice/query/result/event/ResponseObjectFactory.java @@ -1,11 +1,11 @@ package datawave.webservice.query.result.event; +import datawave.microservice.query.Query; import datawave.user.AuthorizationsListBase; import datawave.webservice.dictionary.data.DataDictionaryBase; import datawave.webservice.dictionary.data.DescriptionBase; import datawave.webservice.dictionary.data.FieldsBase; import datawave.webservice.metadata.MetadataFieldBase; -import datawave.webservice.query.Query; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.result.EdgeQueryResponseBase; import 
datawave.webservice.query.result.edge.EdgeBase; diff --git a/web-services/client/src/main/java/datawave/webservice/query/result/logic/QueryLogicDescription.java b/web-services/client/src/main/java/datawave/webservice/query/result/logic/QueryLogicDescription.java index 938a64a9fb9..e7fb218d0a6 100644 --- a/web-services/client/src/main/java/datawave/webservice/query/result/logic/QueryLogicDescription.java +++ b/web-services/client/src/main/java/datawave/webservice/query/result/logic/QueryLogicDescription.java @@ -10,8 +10,7 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper; - -import com.fasterxml.jackson.annotation.JsonProperty; +import javax.xml.bind.annotation.XmlElements; @XmlAccessorType(XmlAccessType.NONE) @XmlAccessorOrder(XmlAccessOrder.ALPHABETICAL) @@ -41,16 +40,14 @@ public QueryLogicDescription() {} @XmlElement(name = "Syntax") private List querySyntax = null; - @JsonProperty("SupportedParameters") - // work-around for bug in jackson-databind @XmlElementWrapper(name = "SupportedParameters") - @XmlElement(name = "Parameter") + // work-around for bug in jackson-databind + @XmlElements(@XmlElement(name = "Parameter", type = String.class)) private List supportedParams = null; - @JsonProperty("RequiredParameters") - // work-around for bug in jackson-databind @XmlElementWrapper(name = "RequiredParameters") - @XmlElement(name = "Parameter") + // work-around for bug in jackson-databind + @XmlElements(@XmlElement(name = "Parameter", type = String.class)) private List requiredParams = null; @XmlElementWrapper(name = "ExampleQueries") @@ -136,5 +133,4 @@ public List getExampleQueries() { public void setExampleQueries(List exampleQueries) { this.exampleQueries = exampleQueries; } - } diff --git a/web-services/client/src/main/java/datawave/webservice/result/QueryImplListResponse.java b/web-services/client/src/main/java/datawave/webservice/result/QueryImplListResponse.java 
index 743e1bd9247..007c8bf8e93 100644 --- a/web-services/client/src/main/java/datawave/webservice/result/QueryImplListResponse.java +++ b/web-services/client/src/main/java/datawave/webservice/result/QueryImplListResponse.java @@ -4,7 +4,6 @@ import java.util.ArrayList; import java.util.LinkedList; import java.util.List; -import java.util.Objects; import javax.xml.bind.annotation.XmlAccessOrder; import javax.xml.bind.annotation.XmlAccessType; @@ -14,7 +13,7 @@ import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlTransient; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; import datawave.webservice.query.exception.QueryExceptionType; import io.protostuff.Input; import io.protostuff.Message; diff --git a/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescription.java b/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescription.java index 0f30bf56cff..4bdb108453a 100644 --- a/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescription.java +++ b/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescription.java @@ -10,6 +10,7 @@ import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlElementWrapper; +import javax.xml.bind.annotation.XmlElements; import javax.xml.bind.annotation.XmlRootElement; import com.fasterxml.jackson.annotation.JsonProperty; @@ -30,16 +31,14 @@ public class MapReduceJobDescription implements Serializable { @XmlElement(name = "JobType", required = true) protected String jobType = null; - @JsonProperty(value = "RequiredRuntimeParameters") // work-around for bug in jackson-databind @XmlElementWrapper(name = "RequiredRuntimeParameters") - @XmlElement(name = "Parameter") + @XmlElements(@XmlElement(name = "Parameter", type = String.class)) protected List requiredRuntimeParameters = null; - 
@JsonProperty(value = "OptionalRuntimeParameters") // work-around for bug in jackson-databind @XmlElementWrapper(name = "OptionalRuntimeParameters") - @XmlElement(name = "Parameter") + @XmlElements(@XmlElement(name = "Parameter", type = String.class)) protected List optionalRuntimeParameters = null; @JsonProperty(value = "WorkflowAlgorithmDescriptions") diff --git a/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescriptionList.java b/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescriptionList.java new file mode 100644 index 00000000000..1afcdf65d15 --- /dev/null +++ b/web-services/client/src/main/java/datawave/webservice/results/mr/MapReduceJobDescriptionList.java @@ -0,0 +1,34 @@ +package datawave.webservice.results.mr; + +import java.io.Serializable; +import java.util.ArrayList; +import java.util.List; + +import javax.xml.bind.annotation.XmlAccessOrder; +import javax.xml.bind.annotation.XmlAccessType; +import javax.xml.bind.annotation.XmlAccessorOrder; +import javax.xml.bind.annotation.XmlAccessorType; +import javax.xml.bind.annotation.XmlElement; +import javax.xml.bind.annotation.XmlElementWrapper; +import javax.xml.bind.annotation.XmlRootElement; + +import datawave.webservice.result.BaseResponse; + +@XmlRootElement(name = "MapReduceJobDescriptionList") +@XmlAccessorType(XmlAccessType.NONE) +@XmlAccessorOrder(XmlAccessOrder.ALPHABETICAL) +public class MapReduceJobDescriptionList extends BaseResponse implements Serializable { + private static final long serialVersionUID = 1L; + + @XmlElementWrapper(name = "MapReduceJobDescriptionList") + @XmlElement(name = "MapReduceJobDescription") + List results = new ArrayList(); + + public List getResults() { + return results; + } + + public void setResults(List results) { + this.results = results; + } +} diff --git a/web-services/common-util/src/main/resources/source-templates/datawave/webservice/common/result/package-info.java 
b/web-services/client/src/main/resources/source-templates/datawave/webservice/query/map/package-info.java similarity index 74% rename from web-services/common-util/src/main/resources/source-templates/datawave/webservice/common/result/package-info.java rename to web-services/client/src/main/resources/source-templates/datawave/webservice/query/map/package-info.java index 5065eff71f2..a1d62ad9bd3 100644 --- a/web-services/common-util/src/main/resources/source-templates/datawave/webservice/common/result/package-info.java +++ b/web-services/client/src/main/resources/source-templates/datawave/webservice/query/map/package-info.java @@ -1,7 +1,6 @@ @XmlSchema(namespace="${datawave.webservice.namespace}", elementFormDefault=XmlNsForm.QUALIFIED, xmlns={@XmlNs(prefix = "", namespaceURI = "${datawave.webservice.namespace}")}) -package datawave.webservice.common.result; +package datawave.webservice.query.map; import javax.xml.bind.annotation.XmlNs; import javax.xml.bind.annotation.XmlNsForm; -import javax.xml.bind.annotation.XmlSchema; - +import javax.xml.bind.annotation.XmlSchema; \ No newline at end of file diff --git a/web-services/client/src/main/resources/source-templates/datawave/webservice/query/result/metadata/package-info.java b/web-services/client/src/main/resources/source-templates/datawave/webservice/query/result/metadata/package-info.java deleted file mode 100644 index 7c64d7234a9..00000000000 --- a/web-services/client/src/main/resources/source-templates/datawave/webservice/query/result/metadata/package-info.java +++ /dev/null @@ -1,7 +0,0 @@ -@XmlSchema(namespace="${datawave.webservice.namespace}", elementFormDefault=XmlNsForm.QUALIFIED, xmlns={@XmlNs(prefix = "", namespaceURI = "${datawave.webservice.namespace}")}) -package datawave.webservice.query.result.metadata; - -import javax.xml.bind.annotation.XmlNs; -import javax.xml.bind.annotation.XmlNsForm; -import javax.xml.bind.annotation.XmlSchema; - diff --git 
a/web-services/client/src/test/java/datawave/user/UserAuthorizationsTest.java b/web-services/client/src/test/java/datawave/user/UserAuthorizationsTest.java new file mode 100644 index 00000000000..27e7adab842 --- /dev/null +++ b/web-services/client/src/test/java/datawave/user/UserAuthorizationsTest.java @@ -0,0 +1,82 @@ +package datawave.user; + +import static org.junit.Assert.assertArrayEquals; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; +import static org.powermock.api.support.membermodification.MemberMatcher.field; +import static org.powermock.api.support.membermodification.MemberMatcher.fields; + +import java.lang.reflect.Field; +import java.util.Arrays; +import java.util.TreeSet; + +import org.junit.Before; +import org.junit.Test; + +import io.protostuff.LinkedBuffer; +import io.protostuff.Message; +import io.protostuff.ProtobufIOUtil; + +public class UserAuthorizationsTest { + @Test + public void testFieldConfiguration() { + String[] expecteds = new String[] {"SCHEMA", "auths", "serialVersionUID"}; + testFieldNames(expecteds, UserAuthorizations.class); + } + + @Test + public void testSerialization() throws Exception { + TreeSet auths = new TreeSet(); + auths.add("a1"); + auths.add("a2"); + auths.add("a3"); + testRoundTrip(UserAuthorizations.class, new String[] {"auths"}, new Object[] {auths}); + } + + protected LinkedBuffer buffer; + + @Before + public void setUp() { + buffer = LinkedBuffer.allocate(4096); + } + + protected > void testFieldNames(String[] fieldNames, Class clazz) { + Field[] fields = fields(clazz); + assertEquals("The number of fields in " + clazz.getName() + " has changed. 
Please update " + getClass().getName() + ".", fieldNames.length, + fields.length); + + String[] actualFieldNames = new String[fields.length]; + for (int i = 0; i < fields.length; ++i) + actualFieldNames[i] = fields[i].getName(); + + Arrays.sort(fieldNames); + Arrays.sort(actualFieldNames); + assertArrayEquals("Serialization/deserialization of " + clazz.getName() + " failed.", fieldNames, actualFieldNames); + } + + protected > void testRoundTrip(Class clazz, String[] fieldNames, Object[] fieldValues) throws Exception { + assertNotNull(fieldNames); + assertNotNull(fieldValues); + assertEquals(fieldNames.length, fieldValues.length); + + T original = clazz.newInstance(); + for (int i = 0; i < fieldNames.length; ++i) + field(clazz, fieldNames[i]).set(original, fieldValues[i]); + + T reconstructed = roundTrip(original); + for (int i = 0; i < fieldNames.length; ++i) + assertEquals(fieldValues[i], field(clazz, fieldNames[i]).get(reconstructed)); + } + + protected > T roundTrip(T message) throws Exception { + byte[] bytes = toProtobufBytes(message); + T response = message.cachedSchema().newMessage(); + ProtobufIOUtil.mergeFrom(bytes, response, message.cachedSchema()); + return response; + } + + protected > byte[] toProtobufBytes(T message) { + return ProtobufIOUtil.toByteArray(message, message.cachedSchema(), buffer); + } + +} diff --git a/web-services/client/src/test/java/datawave/webservice/query/QueryParametersTest.java b/web-services/client/src/test/java/datawave/webservice/query/QueryParametersTest.java index ed250e175ae..dbf6357bb37 100644 --- a/web-services/client/src/test/java/datawave/webservice/query/QueryParametersTest.java +++ b/web-services/client/src/test/java/datawave/webservice/query/QueryParametersTest.java @@ -11,6 +11,10 @@ import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.QueryParameters; +import 
datawave.microservice.query.QueryPersistence; + public class QueryParametersTest { private QueryParameters qp; @@ -58,7 +62,7 @@ public void beforeTests() { } private QueryParameters buildQueryParameters() { - QueryParametersImpl qpBuilder = new QueryParametersImpl(); + DefaultQueryParameters qpBuilder = new DefaultQueryParameters(); qpBuilder.setAuths(auths); qpBuilder.setBeginDate(beginDate); qpBuilder.setEndDate(endDate); @@ -99,8 +103,8 @@ public void testAllTheParams() { // Test and validate date formatting, parsing try { - Assert.assertEquals(formatDateCheck, QueryParametersImpl.formatDate(beginDate)); - Assert.assertEquals(parseDateCheck, QueryParametersImpl.parseStartDate(QueryParametersImpl.formatDate(beginDate))); + Assert.assertEquals(formatDateCheck, DefaultQueryParameters.formatDate(beginDate)); + Assert.assertEquals(parseDateCheck, DefaultQueryParameters.parseStartDate(DefaultQueryParameters.formatDate(beginDate))); } catch (ParseException e) { log.error(e); } @@ -115,10 +119,10 @@ public void testAllTheParams() { params.add(QueryParameters.QUERY_PERSISTENCE, "PERSISTENT"); params.add(QueryParameters.QUERY_PAGESIZE, "10"); params.add(QueryParameters.QUERY_AUTHORIZATIONS, "auths"); - params.add(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expDate).toString()); + params.add(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expDate).toString()); params.add(QueryParameters.QUERY_TRACE, "trace"); - params.add(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate).toString()); - params.add(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate).toString()); + params.add(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate).toString()); + params.add(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate).toString()); params.add(QueryParameters.QUERY_PARAMS, "params"); params.add(QueryParameters.QUERY_LOGIC_NAME, "logicName"); } catch (ParseException e) { diff 
--git a/web-services/client/src/test/java/datawave/webservice/query/TestQueryImpl.java b/web-services/client/src/test/java/datawave/webservice/query/TestQueryImpl.java index 5205127c571..8c1b6eaf3fb 100644 --- a/web-services/client/src/test/java/datawave/webservice/query/TestQueryImpl.java +++ b/web-services/client/src/test/java/datawave/webservice/query/TestQueryImpl.java @@ -7,7 +7,8 @@ import org.junit.Before; import org.junit.Test; -import datawave.webservice.query.QueryImpl.Parameter; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryImpl.Parameter; public class TestQueryImpl { diff --git a/web-services/client/src/test/java/datawave/webservice/query/TestQueryParameters.java b/web-services/client/src/test/java/datawave/webservice/query/TestQueryParameters.java index 4d349c98f33..24ae4377a35 100644 --- a/web-services/client/src/test/java/datawave/webservice/query/TestQueryParameters.java +++ b/web-services/client/src/test/java/datawave/webservice/query/TestQueryParameters.java @@ -11,20 +11,23 @@ import org.springframework.util.LinkedMultiValueMap; import org.springframework.util.MultiValueMap; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.QueryParameters; + public class TestQueryParameters { - private QueryParametersImpl qp; + private DefaultQueryParameters qp; private MultiValueMap parameters; @Before public void setup() { - qp = new QueryParametersImpl(); + qp = new DefaultQueryParameters(); parameters = new LinkedMultiValueMap<>(); - parameters.set(QueryParameters.QUERY_AUTHORIZATIONS, "ALL"); - parameters.set(QueryParameters.QUERY_NAME, "Test"); - parameters.set(QueryParameters.QUERY_PERSISTENCE, "TRANSIENT"); - parameters.set(QueryParameters.QUERY_STRING, "FOO == BAR"); - parameters.set(QueryParameters.QUERY_LOGIC_NAME, "LogicName"); + parameters.add(QueryParameters.QUERY_AUTHORIZATIONS, "ALL"); + parameters.add(QueryParameters.QUERY_NAME, "Test"); + 
parameters.add(QueryParameters.QUERY_PERSISTENCE, "TRANSIENT"); + parameters.add(QueryParameters.QUERY_STRING, "FOO == BAR"); + parameters.add(QueryParameters.QUERY_LOGIC_NAME, "LogicName"); } @Test @@ -38,7 +41,7 @@ public void testNullExpirationDate() { @Test public void test24HoursExpirationDate() { - parameters.set(QueryParameters.QUERY_EXPIRATION, "+24Hours"); + parameters.add(QueryParameters.QUERY_EXPIRATION, "+24Hours"); qp.validate(parameters); SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd HHmmss"); @@ -51,7 +54,7 @@ public void testDaysExpirationDate() { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd"); SimpleDateFormat msFormat = new SimpleDateFormat("yyyyMMdd HHmmss.SSS"); String expDateString = format.format(DateUtils.addDays(new Date(), 1)); - parameters.set(QueryParameters.QUERY_EXPIRATION, expDateString); + parameters.add(QueryParameters.QUERY_EXPIRATION, expDateString); qp.validate(parameters); assertEquals(expDateString + " 235959.999", msFormat.format(qp.getExpirationDate())); } @@ -61,7 +64,7 @@ public void testTimeExpirationDate() { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd HHmmss"); SimpleDateFormat msFormat = new SimpleDateFormat("yyyyMMdd HHmmss.SSS"); String expDateString = format.format(DateUtils.addDays(new Date(), 1)); - parameters.set(QueryParameters.QUERY_EXPIRATION, expDateString); + parameters.add(QueryParameters.QUERY_EXPIRATION, expDateString); qp.validate(parameters); assertEquals(expDateString + ".999", msFormat.format(qp.getExpirationDate())); } @@ -70,7 +73,7 @@ public void testTimeExpirationDate() { public void testTimeMillisExpirationDate() { SimpleDateFormat format = new SimpleDateFormat("yyyyMMdd HHmmss.SSS"); String expDateString = format.format(DateUtils.addDays(new Date(), 1)); - parameters.set(QueryParameters.QUERY_EXPIRATION, expDateString); + parameters.add(QueryParameters.QUERY_EXPIRATION, expDateString); qp.validate(parameters); assertEquals(expDateString, 
format.format(qp.getExpirationDate())); } @@ -82,7 +85,7 @@ public void testStartDateNoTime() { String startDateStr = format.format(new Date()); parameters.remove(QueryParameters.QUERY_BEGIN); - parameters.set(QueryParameters.QUERY_BEGIN, startDateStr); + parameters.add(QueryParameters.QUERY_BEGIN, startDateStr); qp.validate(parameters); assertEquals(startDateStr + " 000000.000", msFormat.format(qp.getBeginDate())); } @@ -94,7 +97,7 @@ public void testStartDateNoMs() { String startDateStr = format.format(new Date()); parameters.remove(QueryParameters.QUERY_BEGIN); - parameters.set(QueryParameters.QUERY_BEGIN, startDateStr); + parameters.add(QueryParameters.QUERY_BEGIN, startDateStr); qp.validate(parameters); assertEquals(startDateStr + ".000", msFormat.format(qp.getBeginDate())); } @@ -106,7 +109,7 @@ public void testEndDateNoTime() { String endDateStr = format.format(new Date()); parameters.remove(QueryParameters.QUERY_END); - parameters.set(QueryParameters.QUERY_END, endDateStr); + parameters.add(QueryParameters.QUERY_END, endDateStr); qp.validate(parameters); assertEquals(endDateStr + " 235959.999", msFormat.format(qp.getEndDate())); } @@ -118,7 +121,7 @@ public void testEndDateNoMs() { String endDateStr = format.format(new Date()); parameters.remove(QueryParameters.QUERY_END); - parameters.set(QueryParameters.QUERY_END, endDateStr); + parameters.add(QueryParameters.QUERY_END, endDateStr); qp.validate(parameters); assertEquals(endDateStr + ".999", msFormat.format(qp.getEndDate())); } diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index 9448c18a0e9..ca2930bcaa9 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-common-util jar @@ -31,6 +31,29 @@ datawave-common ${project.version}
    + + gov.nsa.datawave.core + datawave-core-common + ${project.version} + + + gov.nsa.datawave.core + datawave-core-common-util + ${project.version} + jboss + + + gov.nsa.datawave.core + datawave-core-connection-pool + + + + + gov.nsa.datawave.core + datawave-core-connection-pool + ${project.version} + jboss + gov.nsa.datawave.microservice accumulo-utils diff --git a/web-services/common-util/src/main/java/datawave/security/authorization/UserOperations.java b/web-services/common-util/src/main/java/datawave/security/authorization/UserOperations.java deleted file mode 100644 index 0f5b090b46e..00000000000 --- a/web-services/common-util/src/main/java/datawave/security/authorization/UserOperations.java +++ /dev/null @@ -1,60 +0,0 @@ -package datawave.security.authorization; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; - -import com.google.common.collect.HashMultimap; -import com.google.common.collect.Multimap; - -import datawave.user.AuthorizationsListBase; -import datawave.webservice.result.GenericResponse; - -/** - * A user operations service is one that can pass calls off to another external user operations endpoint - */ -public interface UserOperations { - - AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException; - - GenericResponse flushCachedCredentials(Object callerObject) throws AuthorizationException; - - default DatawavePrincipal getRemoteUser(DatawavePrincipal principal) throws AuthorizationException { - // get the effective authorizations for this user - AuthorizationsListBase auths = listEffectiveAuthorizations(principal); - - // create a new set of proxied users - List mappedUsers = new ArrayList<>(); - Map localUsers = principal.getProxiedUsers().stream() - .collect(Collectors.toMap(DatawaveUser::getDn, Function.identity(), (v1, v2) -> v2)); - - // 
create a mapped user for the primary user with the auths returned by listEffectiveAuthorizations - SubjectIssuerDNPair primaryDn = SubjectIssuerDNPair.of(auths.getUserDn(), auths.getIssuerDn()); - DatawaveUser localUser = localUsers.get(primaryDn); - mappedUsers.add(new DatawaveUser(primaryDn, localUser.getUserType(), auths.getAllAuths(), auths.getAuthMapping().keySet(), - toMultimap(auths.getAuthMapping()), System.currentTimeMillis())); - - // for each proxied user, create a new user with the auths returned by listEffectiveAuthorizations - Map> authMap = auths.getAuths(); - for (Map.Entry> entry : authMap.entrySet()) { - SubjectIssuerDNPair pair = SubjectIssuerDNPair.of(entry.getKey().subjectDN, entry.getKey().issuerDN); - if (!pair.equals(primaryDn)) { - mappedUsers.add(new DatawaveUser(pair, DatawaveUser.UserType.SERVER, entry.getValue(), null, null, System.currentTimeMillis())); - } - } - - // return a principal with the mapped users - return new DatawavePrincipal(mappedUsers); - } - - static Multimap toMultimap(Map> map) { - Multimap multimap = HashMultimap.create(); - map.entrySet().stream().forEach(e -> multimap.putAll(e.getKey(), e.getValue())); - return multimap; - } - -} diff --git a/web-services/common-util/src/main/java/datawave/security/util/DnUtils.java b/web-services/common-util/src/main/java/datawave/security/util/DnUtils.java index 14c65ddf508..037f1a48f16 100644 --- a/web-services/common-util/src/main/java/datawave/security/util/DnUtils.java +++ b/web-services/common-util/src/main/java/datawave/security/util/DnUtils.java @@ -31,9 +31,11 @@ public class DnUtils { /** Parsed NPE OU identifiers */ static final List NPE_OU_LIST; + private static final Logger log = LoggerFactory.getLogger(DnUtils.class); private static final datawave.microservice.security.util.DnUtils dnUtils; + static { InputStream in = null; try { diff --git a/web-services/common-util/src/main/java/datawave/webservice/query/cache/ResultsPage.java 
b/web-services/common-util/src/main/java/datawave/webservice/query/cache/ResultsPage.java deleted file mode 100644 index b0fcad2497c..00000000000 --- a/web-services/common-util/src/main/java/datawave/webservice/query/cache/ResultsPage.java +++ /dev/null @@ -1,45 +0,0 @@ -package datawave.webservice.query.cache; - -import java.util.ArrayList; -import java.util.List; - -/** - * - */ -public class ResultsPage { - public enum Status { - NONE, PARTIAL, COMPLETE - }; - - private List results = null; - private Status status = null; - - public ResultsPage() { - this(new ArrayList<>()); - } - - public ResultsPage(List c) { - this(c, (c.isEmpty() ? Status.NONE : Status.COMPLETE)); - } - - public ResultsPage(List c, Status s) { - setResults(c); - setStatus(s); - } - - public Status getStatus() { - return status; - } - - public void setStatus(Status status) { - this.status = status; - } - - public List getResults() { - return results; - } - - public void setResults(List results) { - this.results = results; - } -} diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index f35b0cf06b1..a21a7d039ef 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-common ejb @@ -43,6 +43,17 @@ gov.nsa.datawave datawave-in-memory-accumulo + + gov.nsa.datawave.core + datawave-core-common + ${project.version} + + + gov.nsa.datawave.core + datawave-core-connection-pool + ${project.version} + jboss + gov.nsa.datawave.microservice audit-api diff --git a/web-services/common/src/main/java/datawave/webservice/common/audit/AuditBean.java b/web-services/common/src/main/java/datawave/webservice/common/audit/AuditBean.java index 453688a386b..6a237bee723 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/audit/AuditBean.java +++ b/web-services/common/src/main/java/datawave/webservice/common/audit/AuditBean.java @@ -14,6 +14,7 @@ import 
org.apache.log4j.Logger; import org.jboss.resteasy.annotations.GZIP; +import org.springframework.util.MultiValueMap; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.query.exception.DatawaveErrorCode; @@ -55,7 +56,7 @@ public VoidResponse auditRest(MultivaluedMap parameters) { } } - public String audit(MultivaluedMap parameters) throws Exception { + public String audit(MultiValueMap parameters) throws Exception { return auditService.audit(auditParameterBuilder.convertAndValidate(parameters)); } } diff --git a/web-services/common/src/main/java/datawave/webservice/common/audit/AuditParameterBuilder.java b/web-services/common/src/main/java/datawave/webservice/common/audit/AuditParameterBuilder.java index 0b7ac168bf2..6bd8c63b8dd 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/audit/AuditParameterBuilder.java +++ b/web-services/common/src/main/java/datawave/webservice/common/audit/AuditParameterBuilder.java @@ -4,6 +4,8 @@ import javax.ws.rs.core.MultivaluedMap; +import org.springframework.util.MultiValueMap; + /** * A utility to extract parameters from a REST call and convert them, as necessary, into parameters that are required by the auditor. */ @@ -16,7 +18,7 @@ public interface AuditParameterBuilder { * the query parameters * @return validated parameters */ - Map convertAndValidate(MultivaluedMap queryParameters); + Map convertAndValidate(MultiValueMap queryParameters); /** * Builds validated audit parameters for a direct call to the audit service. 
That is, the parameters passed in are expected to be those used by the audit diff --git a/web-services/common/src/main/java/datawave/webservice/common/audit/DefaultAuditParameterBuilder.java b/web-services/common/src/main/java/datawave/webservice/common/audit/DefaultAuditParameterBuilder.java index 8b36f149e3b..80c0bda55a9 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/audit/DefaultAuditParameterBuilder.java +++ b/web-services/common/src/main/java/datawave/webservice/common/audit/DefaultAuditParameterBuilder.java @@ -7,14 +7,16 @@ import org.jboss.resteasy.specimpl.MultivaluedMapImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.util.MultiValueMap; -import datawave.webservice.query.QueryParameters; +import datawave.core.common.audit.PrivateAuditConstants; +import datawave.microservice.query.QueryParameters; public class DefaultAuditParameterBuilder implements AuditParameterBuilder { private Logger log = LoggerFactory.getLogger(getClass().getName()); @Override - public Map convertAndValidate(MultivaluedMap queryParameters) { + public Map convertAndValidate(MultiValueMap queryParameters) { AuditParameters validatedParams = new AuditParameters(); MultivaluedMapImpl auditParams = new MultivaluedMapImpl<>(); diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheBean.java b/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheBean.java new file mode 100644 index 00000000000..0c9bc484c50 --- /dev/null +++ b/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheBean.java @@ -0,0 +1,213 @@ +package datawave.webservice.common.cache; + +import java.util.List; + +import javax.annotation.PostConstruct; +import javax.annotation.PreDestroy; +import javax.annotation.Resource; +import javax.annotation.security.DeclareRoles; +import javax.annotation.security.RolesAllowed; +import javax.annotation.security.RunAs; 
+import javax.ejb.Local; +import javax.ejb.LocalBean; +import javax.ejb.Lock; +import javax.ejb.LockType; +import javax.ejb.Schedule; +import javax.ejb.Singleton; +import javax.ejb.Startup; +import javax.enterprise.concurrent.ManagedExecutorService; +import javax.inject.Inject; +import javax.interceptor.Interceptors; +import javax.jms.Destination; +import javax.jms.JMSContext; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; + +import org.apache.deltaspike.core.api.config.ConfigProperty; +import org.apache.deltaspike.core.api.exclude.Exclude; +import org.apache.log4j.Logger; +import org.jboss.resteasy.annotations.GZIP; + +import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.annotation.Required; +import datawave.configuration.DatawaveEmbeddedProjectStageHolder; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.cache.AccumuloTableCacheImpl; +import datawave.core.common.cache.AccumuloTableCacheProperties; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.result.AccumuloTableCacheStatus; +import datawave.core.common.result.TableCacheDescription; +import datawave.interceptor.RequiredInterceptor; +import datawave.webservice.common.exception.DatawaveWebApplicationException; +import datawave.webservice.query.exception.QueryException; +import datawave.webservice.result.VoidResponse; + +/** + * Object that caches data from Accumulo tables. 
+ */ +@Path("/Common/AccumuloTableCache") +@RunAs("InternalUser") +@RolesAllowed({"AuthorizedUser", "AuthorizedQueryServer", "AuthorizedServer", "InternalUser", "Administrator", "JBossAdministrator"}) +@DeclareRoles({"AuthorizedUser", "AuthorizedQueryServer", "AuthorizedServer", "InternalUser", "Administrator", "JBossAdministrator"}) +@Local(AccumuloTableCache.class) +@LocalBean +@Startup +// tells the container to initialize on startup +@Singleton +// this is a singleton bean in the container +@Lock(LockType.READ) +@Exclude(ifProjectStage = DatawaveEmbeddedProjectStageHolder.DatawaveEmbedded.class) +public class AccumuloTableCacheBean implements AccumuloTableCache { + + private final Logger log = Logger.getLogger(this.getClass()); + + @Inject + private JMSContext jmsContext; + + @Resource(mappedName = "java:/topic/AccumuloTableCache") + private Destination cacheTopic; + + @Resource + private ManagedExecutorService executorService; + + @Inject + @ConfigProperty(name = "dw.warehouse.zookeepers") + private String zookeepers = null; + @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") + @Inject + @ConfigProperty(name = "dw.cache.tableNames", defaultValue = "DatawaveMetadata,QueryMetrics_m,errorMetadata") + private List tableNames; + @Inject + @ConfigProperty(name = "dw.cache.pool", defaultValue = "WAREHOUSE") + private String poolName; + @Inject + @ConfigProperty(name = "dw.cache.reloadInterval", defaultValue = "86400000") + private long reloadInterval; + @Inject + @ConfigProperty(name = "dw.cacheCoordinator.evictionReaperIntervalSeconds", defaultValue = "30") + private int evictionReaperIntervalInSeconds; + @Inject + @ConfigProperty(name = "dw.cacheCoordinator.numLocks", defaultValue = "300") + private int numLocks; + @Inject + @ConfigProperty(name = "dw.cacheCoordinator.maxRetries", defaultValue = "10") + private int maxRetries; + + private AccumuloTableCacheImpl tableCache; + + public AccumuloTableCacheBean() {} + + @PostConstruct + private void setup() { + 
AccumuloTableCacheProperties config = new AccumuloTableCacheProperties().withTableNames(tableNames).withPoolName(poolName).withNumLocks(numLocks) + .withZookeepers(zookeepers).withMaxRetries(maxRetries).withReloadInterval(reloadInterval) + .withEvictionReaperIntervalInSeconds(evictionReaperIntervalInSeconds); + + log.debug("Called AccumuloTableCacheBean and accumuloTableCacheConfiguration = " + config); + + tableCache = new AccumuloTableCacheImpl(executorService, config); + } + + @Override + public void setConnectionFactory(AccumuloConnectionFactory connectionFactory) { + tableCache.setConnectionFactory(connectionFactory); + } + + @Override + public InMemoryInstance getInstance() { + return tableCache.getInstance(); + } + + @Schedule(hour = "*", minute = "*", second = "1", persistent = false) + @Override + public void submitReloadTasks() { + tableCache.submitReloadTasks(); + } + + @PreDestroy + public void stop() { + close(); + } + + @Override + public void close() { + tableCache.close(); + tableCache = null; + } + + /** + * JBossAdministrator or Administrator credentials required. + * + * @param tableName + * the name of the table for which the cached version is to be reloaded + * @return datawave.webservice.result.VoidResponse + * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user + * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain + * @RequestHeader query-session-id session id value used for load balancing purposes. 
query-session-id can be placed in the request in a Cookie header or as + * a query parameter + * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization + * + * @HTTP 200 success + * @HTTP 404 queries not found using {@code id} + * @HTTP 500 internal server error + */ + @GET + @Path("/reload/{tableName}") + @Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", + "application/x-protostuff"}) + @GZIP + @Interceptors(RequiredInterceptor.class) + public VoidResponse reloadCache(@Required("tableName") @PathParam("tableName") String tableName) { + VoidResponse response = new VoidResponse(); + try { + reloadTableCache(tableName); + } catch (Exception e) { + response.addException(new QueryException(e).getBottomQueryException()); + throw new DatawaveWebApplicationException(e, response); + } + return response; + } + + @Override + public void reloadTableCache(String tableName) { + tableCache.reloadTableCache(tableName); + sendCacheReloadMessage(tableName); + } + + /** + * JBossAdministrator or Administrator credentials required. + * + * @return datawave.webservice.common.result.AccumuloTableCacheStatus + * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user + * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain + * @RequestHeader query-session-id session id value used for load balancing purposes. 
query-session-id can be placed in the request in a Cookie header or as + * a query parameter + * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization + * + * @HTTP 200 success + */ + @GET + @Path("/") + @Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", + "application/x-protostuff", "text/html"}) + @GZIP + public AccumuloTableCacheStatus getStatus() { + AccumuloTableCacheStatus response = new AccumuloTableCacheStatus(); + response.getCaches().addAll(getTableCaches()); + return response; + } + + @Override + public List getTableCaches() { + return tableCache.getTableCaches(); + } + + private void sendCacheReloadMessage(String tableName) { + log.warn("table:" + tableName + " sending cache reload message about table " + tableName); + + jmsContext.createProducer().send(cacheTopic, tableName); + } + +} diff --git a/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheConfiguration.java b/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheConfiguration.java deleted file mode 100644 index b6b98f24776..00000000000 --- a/web-services/common/src/main/java/datawave/webservice/common/cache/AccumuloTableCacheConfiguration.java +++ /dev/null @@ -1,53 +0,0 @@ -package datawave.webservice.common.cache; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import javax.annotation.PostConstruct; -import javax.inject.Inject; - -import org.apache.deltaspike.core.api.config.ConfigProperty; - -public class AccumuloTableCacheConfiguration { - - @Inject - @ConfigProperty(name = "dw.warehouse.zookeepers") - private String zookeepers = null; - @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") - @Inject - @ConfigProperty(name = "dw.cache.tableNames", defaultValue = 
"DatawaveMetadata,QueryMetrics_m,errorMetadata") - private List tableNames; - @Inject - @ConfigProperty(name = "dw.cache.pool", defaultValue = "WAREHOUSE") - private String poolName; - @Inject - @ConfigProperty(name = "dw.cache.reloadInterval", defaultValue = "86400000") - private long reloadInterval; - - private Map caches = new HashMap<>(); - - @PostConstruct - private void initializeCaches() { - for (String tableName : tableNames) { - BaseTableCache cache = new BaseTableCache(); - cache.setTableName(tableName); - cache.setConnectionPoolName(poolName); - cache.setReloadInterval(reloadInterval); - caches.put(tableName, cache); - } - } - - public String getZookeepers() { - return zookeepers; - } - - public void setZookeepers(String zookeepers) { - this.zookeepers = zookeepers; - } - - public Map getCaches() { - return Collections.unmodifiableMap(caches); - } -} diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactoryBean.java b/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactoryBean.java index cac20e62908..87cf86b19e3 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactoryBean.java +++ b/web-services/common/src/main/java/datawave/webservice/common/connection/AccumuloConnectionFactoryBean.java @@ -1,17 +1,9 @@ package datawave.webservice.common.connection; import java.security.Principal; -import java.text.SimpleDateFormat; -import java.util.ArrayList; import java.util.Collection; -import java.util.Collections; -import java.util.Date; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; -import java.util.Map.Entry; -import java.util.Set; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; @@ -20,6 +12,7 @@ import javax.annotation.security.PermitAll; import javax.annotation.security.RolesAllowed; import javax.annotation.security.RunAs; +import 
javax.ejb.EJB; import javax.ejb.EJBContext; import javax.ejb.Local; import javax.ejb.LocalBean; @@ -35,26 +28,21 @@ import javax.ws.rs.Produces; import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.client.admin.SecurityOperations; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; -import org.apache.accumulo.core.util.Pair; -import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.mutable.MutableInt; +import org.apache.deltaspike.core.api.config.ConfigProperty; import org.apache.deltaspike.core.api.exclude.Exclude; import org.apache.deltaspike.core.api.jmx.JmxManaged; import org.apache.deltaspike.core.api.jmx.MBean; import org.apache.log4j.Logger; import org.jboss.resteasy.annotations.GZIP; -import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactoryImpl; +import datawave.core.common.result.ConnectionFactoryResponse; +import datawave.core.common.result.ConnectionPool; import datawave.security.authorization.DatawavePrincipal; -import datawave.webservice.common.cache.AccumuloTableCache; -import datawave.webservice.common.connection.config.ConnectionPoolConfiguration; import datawave.webservice.common.connection.config.ConnectionPoolsConfiguration; -import datawave.webservice.common.result.Connection; -import datawave.webservice.common.result.ConnectionFactoryResponse; -import datawave.webservice.common.result.ConnectionPool; @Path("/Common/AccumuloConnectionFactory") @Produces({"application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "text/html"}) @@ -82,126 +70,42 @@ public class AccumuloConnectionFactoryBean implements AccumuloConnectionFactory @Resource private EJBContext context; - @Inject + @EJB 
private AccumuloTableCache cache; - private Map> pools; + @Inject + @ConfigProperty(name = "dw.connectionPool.default", defaultValue = "WAREHOUSE") + private String defaultPool = null; + @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") @Inject - private ConnectionPoolsConfiguration connectionPoolsConfiguration; + @ConfigProperty(name = "dw.connectionPool.pools", defaultValue = "WAREHOUSE,METRICS") + private List poolNames; - private String defaultPoolName = null; + private AccumuloConnectionFactory factory; @PostConstruct public void init() { - this.pools = new HashMap<>(); - - if (this.connectionPoolsConfiguration == null) { - log.error("connectionPoolsConfiguration was null - aborting init()"); - return; - } - HashMap> instances = new HashMap<>(); - this.defaultPoolName = connectionPoolsConfiguration.getDefaultPool(); - for (Entry entry : connectionPoolsConfiguration.getPools().entrySet()) { - Map p = new HashMap<>(); - ConnectionPoolConfiguration conf = entry.getValue(); - p.put(Priority.ADMIN, createConnectionPool(conf, conf.getAdminPriorityPoolSize())); - p.put(Priority.HIGH, createConnectionPool(conf, conf.getHighPriorityPoolSize())); - p.put(Priority.NORMAL, createConnectionPool(conf, conf.getNormalPriorityPoolSize())); - p.put(Priority.LOW, createConnectionPool(conf, conf.getLowPriorityPoolSize())); - this.pools.put(entry.getKey(), Collections.unmodifiableMap(p)); - try { - setupMockAccumuloUser(conf, p.get(Priority.NORMAL), instances); - } catch (Exception e) { - log.error("Error configuring mock accumulo user for AccumuloConnectionFactoryBean.", e); - } - - // Initialize the distributed tracing system. This needs to be done once at application startup. Since - // it is tied to Accumulo connections, we do it here in this singleton bean. 
- String appName = "datawave_ws"; - try { - appName = System.getProperty("app", "datawave_ws"); - } catch (SecurityException e) { - log.warn("Unable to retrieve system property \"app\": " + e.getMessage()); - } - } - - cache.setConnectionFactory(this); + ConnectionPoolsConfiguration config = new ConnectionPoolsConfiguration().withDefaultPool(defaultPool).withPoolNames(poolNames).build(); + factory = AccumuloConnectionFactoryImpl.getInstance(cache, config); } - private AccumuloClientPool createConnectionPool(ConnectionPoolConfiguration conf, int limit) { - AccumuloClientPoolFactory factory = new AccumuloClientPoolFactory(conf.getUsername(), conf.getPassword(), conf.getZookeepers(), conf.getInstance()); - AccumuloClientPool pool = new AccumuloClientPool(factory); - pool.setTestOnBorrow(true); - pool.setTestOnReturn(true); - pool.setMaxTotal(limit); - pool.setMaxIdle(-1); - - try { - pool.addObject(); - } catch (Exception e) { - log.error("Error pre-populating connection pool", e); - } - - return pool; + @PreDestroy + public void tearDown() { + close(); } - private void setupMockAccumuloUser(ConnectionPoolConfiguration conf, AccumuloClientPool pool, HashMap> instances) - throws Exception { - AccumuloClient c = null; + @Override + public void close() { try { - c = pool.borrowObject(new HashMap<>()); - - Pair pair = instances.get(cache.getInstance().getInstanceID()); - String user = "root"; - PasswordToken password = new PasswordToken(new byte[0]); - if (pair != null && user.equals(pair.getFirst())) - password = pair.getSecond(); - SecurityOperations security = cache.getInstance().getConnector(user, password).securityOperations(); - Set users = security.listLocalUsers(); - if (!users.contains(conf.getUsername())) { - security.createLocalUser(conf.getUsername(), new PasswordToken(conf.getPassword())); - security.changeUserAuthorizations(conf.getUsername(), c.securityOperations().getUserAuthorizations(conf.getUsername())); - } else { - PasswordToken newPassword = new 
PasswordToken(conf.getPassword()); - // If we're changing root's password, and trying to change then keep track of that. If we have multiple instances - // that specify mismatching passwords, then throw an error. - if (user.equals(conf.getUsername())) { - if (pair != null && !newPassword.equals(pair.getSecond())) - throw new IllegalStateException( - "Invalid AccumuloConnectionFactoryBean configuration--multiple pools are configured with different root passwords!"); - instances.put(cache.getInstance().getInstanceID(), new Pair<>(conf.getUsername(), newPassword)); - } - // match root's password on mock to the password on the actual Accumulo instance - security.changeLocalUserPassword(conf.getUsername(), newPassword); - } + factory.close(); + } catch (Exception e) { + throw new RuntimeException(e); } finally { - pool.returnObject(c); - } - } - - @PreDestroy - public void tearDown() { - for (Entry> entry : this.pools.entrySet()) { - for (Entry poolEntry : entry.getValue().entrySet()) { - try { - poolEntry.getValue().close(); - } catch (Exception e) { - log.error("Error closing Accumulo Connection Pool: " + e); - } - } + factory = null; } } - /** - * @param poolName - * the name of the pool to query - * @return name of the user used in the connection pools - */ - public String getConnectionUserName(String poolName) { - return connectionPoolsConfiguration.getPools().get(poolName).getUsername(); - } - /** * Gets a client from the pool with the assigned priority * @@ -209,12 +113,19 @@ public String getConnectionUserName(String poolName) { * * @param priority * the client's Priority + * @param trackingMap + * a tracking map * @return accumulo client * @throws Exception * if there are issues */ public AccumuloClient getClient(Priority priority, Map trackingMap) throws Exception { - return getClient(null, priority, trackingMap); + return getClient(getCurrentUserDN(), getCurrentProxyServers(), priority, trackingMap); + } + + @Override + public AccumuloClient getClient(String 
userDN, Collection proxyServers, Priority priority, Map trackingMap) throws Exception { + return factory.getClient(userDN, proxyServers, priority, trackingMap); } /** @@ -224,42 +135,20 @@ public AccumuloClient getClient(Priority priority, Map trackingMa * the name of the pool to retrieve the client from * @param priority * the priority of the client - * @param tm + * @param trackingMap * the tracking map * @return Accumulo client * @throws Exception * if there are issues */ - public AccumuloClient getClient(final String cpn, final Priority priority, final Map tm) throws Exception { - final Map trackingMap = (tm != null) ? tm : new HashMap<>(); - final String poolName = (cpn != null) ? cpn : defaultPoolName; - - if (!priority.equals(Priority.ADMIN)) { - final String userDN = getCurrentUserDN(); - if (userDN != null) - trackingMap.put("user.dn", userDN); + public AccumuloClient getClient(final String cpn, final Priority priority, final Map trackingMap) throws Exception { + return getClient(getCurrentUserDN(), getCurrentProxyServers(), cpn, priority, trackingMap); + } - final Collection proxyServers = getCurrentProxyServers(); - if (proxyServers != null) - trackingMap.put("proxyServers", StringUtils.join(proxyServers, " -> ")); - } - AccumuloClientPool pool = pools.get(poolName).get(priority); - AccumuloClient c = pool.borrowObject(trackingMap); - AccumuloClient mock = new InMemoryAccumuloClient(pool.getFactory().getUsername(), cache.getInstance()); - mock.securityOperations().changeLocalUserPassword(pool.getFactory().getUsername(), new PasswordToken(pool.getFactory().getPassword())); - WrappedAccumuloClient wrappedAccumuloClient = new WrappedAccumuloClient(c, mock); - if (connectionPoolsConfiguration.getClientConfiguration(poolName) != null) { - wrappedAccumuloClient.setClientConfig(connectionPoolsConfiguration.getClientConfiguration(poolName).getConfiguration()); - } - String classLoaderContext = System.getProperty("dw.accumulo.classLoader.context"); - if 
(classLoaderContext != null) { - wrappedAccumuloClient.setScannerClassLoaderContext(classLoaderContext); - } - String timeout = System.getProperty("dw.accumulo.scan.batch.timeout.seconds"); - if (timeout != null) { - wrappedAccumuloClient.setScanBatchTimeoutSeconds(Long.parseLong(timeout)); - } - return wrappedAccumuloClient; + @Override + public AccumuloClient getClient(String userDN, Collection proxyServers, String cpn, Priority priority, Map trackingMap) + throws Exception { + return factory.getClient(userDN, proxyServers, cpn, priority, trackingMap); } /** @@ -267,40 +156,20 @@ public AccumuloClient getClient(final String cpn, final Priority priority, final * * @param client * The client to return + * @throws Exception + * if there are issues */ @PermitAll // permit anyone to return a connection - public void returnClient(AccumuloClient client) { - if (client instanceof WrappedAccumuloClient) { - WrappedAccumuloClient wrappedAccumuloClient = (WrappedAccumuloClient) client; - wrappedAccumuloClient.clearScannerClassLoaderContext(); - client = wrappedAccumuloClient.getReal(); - } - for (Entry> entry : this.pools.entrySet()) { - for (Entry poolEntry : entry.getValue().entrySet()) { - if (poolEntry.getValue().connectorCameFromHere(client)) { - poolEntry.getValue().returnObject(client); - return; - } - } - } - log.info("returnConnection called with connection that did not come from any AccumuloConnectionPool"); + public void returnClient(AccumuloClient client) throws Exception { + factory.returnClient(client); } @PermitAll // permit anyone to get the report @JmxManaged public String report() { - StringBuilder buf = new StringBuilder(); - for (Entry> entry : this.pools.entrySet()) { - buf.append("**** ").append(entry.getKey()).append(" ****\n"); - buf.append("ADMIN: ").append(entry.getValue().get(Priority.ADMIN)).append("\n"); - buf.append("HIGH: ").append(entry.getValue().get(Priority.HIGH)).append("\n"); - buf.append("NORMAL: 
").append(entry.getValue().get(Priority.NORMAL)).append("\n"); - buf.append("LOW: ").append(entry.getValue().get(Priority.LOW)).append("\n"); - } - - return buf.toString(); + return factory.report(); } /** @@ -317,109 +186,25 @@ public String report() { @RolesAllowed({"Administrator", "JBossAdministrator", "InternalUser"}) public ConnectionFactoryResponse getConnectionFactoryMetrics() { ConnectionFactoryResponse response = new ConnectionFactoryResponse(); - ArrayList connectionPools = new ArrayList<>(); - - Set exclude = new HashSet<>(); - exclude.add("connection.state.start"); - exclude.add("state"); - exclude.add("request.location"); - - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS"); - - for (Entry> entry : this.pools.entrySet()) { - for (Entry entry2 : entry.getValue().entrySet()) { - String poolName = entry.getKey(); - Priority priority = entry2.getKey(); - AccumuloClientPool p = entry2.getValue(); - - long now = System.currentTimeMillis(); - MutableInt maxActive = new MutableInt(); - MutableInt numActive = new MutableInt(); - MutableInt maxIdle = new MutableInt(); - MutableInt numIdle = new MutableInt(); - MutableInt numWaiting = new MutableInt(); - // getConnectionPoolStats will collect the tracking maps and maxActive, numActive, maxIdle, numIdle while synchronized - // to ensure consistency between the GenericObjectPool and the tracking maps - List> requestingConnectionsMap = p.getConnectionPoolStats(maxActive, numActive, maxIdle, numIdle, numWaiting); - - ConnectionPool poolInfo = new ConnectionPool(); - poolInfo.setPriority(priority.name()); - poolInfo.setMaxActive(maxActive.toInteger()); - poolInfo.setNumActive(numActive.toInteger()); - poolInfo.setNumWaiting(numWaiting.toInteger()); - poolInfo.setMaxIdle(maxIdle.toInteger()); - poolInfo.setNumIdle(numIdle.toInteger()); - poolInfo.setPoolName(poolName); - - List requestingConnections = new ArrayList<>(); - for (Map m : requestingConnectionsMap) { - Connection c = new Connection(); 
- String state = m.get("state"); - if (state != null) { - c.setState(state); - } - String requestLocation = m.get("request.location"); - if (requestLocation != null) { - c.setRequestLocation(requestLocation); - } - String stateStart = m.get("connection.state.start"); - if (stateStart != null) { - long stateStartLong = Long.parseLong(stateStart); - c.setTimeInState((now - stateStartLong)); - Date stateStartDate = new Date(stateStartLong); - c.addProperty("connection.state.start", sdf.format(stateStartDate)); - } - for (Map.Entry e : m.entrySet()) { - if (!exclude.contains(e.getKey())) { - c.addProperty(e.getKey(), e.getValue()); - } - } - requestingConnections.add(c); - } - Collections.sort(requestingConnections); - poolInfo.setConnectionRequests(requestingConnections); - connectionPools.add(poolInfo); - } - } - response.setConnectionPools(connectionPools); + response.setConnectionPools(getConnectionPools()); return response; } + @Override + public List getConnectionPools() { + return factory.getConnectionPools(); + } + @PermitAll @JmxManaged public int getConnectionUsagePercent() { - double maxPercentage = 0.0; - for (Entry> entry : pools.entrySet()) { - for (Entry poolEntry : entry.getValue().entrySet()) { - // Don't include ADMIN priority connections when computing a usage percentage - if (Priority.ADMIN.equals(poolEntry.getKey())) - continue; - - MutableInt maxActive = new MutableInt(); - MutableInt numActive = new MutableInt(); - MutableInt numWaiting = new MutableInt(); - MutableInt unused = new MutableInt(); - poolEntry.getValue().getConnectionPoolStats(maxActive, numActive, unused, unused, numWaiting); - - double percentage = (numActive.doubleValue() + numWaiting.doubleValue()) / maxActive.doubleValue(); - if (percentage > maxPercentage) { - maxPercentage = percentage; - } - } - } - return (int) (maxPercentage * 100); + return factory.getConnectionUsagePercent(); } @Override @PermitAll public Map getTrackingMap(StackTraceElement[] stackTrace) { - HashMap 
trackingMap = new HashMap<>(); - if (stackTrace != null) { - StackTraceElement ste = stackTrace[1]; - trackingMap.put("request.location", ste.getClassName() + "." + ste.getMethodName() + ":" + ste.getLineNumber()); - } - - return trackingMap; + return factory.getTrackingMap(stackTrace); } public String getCurrentUserDN() { diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolClientConfiguration.java b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolClientConfiguration.java index c547bc23aea..7bbb066fe86 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolClientConfiguration.java +++ b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolClientConfiguration.java @@ -7,6 +7,7 @@ import org.apache.deltaspike.core.api.config.ConfigResolver; import org.apache.log4j.Logger; +import datawave.core.common.result.ConnectionPoolClientProperties; import datawave.webservice.common.connection.AccumuloClientConfiguration; /** @@ -15,10 +16,9 @@ * dw.{pool}.client.{tableName}.consistency = IMMEDIATE|EVENTUAL dw.{pool}.client.{tableName}.{hintName} = {hintValue} * */ -public class ConnectionPoolClientConfiguration { +public class ConnectionPoolClientConfiguration extends ConnectionPoolClientProperties { private static final Logger log = Logger.getLogger(ConnectionPoolConfiguration.class); - private AccumuloClientConfiguration config = new AccumuloClientConfiguration(); public ConnectionPoolClientConfiguration(String poolName) { String prefix = "dw." 
+ poolName + ".client"; @@ -38,7 +38,4 @@ public ConnectionPoolClientConfiguration(String poolName) { } } - public AccumuloClientConfiguration getConfiguration() { - return config; - } } diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolConfiguration.java b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolConfiguration.java index a4823d00beb..d858119bacd 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolConfiguration.java +++ b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolConfiguration.java @@ -1,24 +1,11 @@ package datawave.webservice.common.connection.config; import org.apache.deltaspike.core.api.config.ConfigResolver; -import org.apache.log4j.Logger; -import datawave.webservice.common.logging.ThreadConfigurableLogger; -import datawave.webservice.util.EnvProvider; - -public class ConnectionPoolConfiguration { - - private static final Logger log = ThreadConfigurableLogger.getLogger(ConnectionPoolConfiguration.class); - - private String username; - private String password; - private String instance; - private String zookeepers; - private int lowPriorityPoolSize; - private int normalPriorityPoolSize; - private int highPriorityPoolSize; - private int adminPriorityPoolSize; +import datawave.core.common.result.ConnectionPoolProperties; +import datawave.core.common.util.EnvProvider; +public class ConnectionPoolConfiguration extends ConnectionPoolProperties { public ConnectionPoolConfiguration(String poolName) { username = ConfigResolver.getPropertyValue("dw." + poolName + ".accumulo.userName"); password = resolvePassword(poolName); @@ -41,37 +28,4 @@ protected String resolvePassword(String poolName) { String value = ConfigResolver.getPropertyValue("dw." 
+ poolName + ".accumulo.password"); return EnvProvider.resolve(value); } - - public String getUsername() { - return username; - } - - public String getPassword() { - return password; - } - - public String getInstance() { - return instance; - } - - public String getZookeepers() { - return zookeepers; - } - - public int getLowPriorityPoolSize() { - return lowPriorityPoolSize; - } - - public int getNormalPriorityPoolSize() { - return normalPriorityPoolSize; - } - - public int getHighPriorityPoolSize() { - return highPriorityPoolSize; - } - - public int getAdminPriorityPoolSize() { - return adminPriorityPoolSize; - } - } diff --git a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolsConfiguration.java b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolsConfiguration.java index 6aa7f56ed1a..8ba055ac1a5 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolsConfiguration.java +++ b/web-services/common/src/main/java/datawave/webservice/common/connection/config/ConnectionPoolsConfiguration.java @@ -1,54 +1,29 @@ package datawave.webservice.common.connection.config; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; -import javax.annotation.PostConstruct; -import javax.inject.Inject; +import datawave.core.common.result.ConnectionPoolsProperties; -import org.apache.deltaspike.core.api.config.ConfigProperty; - -public class ConnectionPoolsConfiguration { - - @Inject - @ConfigProperty(name = "dw.connectionPool.default", defaultValue = "WAREHOUSE") - private String defaultPool = null; - - @SuppressWarnings("MismatchedQueryAndUpdateOfCollection") - @Inject - @ConfigProperty(name = "dw.connectionPool.pools", defaultValue = "WAREHOUSE,METRICS") +public class ConnectionPoolsConfiguration extends ConnectionPoolsProperties { private List poolNames; - private Map pools = new HashMap<>(); - private 
Map configs = new HashMap<>(); - - @PostConstruct - private void initializePools() { + public ConnectionPoolsConfiguration build() { for (String poolName : poolNames) { pools.put(poolName, new ConnectionPoolConfiguration(poolName.toLowerCase())); configs.put(poolName, new ConnectionPoolClientConfiguration(poolName.toLowerCase())); } + return this; } - public String getDefaultPool() { - return defaultPool; - } - - public Map getPools() { - return Collections.unmodifiableMap(pools); - } - - public ConnectionPoolConfiguration getConfiguration(String pool) { - return pools.get(pool); - } - - public Map getClientConfiguration() { - return Collections.unmodifiableMap(configs); + public ConnectionPoolsConfiguration withPoolNames(List poolNames) { + this.poolNames = poolNames; + return this; } - public ConnectionPoolClientConfiguration getClientConfiguration(String pool) { - return configs.get(pool); + public ConnectionPoolsConfiguration withDefaultPool(String defaultPool) { + this.defaultPool = defaultPool; + return this; } } diff --git a/web-services/common/src/main/java/datawave/webservice/common/health/HealthBean.java b/web-services/common/src/main/java/datawave/webservice/common/health/HealthBean.java index e7d993fee68..d496e870179 100644 --- a/web-services/common/src/main/java/datawave/webservice/common/health/HealthBean.java +++ b/web-services/common/src/main/java/datawave/webservice/common/health/HealthBean.java @@ -46,7 +46,7 @@ import com.sun.management.OperatingSystemMXBean; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.webservice.common.connection.AccumuloConnectionFactoryBean; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.webservice.result.GenericResponse; @PermitAll @@ -66,7 +66,7 @@ public class HealthBean { private static String status = "ready"; @Inject - private AccumuloConnectionFactoryBean accumuloConnectionFactoryBean; + private AccumuloConnectionFactory 
accumuloConnectionFactoryBean; @Inject @ConfigProperty(name = "dw.health.connection.percent.limit", defaultValue = "200") diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index be044f4a418..2222cf375dd 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,11 +4,14 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-deploy-application ear ${project.artifactId} + + true + commons-configuration @@ -33,6 +36,12 @@ gov.nsa.datawave datawave-ingest-configuration ${project.version} + + + gov.nsa.datawave.core + datawave-core-common-util + + gov.nsa.datawave @@ -43,12 +52,28 @@ gov.nsa.datawave datawave-query-core ${project.version} + + + gov.nsa.datawave.core + datawave-core-common-util + + + gov.nsa.datawave.core + datawave-core-connection-pool + + gov.nsa.datawave.webservices datawave-ws-accumulo ${project.version} ejb + + + gov.nsa.datawave.core + datawave-core-common-util + + gov.nsa.datawave.webservices @@ -82,6 +107,12 @@ datawave-ws-common ${project.version} ejb + + + gov.nsa.datawave.core + datawave-core-connection-pool + + gov.nsa.datawave.webservices @@ -106,6 +137,12 @@ ${project.version} ejb + + gov.nsa.datawave.webservices + datawave-ws-metrics + ${project.version} + ejb + gov.nsa.datawave.webservices datawave-ws-model @@ -117,12 +154,32 @@ datawave-ws-modification ${project.version} ejb + + + gov.nsa.datawave.core + datawave-core-common-util + + + gov.nsa.datawave.core + datawave-core-connection-pool + + gov.nsa.datawave.webservices datawave-ws-query ${project.version} ejb + + + gov.nsa.datawave.core + datawave-core-common-util + + + gov.nsa.datawave.core + datawave-core-connection-pool + + gov.nsa.datawave.webservices @@ -521,6 +578,14 @@ org.apache.hadoop hadoop-client-runtime + + org.apache.hadoop.thirdparty + hadoop-shaded-guava + + + org.apache.hadoop.thirdparty + hadoop-shaded-protobuf_3_7 + 
org.apache.hadoop.thirdparty hadoop-shaded-guava diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index cda81737a3f..dbb47b8f28d 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,10 +4,13 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-deploy-configuration jar + + true + ${project.artifactId} diff --git a/web-services/deploy/configuration/src/main/resources/datawave/mapreduce/MapReduceJobs.xml b/web-services/deploy/configuration/src/main/resources/datawave/mapreduce/MapReduceJobs.xml index dc9315ad322..b3a04ec7bb6 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/mapreduce/MapReduceJobs.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/mapreduce/MapReduceJobs.xml @@ -48,7 +48,7 @@ - + diff --git a/web-services/deploy/configuration/src/main/resources/datawave/modification/ExampleModificationServices.xml b/web-services/deploy/configuration/src/main/resources/datawave/modification/ExampleModificationServices.xml index 762919576ea..0be73c2a3d6 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/modification/ExampleModificationServices.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/modification/ExampleModificationServices.xml @@ -19,7 +19,7 @@ --> - + @@ -51,4 +51,4 @@ - \ No newline at end of file + diff --git a/web-services/deploy/configuration/src/main/resources/datawave/modification/ModificationServices.xml b/web-services/deploy/configuration/src/main/resources/datawave/modification/ModificationServices.xml index 36fc620d46f..c574aa40e84 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/modification/ModificationServices.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/modification/ModificationServices.xml @@ -21,13 +21,13 @@ --> - + - + - + AuthorizedUser @@ 
-66,7 +66,7 @@ - + diff --git a/web-services/deploy/configuration/src/main/resources/datawave/query/EdgeQueryLogicFactory.xml b/web-services/deploy/configuration/src/main/resources/datawave/query/EdgeQueryLogicFactory.xml index ee701cd9289..09a505404dc 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/query/EdgeQueryLogicFactory.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/query/EdgeQueryLogicFactory.xml @@ -11,6 +11,7 @@ Provides a fall-back model in the event that the named query model 'modelName' isn't defined in DatawaveMetadata for whatever reason... --> + @@ -18,6 +19,7 @@ + diff --git a/web-services/deploy/configuration/src/main/resources/datawave/query/QueryExpiration.xml b/web-services/deploy/configuration/src/main/resources/datawave/query/QueryExpiration.xml index b7998745fd7..045e5d0480c 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/query/QueryExpiration.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/query/QueryExpiration.xml @@ -11,16 +11,16 @@ http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.0.xsd"> - + - + - + - + - + diff --git a/web-services/deploy/configuration/src/main/resources/datawave/query/QueryLogicFactory.xml b/web-services/deploy/configuration/src/main/resources/datawave/query/QueryLogicFactory.xml index 48d9b40df13..5adce1647d0 100644 --- a/web-services/deploy/configuration/src/main/resources/datawave/query/QueryLogicFactory.xml +++ b/web-services/deploy/configuration/src/main/resources/datawave/query/QueryLogicFactory.xml @@ -11,8 +11,6 @@ http://www.springframework.org/schema/util http://www.springframework.org/schema/util/spring-util-4.0.xsd"> - - @@ -160,11 +158,11 @@ + - - + - + @@ -173,21 +171,21 @@ - + - + - + - - + + @@ -355,7 +353,7 @@ - + @@ -421,7 +419,7 @@ - + @@ -467,7 +465,7 @@ - + @@ -547,6 +545,7 @@ + @@ -565,6 +564,7 @@ + @@ -687,10 +687,6 @@ - - - - + + \ No newline 
at end of file diff --git a/web-services/metrics/src/test/java/datawave/query/map/SimpleQueryGeometryHandlerTest.java b/web-services/metrics/src/test/java/datawave/query/map/SimpleQueryGeometryHandlerTest.java new file mode 100644 index 00000000000..8d1e775bc0f --- /dev/null +++ b/web-services/metrics/src/test/java/datawave/query/map/SimpleQueryGeometryHandlerTest.java @@ -0,0 +1,231 @@ +package datawave.query.map; + +import static datawave.query.QueryParameters.QUERY_SYNTAX; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; + +import datawave.microservice.query.QueryImpl; +import datawave.microservice.querymetric.QueryMetric; +import datawave.webservice.query.exception.QueryExceptionType; +import datawave.webservice.query.map.QueryGeometry; +import datawave.webservice.query.map.QueryGeometryResponse; + +public class SimpleQueryGeometryHandlerTest { + + private SimpleQueryGeometryHandler handler; + + private String commonId; + + private Set luceneParams; + private Set jexlParams; + private Set emptyParams; + + @Before + public void setup() { + handler = new SimpleQueryGeometryHandler(); + + commonId = "super-special-query-id"; + + luceneParams = new HashSet<>(); + luceneParams.add(new QueryImpl.Parameter(QUERY_SYNTAX, "LUCENE")); + + jexlParams = new HashSet<>(); + jexlParams.add(new QueryImpl.Parameter(QUERY_SYNTAX, "JEXL")); + + emptyParams = new HashSet<>(); + } + + public QueryGeometryResponse generateResponse(String id, String query, Set params) { + List queryMetrics = new ArrayList<>(); + + QueryMetric qm = new QueryMetric(); + qm.setQueryId(id); + qm.setQuery(query); + qm.setParameters(params); + queryMetrics.add(qm); + + return handler.getQueryGeometryResponse(id, queryMetrics); + } + + @Test + public void validQueryJexlTest() { + QueryGeometryResponse resp = generateResponse(commonId, "geowave:contains(field1, 'POINT(0 0)')", 
jexlParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[0.0,0.0]}", queryGeometry.getGeometry()); + Assert.assertEquals("geowave:contains(field1, 'POINT(0 0)')", queryGeometry.getFunction()); + } + + @Test + public void validGeoQueryJexlTest() { + QueryGeometryResponse resp = generateResponse(commonId, "geo:within_bounding_box(field1, '0_0', '10_10')", jexlParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Polygon\",\"coordinates\":[[[0.0,0.0],[10,0.0],[10,10],[0.0,10],[0.0,0.0]]]}", queryGeometry.getGeometry()); + Assert.assertEquals("geo:within_bounding_box(field1, '0_0', '10_10')", queryGeometry.getFunction()); + } + + @Test + public void validQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#COVERS(field2, 'POINT(1 1)')", luceneParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[1,1]}", queryGeometry.getGeometry()); + Assert.assertEquals("#COVERS(field2, 'POINT(1 1)')", queryGeometry.getFunction()); + } + + @Test + public void validGeoBoxQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#GEO(bounding_box, field1, '0_0', '10_10')", luceneParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Polygon\",\"coordinates\":[[[0.0,0.0],[10,0.0],[10,10],[0.0,10],[0.0,0.0]]]}", queryGeometry.getGeometry()); + Assert.assertEquals("#GEO(bounding_box, field1, '0_0', '10_10')", 
queryGeometry.getFunction()); + } + + @Test + public void validGeoCircleQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#GEO(circle, field1, '0_0', 10)", luceneParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals( + "{\"type\":\"Polygon\",\"coordinates\":[[[10,0.0],[9.9452,1.0453],[9.7815,2.0791],[9.5106,3.0902],[9.1355,4.0674],[8.6603,5],[8.0902,5.8779],[7.4314,6.6913],[6.6913,7.4314],[5.8779,8.0902],[5,8.6603],[4.0674,9.1355],[3.0902,9.5106],[2.0791,9.7815],[1.0453,9.9452],[6.0E-16,10],[-1.0453,9.9452],[-2.0791,9.7815],[-3.0902,9.5106],[-4.0674,9.1355],[-5,8.6603],[-5.8779,8.0902],[-6.6913,7.4314],[-7.4314,6.6913],[-8.0902,5.8779],[-8.6603,5],[-9.1355,4.0674],[-9.5106,3.0902],[-9.7815,2.0791],[-9.9452,1.0453],[-10,1.2E-15],[-9.9452,-1.0453],[-9.7815,-2.0791],[-9.5106,-3.0902],[-9.1355,-4.0674],[-8.6603,-5],[-8.0902,-5.8779],[-7.4314,-6.6913],[-6.6913,-7.4314],[-5.8779,-8.0902],[-5,-8.6603],[-4.0674,-9.1355],[-3.0902,-9.5106],[-2.0791,-9.7815],[-1.0453,-9.9452],[-1.8E-15,-10],[1.0453,-9.9452],[2.0791,-9.7815],[3.0902,-9.5106],[4.0674,-9.1355],[5,-8.6603],[5.8779,-8.0902],[6.6913,-7.4314],[7.4314,-6.6913],[8.0902,-5.8779],[8.6603,-5],[9.1355,-4.0674],[9.5106,-3.0902],[9.7815,-2.0791],[9.9452,-1.0453],[10,0.0]]]}", + queryGeometry.getGeometry()); + Assert.assertEquals("#GEO(circle, field1, '0_0', 10)", queryGeometry.getFunction()); + } + + @Test + public void validJexlQueryUndefinedSyntaxTest() { + QueryGeometryResponse resp = generateResponse(commonId, "geowave:covered_by(field3, 'POINT(2 2)')", emptyParams); + + Assert.assertEquals(1, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[2,2]}", queryGeometry.getGeometry()); + 
Assert.assertEquals("geowave:covered_by(field3, 'POINT(2 2)')", queryGeometry.getFunction()); + } + + @Test + public void validLuceneQueryUndefinedSyntaxTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#CROSSES(field4, 'POINT(3 3)')", emptyParams); + + Assert.assertEquals(0, resp.getResult().size()); + Assert.assertEquals(1, resp.getExceptions().size()); + + QueryExceptionType queryExceptionType = resp.getExceptions().get(0); + Assert.assertEquals("Unable to parse the geo features", queryExceptionType.getMessage()); + } + + @Test + public void validMultiFunctionQueryJexlTest() { + QueryGeometryResponse resp = generateResponse(commonId, "geowave:intersects(field5, 'POINT(4 4)') || geowave:overlaps(field6, 'POINT(5 5)')", + jexlParams); + + Assert.assertEquals(2, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[4,4]}", queryGeometry.getGeometry()); + Assert.assertEquals("geowave:intersects(field5, 'POINT(4 4)')", queryGeometry.getFunction()); + + queryGeometry = resp.getResult().get(1); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[5,5]}", queryGeometry.getGeometry()); + Assert.assertEquals("geowave:overlaps(field6, 'POINT(5 5)')", queryGeometry.getFunction()); + } + + @Test + public void validMultiFunctionQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#INTERSECTS(field7, 'POINT(6 6)') || #WITHIN(field8, 'POINT(7 7)')", luceneParams); + + Assert.assertEquals(2, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[6,6]}", queryGeometry.getGeometry()); + Assert.assertEquals("#INTERSECTS(field7, 'POINT(6 6)')", queryGeometry.getFunction()); + + queryGeometry = resp.getResult().get(1); + 
Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[7,7]}", queryGeometry.getGeometry()); + Assert.assertEquals("#WITHIN(field8, 'POINT(7 7)')", queryGeometry.getFunction()); + } + + @Test + public void validNonGeoQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "field9: 'term'", luceneParams); + + Assert.assertEquals(0, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + } + + @Test + public void invalidQueryJexlTest() { + QueryGeometryResponse resp = generateResponse(commonId, "geowave:intersects(field11, 3000)", jexlParams); + + Assert.assertEquals(0, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + } + + @Test + public void invalidQueryLuceneTest() { + QueryGeometryResponse resp = generateResponse(commonId, "#INTERSECTS(field12, 5000)", luceneParams); + + Assert.assertEquals(0, resp.getResult().size()); + Assert.assertNull(resp.getExceptions()); + } + + @Test + public void multipleQueryMetricsTest() { + List queryMetrics = new ArrayList<>(); + + // Valid query, Lucene syntax QueryMetric + QueryMetric qm = new QueryMetric(); + qm.setQueryId(commonId); + qm.setQuery("#COVERS(field1, 'POINT(1 1)')"); + qm.setParameters(luceneParams); + queryMetrics.add(qm); + + // Valid query, unique query id, Jexl syntax QueryMetric + qm = new QueryMetric(); + qm.setQueryId("special-snowflake-id"); + qm.setQuery("geowave:intersects(field2, 'POINT(2 2)')"); + qm.setParameters(jexlParams); + queryMetrics.add(qm); + + QueryGeometryResponse resp = handler.getQueryGeometryResponse(commonId, queryMetrics); + + Assert.assertEquals(2, resp.getResult().size()); + + QueryGeometry queryGeometry = resp.getResult().get(0); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[1,1]}", queryGeometry.getGeometry()); + Assert.assertEquals("#COVERS(field1, 'POINT(1 1)')", queryGeometry.getFunction()); + + queryGeometry = resp.getResult().get(1); + Assert.assertEquals("{\"type\":\"Point\",\"coordinates\":[2,2]}", 
queryGeometry.getGeometry()); + Assert.assertEquals("geowave:intersects(field2, 'POINT(2 2)')", queryGeometry.getFunction()); + + System.out.println("done!"); + } +} diff --git a/web-services/metrics/src/test/resources/log4j.properties b/web-services/metrics/src/test/resources/log4j.properties new file mode 100644 index 00000000000..6646cecab8b --- /dev/null +++ b/web-services/metrics/src/test/resources/log4j.properties @@ -0,0 +1,31 @@ +log4j.rootLogger=DEBUG, R +log4j.appender.R=org.apache.log4j.ConsoleAppender +log4j.appender.R.layout=org.apache.log4j.PatternLayout +log4j.appender.R.layout.ConversionPattern=%d %p %C:%L %t %m%n +log4j.appender.R.encoding=UTF-8 + +#log4j.logger.org.apache.commons.jci=OFF +#log4j.logger.datawave.query.iterators.bb.BoundingBoxIterator=OFF +#log4j.logger.datawave.query.iterators.bb.SelectorSearchingIterator=OFF +log4j.logger.datawave=info +log4j.logger.datawave.query=info +log4j.logger.datawave.query.*=info + +log4j.logger.datawave.ingest.data.normalizer=FATAL +log4j.logger.org.apache.commons.jexl2.JexlEngine=ERROR +log4j.logger.org.springframework=WARN +log4j.logger.org.apache.hadoop=WARN + +log4j.logger.datawave.ingest=WARN +log4j.logger.datawave.query.testframework=INFO +# enable dump of tables by setting value to debug +log4j.logger.datawave.helpers.PrintUtility=info + +# set DefaultQueryPlanner to debug for analysis of query plan +log4j.logger.datawave.query.planner.DefaultQueryPlanner=info +log4j.logger.datawave.query.tables.ShardQueryLogic=info +log4j.logger.org.apache.commons.beanutils=INFO +log4j.logger.org.apache.accumulo=INFO + +#log4j.logger.datawave.query.iterator.facets=DEBUG +#log4j.logger.datawave.query.tables.facets=DEBUG diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index 2778c520262..57afbcd8ed9 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-model ejb @@ 
-88,6 +88,11 @@ commons-configuration2 provided + + org.apache.hadoop.thirdparty + hadoop-shaded-guava + provided + org.jboss.logging jboss-logging diff --git a/web-services/model/src/main/java/datawave/webservice/query/model/ModelBean.java b/web-services/model/src/main/java/datawave/webservice/query/model/ModelBean.java index c597d0e79fd..94d6da61b78 100644 --- a/web-services/model/src/main/java/datawave/webservice/query/model/ModelBean.java +++ b/web-services/model/src/main/java/datawave/webservice/query/model/ModelBean.java @@ -3,6 +3,7 @@ import java.security.Principal; import java.util.Collection; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; @@ -50,14 +51,14 @@ import com.google.common.collect.Sets; import datawave.annotation.Required; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; import datawave.query.model.FieldMapping; import datawave.query.model.ModelKeyParser; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.ScannerHelper; -import datawave.webservice.common.cache.AccumuloTableCache; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.common.exception.NotFoundException; import datawave.webservice.common.exception.PreConditionFailedException; @@ -151,7 +152,7 @@ public ModelList listModelNames(@QueryParam("modelTableName") String modelTableN HashSet modelNames = new HashSet<>(); try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(AccumuloConnectionFactory.Priority.LOW, trackingMap); + client = connectionFactory.getClient(getCurrentUserDN(), getCurrentProxyServers(), 
AccumuloConnectionFactory.Priority.LOW, trackingMap); try (Scanner scanner = ScannerHelper.createScanner(client, this.checkModelTableName(modelTableName), cbAuths)) { for (Entry entry : scanner) { String colf = entry.getKey().getColumnFamily().toString(); @@ -354,7 +355,7 @@ public datawave.webservice.model.Model getModel(@Required("name") @PathParam("na AccumuloClient client = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(AccumuloConnectionFactory.Priority.LOW, trackingMap); + client = connectionFactory.getClient(getCurrentUserDN(), getCurrentProxyServers(), AccumuloConnectionFactory.Priority.LOW, trackingMap); try (Scanner scanner = ScannerHelper.createScanner(client, this.checkModelTableName(modelTableName), cbAuths)) { IteratorSetting cfg = new IteratorSetting(21, "colfRegex", RegExFilter.class.getName()); cfg.addOption(RegExFilter.COLF_REGEX, "^" + name + "(\\x00.*)?"); @@ -422,7 +423,7 @@ public VoidResponse insertMapping(datawave.webservice.model.Model model, @QueryP String tableName = this.checkModelTableName(modelTableName); try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(AccumuloConnectionFactory.Priority.LOW, trackingMap); + client = connectionFactory.getClient(getCurrentUserDN(), getCurrentProxyServers(), AccumuloConnectionFactory.Priority.LOW, trackingMap); writer = client.createBatchWriter(tableName, new BatchWriterConfig().setMaxLatency(BATCH_WRITER_MAX_LATENCY, TimeUnit.MILLISECONDS) .setMaxMemory(BATCH_WRITER_MAX_MEMORY).setMaxWriteThreads(BATCH_WRITER_MAX_THREADS)); for (FieldMapping mapping : model.getFields()) { @@ -453,7 +454,7 @@ public VoidResponse insertMapping(datawave.webservice.model.Model model, @QueryP } } } - cache.reloadCache(tableName); + cache.reloadTableCache(tableName); return response; } @@ -495,7 +496,7 @@ private VoidResponse 
deleteMapping(datawave.webservice.model.Model model, String String tableName = this.checkModelTableName(modelTableName); try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(AccumuloConnectionFactory.Priority.LOW, trackingMap); + client = connectionFactory.getClient(getCurrentUserDN(), getCurrentProxyServers(), AccumuloConnectionFactory.Priority.LOW, trackingMap); writer = client.createBatchWriter(tableName, new BatchWriterConfig().setMaxLatency(BATCH_WRITER_MAX_LATENCY, TimeUnit.MILLISECONDS) .setMaxMemory(BATCH_WRITER_MAX_MEMORY).setMaxWriteThreads(BATCH_WRITER_MAX_THREADS)); for (FieldMapping mapping : model.getFields()) { @@ -527,7 +528,7 @@ private VoidResponse deleteMapping(datawave.webservice.model.Model model, String } } if (reloadCache) - cache.reloadCache(tableName); + cache.reloadTableCache(tableName); return response; } @@ -543,4 +544,28 @@ private String checkModelTableName(String tableName) { else return tableName; } + + public String getCurrentUserDN() { + + String currentUserDN = null; + Principal p = ctx.getCallerPrincipal(); + + if (p != null && p instanceof DatawavePrincipal) { + currentUserDN = ((DatawavePrincipal) p).getUserDN().subjectDN(); + } + + return currentUserDN; + } + + public Collection getCurrentProxyServers() { + List currentProxyServers = null; + Principal p = ctx.getCallerPrincipal(); + + if (p != null && p instanceof DatawavePrincipal) { + currentProxyServers = ((DatawavePrincipal) p).getProxyServers(); + } + + return currentProxyServers; + } + } diff --git a/web-services/model/src/test/java/datawave/webservice/query/model/ModelBeanTest.java b/web-services/model/src/test/java/datawave/webservice/query/model/ModelBeanTest.java index 721c4ba0f62..b9aed896f46 100644 --- a/web-services/model/src/test/java/datawave/webservice/query/model/ModelBeanTest.java +++ b/web-services/model/src/test/java/datawave/webservice/query/model/ModelBeanTest.java @@ 
-37,6 +37,8 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.cache.AccumuloTableCache; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.query.model.ModelKeyParser; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; @@ -44,8 +46,6 @@ import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.util.DnUtils; import datawave.security.util.ScannerHelper; -import datawave.webservice.common.cache.AccumuloTableCache; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.model.ModelList; @@ -134,9 +134,10 @@ public void tearDown() { public void testModelImportNoTable() throws Exception { HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); PowerMock.replayAll(); bean.importModel(MODEL_ONE, (String) null); @@ -148,16 +149,18 @@ private void importModels() throws Exception { HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), 
EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); connectionFactory.returnClient(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); - EasyMock.expect(cache.reloadCache(ModelBean.DEFAULT_MODEL_TABLE_NAME)).andReturn(null); + cache.reloadTableCache(ModelBean.DEFAULT_MODEL_TABLE_NAME); PowerMock.replayAll(); bean.importModel(MODEL_ONE, (String) null); @@ -165,17 +168,19 @@ private void importModels() throws Exception { PowerMock.resetAll(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); - 
EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); connectionFactory.returnClient(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); - EasyMock.expect(cache.reloadCache(ModelBean.DEFAULT_MODEL_TABLE_NAME)).andReturn(null); + cache.reloadTableCache(ModelBean.DEFAULT_MODEL_TABLE_NAME); PowerMock.replayAll(); bean.importModel(MODEL_TWO, (String) null); @@ -188,10 +193,11 @@ public void testListModels() throws Exception { importModels(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); @@ -208,10 +214,11 @@ public void testModelGet() 
throws Exception { importModels(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); @@ -226,22 +233,24 @@ public void testModelDelete() throws Exception { importModels(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + 
EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); connectionFactory.returnClient(client); - EasyMock.expect(cache.reloadCache(ModelBean.DEFAULT_MODEL_TABLE_NAME)).andReturn(null); + cache.reloadTableCache(ModelBean.DEFAULT_MODEL_TABLE_NAME); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); @@ -251,9 +260,10 @@ public void testModelDelete() throws Exception { PowerMock.verifyAll(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); try { @@ 
-271,9 +281,10 @@ public void testModelDelete() throws Exception { PowerMock.verifyAll(); PowerMock.resetAll(); // Ensure model one still intact - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); datawave.webservice.model.Model model1 = bean.getModel(MODEL_ONE.getName(), (String) null); @@ -284,10 +295,11 @@ public void testModelDelete() throws Exception { @Test(expected = DatawaveWebApplicationException.class) public void testModelGetInvalidModelName() throws Exception { - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); @@ -300,18 +312,20 @@ public void testCloneModel() throws Exception { importModels(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + 
EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); HashMap trackingMap = new HashMap<>(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); - EasyMock.expect(cache.reloadCache(ModelBean.DEFAULT_MODEL_TABLE_NAME)).andReturn(null); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); + cache.reloadTableCache(ModelBean.DEFAULT_MODEL_TABLE_NAME); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); connectionFactory.returnClient(client); EasyMock.expect(System.currentTimeMillis()).andReturn(TIMESTAMP); @@ -321,9 +335,10 @@ 
public void testCloneModel() throws Exception { bean.cloneModel(MODEL_ONE.getName(), "MODEL2", (String) null); PowerMock.verifyAll(); PowerMock.resetAll(); - EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal); + EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); EasyMock.expect(connectionFactory.getTrackingMap((StackTraceElement[]) EasyMock.anyObject())).andReturn(trackingMap); - EasyMock.expect(connectionFactory.getClient(EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), EasyMock.eq(trackingMap))).andReturn(client); + EasyMock.expect(connectionFactory.getClient(EasyMock.eq(userDN.toLowerCase()), EasyMock.eq(null), EasyMock.eq(AccumuloConnectionFactory.Priority.LOW), + EasyMock.eq(trackingMap))).andReturn(client); connectionFactory.returnClient(client); PowerMock.replayAll(); diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index 9c67b78f277..edaeb5309cf 100644 --- a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-modification ejb @@ -19,6 +19,11 @@ datawave-query-core ${project.version} + + gov.nsa.datawave.core + datawave-core-modification + ${project.version} + javax.enterprise cdi-api diff --git a/web-services/modification/src/main/java/datawave/webservice/modification/ModificationBean.java b/web-services/modification/src/main/java/datawave/webservice/modification/ModificationBean.java index f20a9ac9d60..5021835c563 100644 --- a/web-services/modification/src/main/java/datawave/webservice/modification/ModificationBean.java +++ b/web-services/modification/src/main/java/datawave/webservice/modification/ModificationBean.java @@ -1,16 +1,6 @@ package datawave.webservice.modification; -import static java.util.Map.Entry; - -import java.security.Principal; -import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.Collection; -import 
java.util.Collections; -import java.util.HashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import javax.annotation.Resource; import javax.annotation.security.DeclareRoles; @@ -30,31 +20,22 @@ import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; -import javax.ws.rs.core.MultivaluedMap; -import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.security.Authorizations; import org.apache.log4j.Logger; import org.jboss.resteasy.annotations.GZIP; -import org.jboss.resteasy.specimpl.MultivaluedMapImpl; import datawave.annotation.Required; import datawave.configuration.spring.SpringBean; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; +import datawave.modification.DatawaveModificationException; +import datawave.modification.ModificationService; +import datawave.modification.configuration.ModificationConfiguration; import datawave.security.authorization.DatawavePrincipal; -import datawave.webservice.common.audit.AuditParameterBuilder; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.exception.BadRequestException; import datawave.webservice.common.exception.DatawaveWebApplicationException; -import datawave.webservice.common.exception.UnauthorizedException; import datawave.webservice.modification.cache.ModificationCacheBean; -import datawave.webservice.modification.configuration.ModificationConfiguration; -import datawave.webservice.modification.configuration.ModificationServiceConfiguration; -import datawave.webservice.query.exception.BadRequestQueryException; -import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.exception.UnauthorizedQueryException; import datawave.webservice.query.runner.QueryExecutorBean; 
import datawave.webservice.result.VoidResponse; import datawave.webservice.results.modification.ModificationConfigurationResponse; @@ -86,8 +67,15 @@ public class ModificationBean { @SpringBean(refreshable = true) private ModificationConfiguration modificationConfiguration; - @Inject - private AuditParameterBuilder auditParameterBuilder; + private ModificationService service; + + private ModificationService getService() { + if (service == null) { + service = new ModificationService(modificationConfiguration, cache.getCache(), connectionFactory, + new QueryExecutorBeanService(queryService).getFactory()); + } + return service; + } /** * Returns a list of the Modification service names and their configurations @@ -104,16 +92,7 @@ public class ModificationBean { @GZIP @Interceptors({RequiredInterceptor.class, ResponseInterceptor.class}) public List listConfigurations() { - List configs = new ArrayList<>(); - for (Entry entry : this.modificationConfiguration.getConfigurations().entrySet()) { - ModificationConfigurationResponse r = new ModificationConfigurationResponse(); - r.setName(entry.getKey()); - r.setRequestClass(entry.getValue().getRequestClass().getName()); - r.setDescription(entry.getValue().getDescription()); - r.setAuthorizedRoles(entry.getValue().getAuthorizedRoles()); - configs.add(r); - } - return configs; + return getService().listConfigurations(); } /** @@ -141,84 +120,15 @@ public List listConfigurations() { @Interceptors({RequiredInterceptor.class, ResponseInterceptor.class}) public VoidResponse submit(@Required("modificationServiceName") @PathParam("serviceName") String modificationServiceName, @Required("request") ModificationRequestBase request) { - VoidResponse response = new VoidResponse(); - - // Find out who/what called this method - Principal p = ctx.getCallerPrincipal(); - String user; - Set cbAuths = new HashSet<>(); - Collection userRoles = Collections.emptySet(); - if (p instanceof DatawavePrincipal) { - DatawavePrincipal dp = 
(DatawavePrincipal) p; - user = dp.getShortName(); - userRoles = dp.getPrimaryUser().getRoles(); - for (Collection c : dp.getAuthorizations()) - cbAuths.add(new Authorizations(c.toArray(new String[c.size()]))); - } else { - QueryException qe = new QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName())); - response.addException(qe); - throw new DatawaveWebApplicationException(qe, response); - } - - AccumuloClient client = null; - AccumuloConnectionFactory.Priority priority; try { - // Get the Modification Service from the configuration - ModificationServiceConfiguration service = modificationConfiguration.getConfiguration(modificationServiceName); - if (!request.getClass().equals(service.getRequestClass())) { - BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.INVALID_REQUEST_CLASS, - MessageFormat.format("Requires: {0}", service.getRequestClass().getName())); + DatawavePrincipal p = (DatawavePrincipal) ctx.getCallerPrincipal(); + return getService().submit(p, modificationServiceName, request); + } catch (DatawaveModificationException dme) { + VoidResponse response = new VoidResponse(); + for (QueryException qe : dme.getExceptions()) { response.addException(qe); - throw new BadRequestException(qe, response); - } - - priority = service.getPriority(); - - // Ensure that the user is in the list of authorized roles - if (null != service.getAuthorizedRoles()) { - boolean authorized = !Collections.disjoint(userRoles, service.getAuthorizedRoles()); - if (!authorized) { - // Then the user does not have any of the authorized roles - UnauthorizedQueryException qe = new UnauthorizedQueryException(DatawaveErrorCode.JOB_EXECUTION_UNAUTHORIZED, - MessageFormat.format("Requires one of: {0}", service.getAuthorizedRoles())); - response.addException(qe); - throw new UnauthorizedException(qe, response); - } } - - if (service.getRequiresAudit()) { - try { - MultivaluedMap requestMap = new 
MultivaluedMapImpl<>(); - requestMap.putAll(request.toMap()); - auditParameterBuilder.convertAndValidate(requestMap); - } catch (Exception e) { - QueryException qe = new QueryException(DatawaveErrorCode.QUERY_AUDITING_ERROR, e); - log.error(qe); - response.addException(qe.getBottomQueryException()); - } - } - - // Process the modification - Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(modificationConfiguration.getPoolName(), priority, trackingMap); - service.setQueryService(queryService); - log.info("Processing modification request from user=" + user + ": \n" + request); - service.process(client, request, cache.getCachedMutableFieldList(), cbAuths, user); - return response; - } catch (DatawaveWebApplicationException e) { - throw e; - } catch (Exception e) { - QueryException qe = new QueryException(DatawaveErrorCode.MODIFICATION_ERROR, e); - log.error(qe); - response.addException(qe.getBottomQueryException()); - throw new DatawaveWebApplicationException(e, response); - } finally { - if (null != client) - try { - connectionFactory.returnClient(client); - } catch (Exception e) { - log.error("Error returning connection", e); - } + throw new DatawaveWebApplicationException(dme, response); } } diff --git a/web-services/modification/src/main/java/datawave/webservice/modification/QueryExecutorBeanService.java b/web-services/modification/src/main/java/datawave/webservice/modification/QueryExecutorBeanService.java new file mode 100644 index 00000000000..039ae0fd998 --- /dev/null +++ b/web-services/modification/src/main/java/datawave/webservice/modification/QueryExecutorBeanService.java @@ -0,0 +1,43 @@ +package datawave.webservice.modification; + +import java.util.List; +import java.util.Map; + +import datawave.modification.query.ModificationQueryService; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.webservice.query.runner.QueryExecutorBean; +import 
datawave.webservice.query.util.MapUtils; +import datawave.webservice.result.BaseQueryResponse; +import datawave.webservice.result.GenericResponse; + +public class QueryExecutorBeanService implements ModificationQueryService { + private final QueryExecutorBean queryService; + + public QueryExecutorBeanService(QueryExecutorBean queryService) { + this.queryService = queryService; + } + + @Override + public GenericResponse createQuery(String logicName, Map> paramsToMap) { + return queryService.createQuery(logicName, MapUtils.toMultivaluedMap(paramsToMap)); + } + + @Override + public BaseQueryResponse next(String id) { + return queryService.next(id); + } + + @Override + public void close(String id) { + queryService.close(id); + } + + public ModificationQueryServiceFactory getFactory() { + return new ModificationQueryServiceFactory() { + @Override + public ModificationQueryService createService(ProxiedUserDetails userDetails) { + return QueryExecutorBeanService.this; + } + }; + } +} diff --git a/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheBean.java b/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheBean.java index 6b1cc1eb2b0..348529cf481 100644 --- a/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheBean.java +++ b/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheBean.java @@ -1,11 +1,6 @@ package datawave.webservice.modification.cache; -import static datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; - import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; @@ -15,7 +10,6 @@ import javax.annotation.security.DeclareRoles; import javax.annotation.security.RolesAllowed; import javax.annotation.security.RunAs; -import 
javax.ejb.EJBException; import javax.ejb.LocalBean; import javax.ejb.Lock; import javax.ejb.LockType; @@ -27,11 +21,6 @@ import javax.ws.rs.Path; import javax.ws.rs.Produces; -import org.apache.accumulo.core.client.AccumuloClient; -import org.apache.accumulo.core.client.BatchScanner; -import org.apache.accumulo.core.data.Key; -import org.apache.accumulo.core.data.Range; -import org.apache.accumulo.core.data.Value; import org.apache.deltaspike.core.api.jmx.JmxManaged; import org.apache.deltaspike.core.api.jmx.MBean; import org.apache.hadoop.io.Text; @@ -39,11 +28,11 @@ import org.jboss.resteasy.annotations.GZIP; import datawave.configuration.spring.SpringBean; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; -import datawave.security.util.ScannerHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.modification.configuration.ModificationConfiguration; +import datawave.modification.cache.ModificationCache; +import datawave.modification.configuration.ModificationConfiguration; import datawave.webservice.result.VoidResponse; import datawave.webservice.results.modification.MutableFieldListResponse; @@ -65,7 +54,7 @@ public class ModificationCacheBean { private Logger log = Logger.getLogger(this.getClass()); - private Map> cache = new HashMap<>(); + private ModificationCache cache; @Inject private AccumuloConnectionFactory connectionFactory; @@ -76,11 +65,7 @@ public class ModificationCacheBean { @PostConstruct public void init() { - if (modificationConfiguration != null) { - reloadMutableFieldCache(); - } else { - log.error("modificationConfiguration was null"); - } + cache = new ModificationCache(connectionFactory, modificationConfiguration); } /** @@ -102,53 +87,13 @@ public void init() { @GZIP @JmxManaged public VoidResponse reloadMutableFieldCache() { - this.clearCache(); - log.trace("cleared 
cache"); - final VoidResponse resp = new VoidResponse(); - AccumuloClient client = null; - BatchScanner s = null; - try { - Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - log.trace("getting mutable list from table " + this.modificationConfiguration.getTableName()); - log.trace("modificationConfiguration.getPoolName() = " + modificationConfiguration.getPoolName()); - client = connectionFactory.getClient(modificationConfiguration.getPoolName(), Priority.ADMIN, trackingMap); - log.trace("got connection"); - s = ScannerHelper.createBatchScanner(client, this.modificationConfiguration.getTableName(), - Collections.singleton(client.securityOperations().getUserAuthorizations(client.whoami())), 8); - s.setRanges(Collections.singleton(new Range())); - s.fetchColumnFamily(MODIFICATION_COLUMN); - for (Entry e : s) { - // Field name is in the row and datatype is in the colq. - String datatype = e.getKey().getColumnQualifier().toString(); - log.trace("datatype = " + datatype); - String fieldName = e.getKey().getRow().toString(); - log.trace("fieldname = " + fieldName); - if (null == cache.get(datatype)) - cache.put(datatype, new HashSet<>()); - cache.get(datatype).add(fieldName); - } - log.trace("cache size = " + cache.size()); - for (Entry> e : cache.entrySet()) { - log.trace("datatype = " + e.getKey() + ", fieldcount = " + e.getValue().size()); - } - } catch (Exception e) { - log.error("Error during initialization of ModificationCacheBean", e); - throw new EJBException("Error during initialization of ModificationCacheBean", e); - } finally { - if (null != s) - s.close(); - try { - connectionFactory.returnClient(client); - } catch (Exception e) { - log.error("Error returning connection to pool", e); - } - } - return resp; + this.cache.reloadMutableFieldCache(); + return new VoidResponse(); } @JmxManaged public String listMutableFields() { - return cache.toString(); + return cache.listMutableFields(); } /** @@ -161,8 +106,7 @@ 
public String listMutableFields() { * @return true if field is mutable for the given datatype */ public boolean isFieldMutable(String datatype, String field) { - log.trace("datatype = " + datatype + ", field = " + field); - return cache.get(datatype).contains(field); + return cache.isFieldMutable(datatype, field); } @GET @@ -173,7 +117,7 @@ public boolean isFieldMutable(String datatype, String field) { @Interceptors({RequiredInterceptor.class, ResponseInterceptor.class}) public List getMutableFieldList() { List lists = new ArrayList<>(); - for (Entry> entry : this.cache.entrySet()) { + for (Entry> entry : this.cache.getCachedMutableFieldList().entrySet()) { MutableFieldListResponse r = new MutableFieldListResponse(); r.setDatatype(entry.getKey()); r.setMutableFields(entry.getValue()); @@ -183,17 +127,15 @@ public List getMutableFieldList() { } public Map> getCachedMutableFieldList() { - log.trace("cache = " + cache); - return Collections.unmodifiableMap(cache); + return cache.getCachedMutableFieldList(); } public ModificationConfiguration getModificationConfiguration() { return modificationConfiguration; } - protected void clearCache() { - log.trace("cleared the cache"); - this.cache.clear(); + public ModificationCache getCache() { + return cache; } } diff --git a/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheMessageBean.java b/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheMessageBean.java index 52b92497645..ab7aab8b84d 100644 --- a/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheMessageBean.java +++ b/web-services/modification/src/main/java/datawave/webservice/modification/cache/ModificationCacheMessageBean.java @@ -16,9 +16,9 @@ import org.apache.log4j.Logger; import datawave.configuration.spring.SpringBean; -import datawave.webservice.modification.MutableMetadataHandler; -import 
datawave.webservice.modification.configuration.ModificationConfiguration; -import datawave.webservice.modification.configuration.ModificationServiceConfiguration; +import datawave.modification.MutableMetadataHandler; +import datawave.modification.configuration.ModificationConfiguration; +import datawave.modification.configuration.ModificationServiceConfiguration; @RunAs("InternalUser") @MessageDriven(name = "ModificationCacheMessageBean", diff --git a/web-services/pom.xml b/web-services/pom.xml index 809842f7252..91a81aba0f9 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-parent @@ -21,6 +21,7 @@ map-reduce map-reduce-status map-reduce-embedded + metrics atom query-websocket web-root @@ -46,6 +47,7 @@ 3.2 3.9.0 2.6.1 + 2.1.8 1.1 1.0.1 1.1 @@ -114,6 +116,11 @@ + + dnsjava + dnsjava + ${version.dnsjava} + gov.nsa.datawave datawave-core @@ -356,8 +363,12 @@ ${version.zookeeper} - * - * + org.slf4j + slf4j-log4j12 + + + log4j + log4j @@ -495,7 +506,14 @@ org.powermock powermock-module-junit4-rule-agent - 1.6.1 + 2.0.2 + test + true + + + org.powermock + powermock-reflect + 2.0.2 test true diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index 4a11f2ee103..bb6c8a9a8cd 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index 36cd3f1b5db..f523cbb2e83 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-query ejb @@ -62,6 +62,12 @@ + + gov.nsa.datawave.core + datawave-core-query + ${project.version} + jboss + 
gov.nsa.datawave.microservice query-metric-api @@ -70,6 +76,12 @@ gov.nsa.datawave.webservices datawave-ws-client ${project.version} + + + jakarta.validation + jakarta.validation-api + + io.protostuff diff --git a/web-services/query/src/main/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBean.java b/web-services/query/src/main/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBean.java index 391e5646131..b23309bad13 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBean.java @@ -27,8 +27,8 @@ import com.google.common.collect.Maps; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.logic.QueryLogic; @Startup @Singleton diff --git a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryCacheBean.java b/web-services/query/src/main/java/datawave/webservice/query/cache/QueryCacheBean.java index 93e2acefbef..e2158a046e9 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryCacheBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/cache/QueryCacheBean.java @@ -22,7 +22,7 @@ import org.jboss.resteasy.annotations.GZIP; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogic; import datawave.webservice.query.runner.QueryExecutorBean; import datawave.webservice.query.runner.RunningQuery; import datawave.webservice.result.VoidResponse; diff --git a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationBean.java 
b/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationBean.java index 2fc63efbb05..f68bce9e461 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationBean.java @@ -18,8 +18,9 @@ import datawave.configuration.DatawaveEmbeddedProjectStageHolder; import datawave.configuration.spring.SpringBean; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.QueryMetric; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.metric.QueryMetricsBean; @@ -44,7 +45,7 @@ public class QueryExpirationBean { @Inject @SpringBean(refreshable = true) - private QueryExpirationConfiguration conf; + private QueryExpirationProperties conf; @Inject private AccumuloConnectionFactory connectionFactory; @@ -64,7 +65,7 @@ public void init() { } if (conf == null) { - throw new IllegalArgumentException("QueryExpirationConfiguration is null"); + throw new IllegalArgumentException("QueryExpirationProperties is null"); } } @@ -90,7 +91,7 @@ public void removeIdleOrExpired() { } long now = System.currentTimeMillis(); clearQueries(now); - qlCache.clearQueryLogics(now, conf.getCallTimeInMS()); + qlCache.clearQueryLogics(now, conf.getCallTimeoutMillis()); } private void clearQueries(long now) { @@ -161,11 +162,11 @@ private void clearQueries(long now) { private boolean isIdleTooLong(RunningQuery query, long currentTime) { long difference = currentTime - query.getLastUsed(); if (log.isDebugEnabled()) { - long countDown = (conf.getIdleTimeInMS() / 1000) - (difference / 1000); + long countDown = (conf.getIdleTimeoutMillis() / 1000) - (difference / 1000); 
log.debug("Query: " + query.getSettings().getOwner() + " - " + query.getSettings().getId() + " will be evicted in: " + countDown + " seconds."); } - return difference > conf.getIdleTimeInMS(); + return difference > conf.getIdleTimeoutMillis(); } /** @@ -186,7 +187,7 @@ private boolean isNextTooLong(RunningQuery query, long currentTime) { query.touch(); // Since we know we're still in a call, go ahead and reset the idle time. long difference = currentTime - query.getTimeOfCurrentCall(); - if (difference > conf.getCallTimeInMS()) { + if (difference > conf.getCallTimeoutMillis()) { log.warn("Query " + query.getSettings().getOwner() + " - " + query.getSettings().getId() + " has been in a call for " + (difference / 1000) + "s. We are evicting this query from the cache."); return true; diff --git a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationConfiguration.java b/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationConfiguration.java deleted file mode 100644 index 077836799cc..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/cache/QueryExpirationConfiguration.java +++ /dev/null @@ -1,91 +0,0 @@ -package datawave.webservice.query.cache; - -import datawave.configuration.RefreshableScope; - -/** - * Configuration file is located at: datawave/query/QueryExpiration.xml - */ -@RefreshableScope -public class QueryExpirationConfiguration { - - public static final int PAGE_TIMEOUT_MIN_DEFAULT = 60; - public static final int IDLE_TIME_MIN_DEFAULT = 15; - - private long idleTimeMinutes = IDLE_TIME_MIN_DEFAULT; - private long callTimeMinutes = PAGE_TIMEOUT_MIN_DEFAULT; - private long pageSizeShortCircuitCheckTimeMinutes = PAGE_TIMEOUT_MIN_DEFAULT / 2; - private long pageShortCircuitTimeoutMinutes = Math.round(0.97 * PAGE_TIMEOUT_MIN_DEFAULT); - private int maxLongRunningTimeoutRetries = 3; - - public long getIdleTimeMinutes() { - return idleTimeMinutes; - } - - public long getIdleTimeInMS() { - 
return idleTimeMinutes * 60 * 1000; - } - - public void setIdleTime(long idleTimeMinutes) { - this.idleTimeMinutes = idleTimeMinutes; - } - - public void setIdleTimeMinutes(long idleTimeMinutes) { - this.idleTimeMinutes = idleTimeMinutes; - } - - public long getCallTimeMinutes() { - return callTimeMinutes; - } - - public long getCallTimeInMS() { - return callTimeMinutes * 60 * 1000; - } - - public void setCallTime(long callTimeMinutes) { - this.callTimeMinutes = callTimeMinutes; - } - - public void setCallTimeMinutes(long callTimeMinutes) { - this.callTimeMinutes = callTimeMinutes; - } - - public float getPageSizeShortCircuitCheckTimeMinutes() { - return pageSizeShortCircuitCheckTimeMinutes; - } - - public long getPageSizeShortCircuitCheckTimeInMS() { - return pageSizeShortCircuitCheckTimeMinutes * 60 * 1000; - } - - public void setPageSizeShortCircuitCheckTime(long pageSizeShortCircuitCheckTimeMinutes) { - this.pageSizeShortCircuitCheckTimeMinutes = pageSizeShortCircuitCheckTimeMinutes; - } - - public void setPageSizeShortCircuitCheckTimeMinutes(long pageSizeShortCircuitCheckTimeMinutes) { - this.pageSizeShortCircuitCheckTimeMinutes = pageSizeShortCircuitCheckTimeMinutes; - } - - public long getPageShortCircuitTimeoutMinutes() { - return pageShortCircuitTimeoutMinutes; - } - - public long getPageShortCircuitTimeoutInMS() { - return pageShortCircuitTimeoutMinutes * 60 * 1000; - } - - public void setPageShortCircuitTimeout(long pageShortCircuitTimeoutMinutes) { - this.pageShortCircuitTimeoutMinutes = pageShortCircuitTimeoutMinutes; - } - - public void setPageShortCircuitTimeoutMinutes(long pageShortCircuitTimeoutMinutes) { - this.pageShortCircuitTimeoutMinutes = pageShortCircuitTimeoutMinutes; - } - - public int getMaxLongRunningTimeoutRetries() { - return maxLongRunningTimeoutRetries; - } - - public void setMaxLongRunningTimeoutRetries(int maxLongRunningTimeoutRetries) { - this.maxLongRunningTimeoutRetries = maxLongRunningTimeoutRetries; - } -} diff --git 
a/web-services/query/src/main/java/datawave/webservice/query/cache/RunningQueryTimingImpl.java b/web-services/query/src/main/java/datawave/webservice/query/cache/RunningQueryTimingImpl.java index 66a53b04cc3..968bb75b58e 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/cache/RunningQueryTimingImpl.java +++ b/web-services/query/src/main/java/datawave/webservice/query/cache/RunningQueryTimingImpl.java @@ -1,5 +1,6 @@ package datawave.webservice.query.cache; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.webservice.query.runner.RunningQuery.RunningQueryTiming; public class RunningQueryTimingImpl implements RunningQueryTiming { @@ -13,9 +14,8 @@ public class RunningQueryTimingImpl implements RunningQueryTiming { // The maximum number of times to continue running a long running query after the timeout is reached. private int maxLongRunningTimeoutRetries = 3; - public RunningQueryTimingImpl(QueryExpirationConfiguration conf, int pageTimeout) { - this(conf.getCallTimeInMS(), conf.getPageSizeShortCircuitCheckTimeInMS(), conf.getPageShortCircuitTimeoutInMS(), - conf.getMaxLongRunningTimeoutRetries()); + public RunningQueryTimingImpl(QueryExpirationProperties conf, int pageTimeout) { + this(conf.getCallTimeoutMillis(), conf.getShortCircuitCheckTimeMillis(), conf.getShortCircuitTimeoutMillis(), conf.getMaxLongRunningTimeoutRetries()); if (pageTimeout > 0) { maxCallMs = pageTimeout * 60 * 1000; diff --git a/web-services/query/src/main/java/datawave/webservice/query/configuration/IdTranslatorConfiguration.java b/web-services/query/src/main/java/datawave/webservice/query/configuration/IdTranslatorConfiguration.java index 9c055795abe..3e1c0aa8ffa 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/configuration/IdTranslatorConfiguration.java +++ b/web-services/query/src/main/java/datawave/webservice/query/configuration/IdTranslatorConfiguration.java @@ -8,8 +8,8 @@ import 
org.jboss.resteasy.specimpl.MultivaluedMapImpl; import org.springframework.stereotype.Component; +import datawave.microservice.query.QueryParameters; import datawave.query.data.UUIDType; -import datawave.webservice.query.QueryParameters; @Component("idTranslatorConfiguration") public class IdTranslatorConfiguration { @@ -42,7 +42,7 @@ public void setUuidTypes(List uuidTypes) { List goodTypes = new ArrayList<>(); if (uuidTypes != null) { for (UUIDType uuidType : uuidTypes) { - if ("LuceneUUIDEventQuery".equalsIgnoreCase(uuidType.getDefinedView("default"))) { + if ("LuceneUUIDEventQuery".equalsIgnoreCase(uuidType.getQueryLogic("default"))) { goodTypes.add(uuidType); } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/configuration/LookupUUIDConfiguration.java b/web-services/query/src/main/java/datawave/webservice/query/configuration/LookupUUIDConfiguration.java index 504928006e7..7559e015ee9 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/configuration/LookupUUIDConfiguration.java +++ b/web-services/query/src/main/java/datawave/webservice/query/configuration/LookupUUIDConfiguration.java @@ -7,8 +7,8 @@ import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import datawave.microservice.query.QueryParameters; import datawave.query.data.UUIDType; -import datawave.webservice.query.QueryParameters; import datawave.webservice.query.util.LookupUUIDConstants; /** diff --git a/web-services/query/src/main/java/datawave/webservice/query/configuration/QueryData.java b/web-services/query/src/main/java/datawave/webservice/query/configuration/QueryData.java deleted file mode 100644 index b984fa2afb9..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/configuration/QueryData.java +++ /dev/null @@ -1,206 +0,0 @@ -package datawave.webservice.query.configuration; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.HashSet; -import java.util.List; - -import 
org.apache.accumulo.core.client.IteratorSetting; -import org.apache.accumulo.core.data.Range; -import org.apache.commons.lang3.builder.EqualsBuilder; -import org.apache.commons.lang3.builder.HashCodeBuilder; - -/** - * Class to encapsulate all required information to run a query. - */ -public class QueryData { - private String query; - private Collection ranges = new HashSet<>(); - private Collection columnFamilies = new HashSet<>(); - private List settings = new ArrayList<>(); - private boolean rebuildHashCode = true; - private int hashCode = -1; - - public QueryData() { - // empty constructor - } - - /** - * Full constructor - * - * @param query - * the query string - * @param ranges - * a collection of ranges - * @param columnFamilies - * a collection of column families - * @param settings - * a list of IteratorSetting - */ - public QueryData(String query, Collection ranges, Collection columnFamilies, List settings) { - this.query = query; - this.ranges = ranges; - this.columnFamilies = columnFamilies; - this.settings = settings; - } - - /** - * Copy constructor - * - * @param other - * another instance of QueryData - */ - public QueryData(QueryData other) { - this.query = other.query; - this.ranges = new HashSet<>(other.ranges); - this.columnFamilies = new HashSet<>(other.columnFamilies); - this.settings = new ArrayList<>(other.settings); - this.hashCode = other.hashCode; - this.rebuildHashCode = other.rebuildHashCode; - } - - @Deprecated(since = "6.5.0", forRemoval = true) - public QueryData(String query, Collection ranges, List settings) { - setQuery(query); - setRanges(ranges); - setSettings(settings); - } - - /** - * Weak copy constructor that updates the ranges - * - * @param other - * another QueryData - * @param ranges - * a collection of updated ranges - * @deprecated - */ - @Deprecated(since = "6.5.0", forRemoval = true) - public QueryData(QueryData other, Collection ranges) { - setQuery(other.getQuery()); - setSettings(other.getSettings()); - 
setRanges(ranges); - } - - @Deprecated(since = "6.5.0", forRemoval = true) - public QueryData(String queryString, List ranges, List settings, Collection columnFamilies) { - this(queryString, ranges, settings); - this.columnFamilies.addAll(columnFamilies); - } - - // builder style methods - - public QueryData withQuery(String query) { - this.query = query; - resetHashCode(); - return this; - } - - public QueryData withRanges(Collection ranges) { - this.ranges = ranges; - resetHashCode(); - return this; - } - - public QueryData withColumnFamilies(Collection columnFamilies) { - this.columnFamilies = columnFamilies; - resetHashCode(); - return this; - } - - public QueryData withSettings(List settings) { - this.settings = settings; - resetHashCode(); - return this; - } - - public void setSettings(List settings) { - this.settings = new ArrayList<>(settings); - resetHashCode(); - } - - public List getSettings() { - return settings; - } - - public void setQuery(String query) { - this.query = query; - resetHashCode(); - } - - public String getQuery() { - return query; - } - - public Collection getRanges() { - return ranges; - } - - public Collection getColumnFamilies() { - return columnFamilies; - } - - public void setColumnFamilies(Collection columnFamilies) { - this.columnFamilies = columnFamilies; - resetHashCode(); - } - - public void setRanges(Collection ranges) { - this.ranges = ranges; - resetHashCode(); - } - - public void addIterator(IteratorSetting cfg) { - this.settings.add(cfg); - resetHashCode(); - } - - @Override - public String toString() { - // @formatter:off - return new StringBuilder() - .append("Query: '").append(this.query) - .append("', Ranges: ").append(this.ranges) - .append(", Settings: ").append(this.settings) - .toString(); - // @formatter:on - } - - public boolean equals(Object o) { - if (o instanceof QueryData) { - QueryData other = (QueryData) o; - // @formatter:off - return new EqualsBuilder() - .append(query, other.query) - .append(ranges, 
other.ranges) - .append(columnFamilies, other.columnFamilies) - .append(settings, other.settings) - .isEquals(); - // @formatter:on - } - return false; - } - - @Override - public int hashCode() { - if (rebuildHashCode) { - // @formatter:off - hashCode = new HashCodeBuilder() - .append(query) - .append(ranges) - .append(columnFamilies) - .append(settings) - .hashCode(); - rebuildHashCode = false; - // @formatter:on - } - return hashCode; - } - - /** - * Method to reset the hashcode when an internal variable is updated - */ - private void resetHashCode() { - rebuildHashCode = true; - } -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardBean.java b/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardBean.java index 3b2afca68be..72cb6ae12f2 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardBean.java @@ -33,12 +33,13 @@ import org.jboss.resteasy.annotations.GZIP; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.extjs.ExtJsResponse; +import datawave.core.query.dashboard.DashboardSummary; import datawave.interceptor.ResponseInterceptor; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.ScannerHelper; import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.extjs.ExtJsResponse; import datawave.webservice.query.runner.QueryExecutorBean; @Path("/Query/Metrics/dashboard") @@ -153,7 +154,15 @@ private Scanner createScanner(AccumuloClient accumuloClient) throws TableNotFoun } private AccumuloClient createClient() throws Exception { + Principal p = ctx.getCallerPrincipal(); + String userDn = null; + Collection proxyServers = 
null; + if (p instanceof DatawavePrincipal) { + DatawavePrincipal dp = (DatawavePrincipal) p; + userDn = dp.getUserDN().subjectDN(); + proxyServers = dp.getProxyServers(); + } Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - return connectionFactory.getClient(AccumuloConnectionFactory.Priority.LOW, trackingMap); + return connectionFactory.getClient(userDn, proxyServers, AccumuloConnectionFactory.Priority.LOW, trackingMap); } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardQuery.java b/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardQuery.java index 5dd77c82563..2f2c240bc57 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardQuery.java +++ b/web-services/query/src/main/java/datawave/webservice/query/dashboard/DashboardQuery.java @@ -3,15 +3,15 @@ import java.text.ParseException; import java.util.Date; -import javax.ws.rs.core.MultivaluedMap; - import org.apache.commons.lang.time.DateUtils; -import org.jboss.resteasy.specimpl.MultivaluedMapImpl; -import datawave.webservice.common.extjs.ExtJsResponse; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.QueryPersistence; +import datawave.core.common.extjs.ExtJsResponse; +import datawave.core.query.dashboard.DashboardFields; +import datawave.core.query.dashboard.DashboardSummary; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.QueryPersistence; import datawave.webservice.query.runner.QueryExecutor; +import datawave.webservice.query.util.MapUtils; public class DashboardQuery { @@ -33,9 +33,9 @@ private DashboardQuery() {} public static ExtJsResponse createQuery(QueryExecutor queryExecutor, String auths, Date beginDate, Date endDate, Date now) throws ParseException { - MultivaluedMap paramsMap = new MultivaluedMapImpl<>(); - 
paramsMap.putAll(QueryParametersImpl.paramsToMap(logicName, queryString, queryName, columnVisibility, beginDate, endDate, auths, - DateUtils.addDays(now, 1), pageSize, pageTimeout, maxResultsOverride, persistence, systemFrom, parameters, trace)); - return (ExtJsResponse) queryExecutor.createQueryAndNext(logicName, paramsMap); + return (ExtJsResponse) queryExecutor.createQueryAndNext(logicName, + MapUtils.toMultivaluedMap(DefaultQueryParameters.paramsToMap(logicName, queryString, queryName, columnVisibility, beginDate, endDate, + auths, DateUtils.addDays(now, 1), pageSize, pageTimeout, maxResultsOverride, persistence, systemFrom, parameters, + trace))); } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/factory/Persister.java b/web-services/query/src/main/java/datawave/webservice/query/factory/Persister.java index 727f7bb9941..2f5363c930d 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/factory/Persister.java +++ b/web-services/query/src/main/java/datawave/webservice/query/factory/Persister.java @@ -49,19 +49,19 @@ import com.google.protobuf.InvalidProtocolBufferException; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.configuration.spring.SpringBean; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.util.QueryUtil; import datawave.marking.SecurityMarking; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryParameters; +import datawave.microservice.query.QueryPersistence; import datawave.query.iterator.QueriesTableAgeOffIterator; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.ScannerHelper; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import 
datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.result.event.ResponseObjectFactory; +import datawave.webservice.query.util.MapUtils; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; -import datawave.webservice.query.util.QueryUtil; /** * Object that creates and updates QueryImpl objects using a table structure: @@ -104,13 +104,12 @@ public Q apply(final Entry entry) { protected EJBContext ctx; @Inject - @SpringBean(name = "ResponseObjectFactory") private ResponseObjectFactory responseObjectFactory; public Query create(String userDN, List dnList, SecurityMarking marking, String queryLogicName, QueryParameters qp, MultivaluedMap optionalQueryParameters) { Query q = responseObjectFactory.getQueryImpl(); - q.initialize(userDN, dnList, queryLogicName, qp, optionalQueryParameters); + q.initialize(userDN, dnList, queryLogicName, qp, MapUtils.toMultiValueMap(optionalQueryParameters)); q.setColumnVisibility(marking.toColumnVisibilityString()); q.setUncaughtExceptionHandler(new QueryUncaughtExceptionHandler()); Thread.currentThread().setUncaughtExceptionHandler(q.getUncaughtExceptionHandler()); @@ -145,7 +144,7 @@ private void create(Query query) { AccumuloClient c = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - c = connectionFactory.getClient(Priority.ADMIN, trackingMap); + c = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(c); try (BatchWriter writer = c.createBatchWriter(TABLE_NAME, new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1))) { @@ -208,7 +207,7 @@ public void remove(Query query) throws Exception { BatchDeleter deleter = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - c = connectionFactory.getClient(Priority.ADMIN, trackingMap); + c = 
connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); if (!c.tableOperations().exists(TABLE_NAME)) { return; } @@ -265,7 +264,7 @@ public List findById(String id) { try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(Priority.ADMIN, trackingMap); + client = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(client); IteratorSetting regex = new IteratorSetting(21, RegExFilter.class); @@ -313,7 +312,7 @@ public List findByName(String name) { AccumuloClient c = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - c = connectionFactory.getClient(Priority.ADMIN, trackingMap); + c = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(c); try (Scanner scanner = ScannerHelper.createScanner(c, TABLE_NAME, auths)) { Range range = new Range(shortName, shortName); @@ -359,7 +358,7 @@ public List findByUser() { AccumuloClient c = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - c = connectionFactory.getClient(Priority.ADMIN, trackingMap); + c = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(c); try (Scanner scanner = ScannerHelper.createScanner(c, TABLE_NAME, auths)) { Range range = new Range(sid, sid); @@ -411,7 +410,7 @@ public List findByUser(String user) { AccumuloClient c = null; try { Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - c = connectionFactory.getClient(Priority.ADMIN, trackingMap); + c = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(c); try (Scanner scanner = ScannerHelper.createScanner(c, TABLE_NAME, auths)) { Range range = new Range(user, user); @@ -445,7 +444,7 @@ public List adminFindById(final String queryId) { try { final Map trackingMap = 
connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - client = connectionFactory.getClient(Priority.ADMIN, trackingMap); + client = connectionFactory.getClient(null, null, Priority.ADMIN, trackingMap); tableCheck(client); final IteratorSetting regex = new IteratorSetting(21, RegExFilter.class); diff --git a/web-services/query/src/main/java/datawave/webservice/query/hud/HudBean.java b/web-services/query/src/main/java/datawave/webservice/query/hud/HudBean.java index 2eea342a4a3..21cffcb0260 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/hud/HudBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/hud/HudBean.java @@ -25,12 +25,12 @@ import com.google.gson.Gson; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; +import datawave.microservice.query.Query; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.PageMetric; import datawave.microservice.querymetric.QueryMetricSummary; import datawave.microservice.querymetric.QueryMetricsSummaryResponse; import datawave.security.authorization.DatawavePrincipal; -import datawave.webservice.query.Query; import datawave.webservice.query.factory.Persister; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.runner.QueryExecutorBean; diff --git a/web-services/query/src/main/java/datawave/webservice/query/hud/HudQuerySummaryBuilder.java b/web-services/query/src/main/java/datawave/webservice/query/hud/HudQuerySummaryBuilder.java index 52b17835b58..96786cbd377 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/hud/HudQuerySummaryBuilder.java +++ b/web-services/query/src/main/java/datawave/webservice/query/hud/HudQuerySummaryBuilder.java @@ -1,6 +1,6 @@ package datawave.webservice.query.hud; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; /** * diff --git 
a/web-services/query/src/main/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptor.java b/web-services/query/src/main/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptor.java index 10f6c4481a5..4c18427d648 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptor.java +++ b/web-services/query/src/main/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptor.java @@ -19,14 +19,14 @@ import org.jboss.resteasy.util.FindAnnotation; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.PageMetric; import datawave.resteasy.interceptor.BaseMethodStatsInterceptor; import datawave.webservice.query.annotation.EnrichQueryMetrics; import datawave.webservice.query.annotation.EnrichQueryMetrics.MethodType; import datawave.webservice.query.cache.QueryCache; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.runner.QueryExecutorBean; import datawave.webservice.query.runner.RunningQuery; diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/DatawaveRoleManager.java b/web-services/query/src/main/java/datawave/webservice/query/logic/DatawaveRoleManager.java deleted file mode 100644 index 3bd47bcd97f..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/DatawaveRoleManager.java +++ /dev/null @@ -1,43 +0,0 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; -import java.util.Collection; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; - -import com.google.common.collect.Sets; - 
-import datawave.security.authorization.DatawavePrincipal; - -public class DatawaveRoleManager implements RoleManager { - - private Set requiredRoles; - - public DatawaveRoleManager() {} - - public DatawaveRoleManager(Collection requiredRoles) { - this.requiredRoles = Collections.unmodifiableSet(Sets.newHashSet(requiredRoles)); - } - - @Override - public boolean canRunQuery(QueryLogic queryLogic, Principal principal) { - if (principal instanceof DatawavePrincipal == false) - return false; - DatawavePrincipal datawavePrincipal = (DatawavePrincipal) principal; - if (requiredRoles != null && !requiredRoles.isEmpty()) { - Set usersRoles = new HashSet<>(datawavePrincipal.getPrimaryUser().getRoles()); - return usersRoles.containsAll(requiredRoles); - } - return true; - } - - public Set getRequiredRoles() { - return requiredRoles; - } - - public void setRequiredRoles(Set requiredRoles) { - this.requiredRoles = requiredRoles; - } - -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/EasyRoleManager.java b/web-services/query/src/main/java/datawave/webservice/query/logic/EasyRoleManager.java deleted file mode 100644 index 653eac3ffbf..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/EasyRoleManager.java +++ /dev/null @@ -1,23 +0,0 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; -import java.util.Collections; -import java.util.Set; - -public class EasyRoleManager implements RoleManager { - - @Override - public boolean canRunQuery(QueryLogic queryLogic, Principal principal) { - return true; - } - - @Override - public void setRequiredRoles(Set requiredRoles) { - // TODO Auto-generated method stub - } - - @Override - public Set getRequiredRoles() { - return Collections.emptySet(); - } -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactory.java b/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactory.java deleted 
file mode 100644 index 4ad56232d43..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactory.java +++ /dev/null @@ -1,23 +0,0 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; -import java.util.List; - -public interface QueryLogicFactory { - - /** - * - * @param name - * name of query logic - * @param principal - * the principal - * @return new instance of QueryLogic class - * @throws IllegalArgumentException - * if query logic name does not exist - * @throws CloneNotSupportedException - * if the clone is not supported - */ - QueryLogic getQueryLogic(String name, Principal principal) throws IllegalArgumentException, CloneNotSupportedException; - - List> getQueryLogicList(); -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryConfiguration.java b/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryConfiguration.java index c9f3cca3b0c..e722baf9272 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryConfiguration.java +++ b/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryConfiguration.java @@ -2,6 +2,8 @@ import java.util.Map; +import datawave.core.query.logic.QueryLogic; + public class QueryLogicFactoryConfiguration { // The logicMap is the list of logics that can be loaded. // This is a map of logic name to bean name. 
diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryImpl.java b/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryImpl.java index b39bc7ee4b5..07514cf7ba5 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryImpl.java +++ b/web-services/query/src/main/java/datawave/webservice/query/logic/QueryLogicFactoryImpl.java @@ -1,11 +1,11 @@ package datawave.webservice.query.logic; -import java.security.Principal; import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import javax.inject.Inject; @@ -13,6 +13,11 @@ import org.springframework.context.ApplicationContext; import datawave.configuration.spring.SpringBean; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.security.authorization.DatawavePrincipal; +import datawave.security.authorization.ProxiedUserDetails; +import datawave.security.system.ServerPrincipal; import datawave.webservice.common.exception.UnauthorizedException; import datawave.webservice.result.VoidResponse; @@ -28,9 +33,22 @@ public class QueryLogicFactoryImpl implements QueryLogicFactory { @Inject private ApplicationContext applicationContext; + @Inject + @ServerPrincipal + private DatawavePrincipal serverPrincipal; + + @Override + public QueryLogic getQueryLogic(String name, ProxiedUserDetails currentUser) throws IllegalArgumentException, CloneNotSupportedException { + return getQueryLogic(name, currentUser, true); + } + @Override - public QueryLogic getQueryLogic(String queryLogic, Principal principal) throws IllegalArgumentException, CloneNotSupportedException { + public QueryLogic getQueryLogic(String name) throws IllegalArgumentException, CloneNotSupportedException { + return getQueryLogic(name, null, false); + } + public QueryLogic 
getQueryLogic(String queryLogic, ProxiedUserDetails currentUser, boolean checkRoles) + throws IllegalArgumentException, CloneNotSupportedException { String beanName = queryLogic; if (queryLogicFactoryConfiguration.hasLogicMap()) { beanName = queryLogicFactoryConfiguration.getLogicMap().get(queryLogic); @@ -42,7 +60,6 @@ public QueryLogic getQueryLogic(String queryLogic, Principal principal) throw QueryLogic logic; try { logic = (QueryLogic) applicationContext.getBean(beanName); - logic.setPrincipal(principal); } catch (ClassCastException | NoSuchBeanDefinitionException cce) { if (beanName.equals(queryLogic)) { throw new IllegalArgumentException("Logic name '" + queryLogic + "' does not exist in the configuration"); @@ -51,9 +68,9 @@ public QueryLogic getQueryLogic(String queryLogic, Principal principal) throw } } - if (!logic.canRunQuery(principal)) { - throw new UnauthorizedException(new IllegalAccessException("User does not have required role(s): " + logic.getRoleManager().getRequiredRoles()), - new VoidResponse()); + Set userRoles = new HashSet<>(currentUser.getPrimaryUser().getRoles()); + if (checkRoles && !logic.canRunQuery(userRoles)) { + throw new UnauthorizedException(new IllegalAccessException("User does not have required role(s): " + logic.getRequiredRoles()), new VoidResponse()); } logic.setLogicName(queryLogic); @@ -63,6 +80,10 @@ public QueryLogic getQueryLogic(String queryLogic, Principal principal) throw if (logic.getPageByteTrigger() == 0) { logic.setPageByteTrigger(queryLogicFactoryConfiguration.getPageByteTrigger()); } + + logic.setCurrentUser(currentUser); + logic.setServerUser(serverPrincipal); + return logic; } diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManager.java b/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManager.java deleted file mode 100644 index ab15f595e8d..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManager.java +++ /dev/null @@ 
-1,14 +0,0 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; -import java.util.Set; - -public interface RoleManager { - - boolean canRunQuery(QueryLogic queryLogic, Principal principal); - - void setRequiredRoles(Set requiredRoles); - - Set getRequiredRoles(); - -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManagerImpl.java b/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManagerImpl.java deleted file mode 100644 index f42b5ee0330..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/RoleManagerImpl.java +++ /dev/null @@ -1,5 +0,0 @@ -package datawave.webservice.query.logic; - -public class RoleManagerImpl { - -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/logic/WritesQueryMetrics.java b/web-services/query/src/main/java/datawave/webservice/query/logic/WritesQueryMetrics.java deleted file mode 100644 index 049bd144779..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/logic/WritesQueryMetrics.java +++ /dev/null @@ -1,9 +0,0 @@ -package datawave.webservice.query.logic; - -import datawave.microservice.querymetric.BaseQueryMetric; - -public interface WritesQueryMetrics { - - void writeQueryMetrics(BaseQueryMetric metric); - -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsBean.java b/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsBean.java index 3f11a5b3476..23f7e61c6a9 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsBean.java @@ -36,6 +36,8 @@ import datawave.annotation.Required; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; import datawave.configuration.spring.SpringBean; +import datawave.core.query.map.QueryGeometryHandler; +import 
datawave.core.query.metric.QueryMetricHandler; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; import datawave.metrics.remote.RemoteQueryMetricService; @@ -46,7 +48,6 @@ import datawave.security.authorization.DatawavePrincipal; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.map.QueryGeometryHandler; import datawave.webservice.query.map.QueryGeometryResponse; @Path("/Query/Metrics") diff --git a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsWriter.java b/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsWriter.java index a77b9f7f7fc..daf82f4b24e 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsWriter.java +++ b/web-services/query/src/main/java/datawave/webservice/query/metric/QueryMetricsWriter.java @@ -39,6 +39,7 @@ import datawave.configuration.DatawaveEmbeddedProjectStageHolder; import datawave.configuration.RefreshEvent; import datawave.configuration.spring.SpringBean; +import datawave.core.query.metric.QueryMetricHandler; import datawave.metrics.remote.RemoteQueryMetricService; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.Lifecycle; diff --git a/web-services/query/src/main/java/datawave/webservice/query/predicate/AuthorizationsPredicate.java b/web-services/query/src/main/java/datawave/webservice/query/predicate/AuthorizationsPredicate.java deleted file mode 100644 index c90f0b69ebd..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/predicate/AuthorizationsPredicate.java +++ /dev/null @@ -1,53 +0,0 @@ -package datawave.webservice.query.predicate; - -import java.util.function.Predicate; - -import org.apache.accumulo.core.security.Authorizations; -import org.apache.accumulo.core.security.ColumnVisibility; -import 
org.apache.accumulo.core.security.VisibilityEvaluator; -import org.apache.accumulo.core.security.VisibilityParseException; - -/** - * This is a predicate that will test the auths against a specified visibility (as defined by accumulo's ColumnVisibility). In addition to the visibility, one - * can specify that only the first of the authorizations is matched (presumably the user). - */ -public class AuthorizationsPredicate implements Predicate { - - // A visibility string to be matched against the auths being used for the query - private ColumnVisibility visibility; - - public AuthorizationsPredicate() {} - - public AuthorizationsPredicate(String visibility) { - setVisibility(visibility); - } - - @Override - public boolean test(Authorizations auths) { - // match the visibility against the auths. - ColumnVisibility vis = getVisibility(); - VisibilityEvaluator ve = new VisibilityEvaluator(auths); - try { - return (ve.evaluate(vis)); - } catch (VisibilityParseException e) { - throw new RuntimeException(e); - } - } - - public ColumnVisibility getVisibility() { - return visibility; - } - - public void setVisibility(ColumnVisibility visibility) { - this.visibility = visibility; - } - - public void setVisibility(String visibility) { - setVisibility(new ColumnVisibility(visibility)); - } - - @Override - public String toString() { - return "(auths =~ " + visibility + ')'; - } -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/remote/RemoteQueryServiceImpl.java b/web-services/query/src/main/java/datawave/webservice/query/remote/RemoteQueryServiceImpl.java index 63433ea70e2..0e354bac945 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/remote/RemoteQueryServiceImpl.java +++ b/web-services/query/src/main/java/datawave/webservice/query/remote/RemoteQueryServiceImpl.java @@ -22,10 +22,11 @@ import com.fasterxml.jackson.databind.ObjectReader; +import datawave.core.query.remote.RemoteQueryService; import 
datawave.security.auth.DatawaveAuthenticationMechanism; import datawave.security.authorization.DatawavePrincipal; +import datawave.security.authorization.ProxiedUserDetails; import datawave.webservice.common.remote.RemoteHttpService; -import datawave.webservice.common.remote.RemoteQueryService; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.GenericResponse; import datawave.webservice.result.VoidResponse; @@ -73,16 +74,16 @@ public void init() { } @Override - public GenericResponse createQuery(String queryLogicName, Map> queryParameters, Object callerObject) { + public GenericResponse createQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { return query(CREATE, queryLogicName, queryParameters, callerObject); } @Override - public GenericResponse planQuery(String queryLogicName, Map> queryParameters, Object callerObject) { + public GenericResponse planQuery(String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { return query(PLAN, queryLogicName, queryParameters, callerObject); } - private GenericResponse query(String endPoint, String queryLogicName, Map> queryParameters, Object callerObject) { + private GenericResponse query(String endPoint, String queryLogicName, Map> queryParameters, ProxiedUserDetails callerObject) { init(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); @@ -118,7 +119,7 @@ private GenericResponse query(String endPoint, String queryLogicName, Ma } @Override - public BaseQueryResponse next(String id, Object callerObject) { + public BaseQueryResponse next(String id, ProxiedUserDetails callerObject) { init(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); @@ -133,7 +134,7 @@ public BaseQueryResponse next(String id, Object callerObject) { } @Override - public VoidResponse close(String id, Object callerObject) { + public VoidResponse close(String id, ProxiedUserDetails callerObject) { init(); final 
DatawavePrincipal principal = getDatawavePrincipal(callerObject); @@ -148,7 +149,7 @@ public VoidResponse close(String id, Object callerObject) { } @Override - public GenericResponse planQuery(String id, Object callerObject) { + public GenericResponse planQuery(String id, ProxiedUserDetails callerObject) { init(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); @@ -174,7 +175,7 @@ public URI getQueryMetricsURI(String id) { } - private DatawavePrincipal getDatawavePrincipal(Object callerObject) { + private DatawavePrincipal getDatawavePrincipal(ProxiedUserDetails callerObject) { if (callerObject instanceof DatawavePrincipal) { return (DatawavePrincipal) callerObject; } diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/AccumuloConnectionRequestBean.java b/web-services/query/src/main/java/datawave/webservice/query/runner/AccumuloConnectionRequestBean.java index 0be1a28c60c..8d2c794a541 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/AccumuloConnectionRequestBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/runner/AccumuloConnectionRequestBean.java @@ -1,76 +1,29 @@ package datawave.webservice.query.runner; -import java.security.Principal; -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - import javax.annotation.Resource; import javax.ejb.EJBContext; import javax.inject.Singleton; -import org.apache.accumulo.core.util.Pair; import org.apache.log4j.Logger; +import datawave.core.query.runner.AccumuloConnectionRequestMap; + /** * For storing a map of queryId to Thread that is requesting an AccumuloConnection */ @Singleton // CDI singleton -public class AccumuloConnectionRequestBean { +public class AccumuloConnectionRequestBean extends AccumuloConnectionRequestMap { private static Logger log = Logger.getLogger(AccumuloConnectionRequestBean.class); @Resource private EJBContext ctx; - private Map> getConnectionThreadMap = new 
ConcurrentHashMap<>(); + private AccumuloConnectionRequestMap getConnectionThreadMap = new AccumuloConnectionRequestMap(); public boolean cancelConnectionRequest(String id) { - return cancelConnectionRequest(id, ctx.getCallerPrincipal()); + return cancelConnectionRequest(id, ctx.getCallerPrincipal().getName()); } - public boolean cancelConnectionRequest(String id, Principal principal) { - // this call checks that the Principal used for the connection request and th connection cancel are the same - // if query is waiting for an accumulo connection in create or reset, then interrupt it - boolean connectionRequestCanceled = false; - try { - Pair connectionRequestPair = getConnectionThreadMap.get(id); - if (connectionRequestPair != null) { - String connectionRequestPrincipalName = principal.getName(); - String connectionCancelPrincipalName = connectionRequestPair.getFirst().getName(); - if (connectionRequestPrincipalName.equals(connectionCancelPrincipalName)) { - connectionRequestPair.getSecond().interrupt(); - connectionRequestCanceled = true; - } - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - return connectionRequestCanceled; - } - - public boolean adminCancelConnectionRequest(String id) { - // it is assumed that admin status is already checked, so this call does not check the calling Principals - // if query is waiting for an accumulo connection in create or reset, then interrupt it - boolean connectionRequestCanceled = false; - try { - Pair connectionRequestPair = getConnectionThreadMap.get(id); - if (connectionRequestPair != null) { - connectionRequestPair.getSecond().interrupt(); - connectionRequestCanceled = true; - } - } catch (Exception e) { - log.error(e.getMessage(), e); - } - return connectionRequestCanceled; - } - - public void requestBegin(String id) { - Pair connectionRequestPair = new Pair<>(ctx.getCallerPrincipal(), Thread.currentThread()); - getConnectionThreadMap.put(id, connectionRequestPair); - } - - public void 
requestEnd(String id) { - getConnectionThreadMap.remove(id); - } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/BasicQueryBean.java b/web-services/query/src/main/java/datawave/webservice/query/runner/BasicQueryBean.java index 02058f9d6c9..4cba1a8b22b 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/BasicQueryBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/runner/BasicQueryBean.java @@ -1,7 +1,6 @@ package datawave.webservice.query.runner; import java.lang.reflect.Method; -import java.security.Principal; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -44,19 +43,18 @@ import datawave.annotation.GenerateQuerySessionId; import datawave.annotation.Required; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; -import datawave.configuration.spring.SpringBean; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; +import datawave.microservice.query.Query; import datawave.resteasy.interceptor.CreateQuerySessionIDFilter; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.UserOperations; import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.query.Query; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.query.result.logic.QueryLogicDescription; import datawave.webservice.result.BaseQueryResponse; @@ -109,7 +107,6 @@ public class BasicQueryBean { 
private SessionContext sessionContext; @Inject - @SpringBean(name = "ResponseObjectFactory") private ResponseObjectFactory responseObjectFactory; @PostConstruct @@ -166,10 +163,10 @@ public QueryWizardStep1Response showQueryWizardStep1() { if (exampleQueries != null) { d.setExampleQueries(new ArrayList<>(exampleQueries)); } - Set requiredRoles = l.getRoleManager().getRequiredRoles(); + Set requiredRoles = l.getRequiredRoles(); if (requiredRoles != null) { List requiredRolesList = new ArrayList<>(); - requiredRolesList.addAll(l.getRoleManager().getRequiredRoles()); + requiredRolesList.addAll(l.getRequiredRoles()); d.setRequiredRoles(requiredRolesList); } @@ -265,10 +262,10 @@ public QueryWizardStep2Response showQueryWizardStep2(MultivaluedMap(exampleQueries)); } - Set requiredRoles = l.getRoleManager().getRequiredRoles(); + Set requiredRoles = l.getRequiredRoles(); if (requiredRoles != null) { List requiredRolesList = new ArrayList<>(); - requiredRolesList.addAll(l.getRoleManager().getRequiredRoles()); + requiredRolesList.addAll(l.getRequiredRoles()); d.setRequiredRoles(requiredRolesList); } @@ -307,8 +304,8 @@ public QueryWizardStep2Response showQueryWizardStep2(MultivaluedMap logic = queryLogicFactory.getQueryLogic(logicName, principal); + QueryLogic logic = queryLogicFactory.getQueryLogic(logicName, (DatawavePrincipal) principal); // the query principal is our local principal unless the query logic has a different user operations - DatawavePrincipal queryPrincipal = (logic.getUserOperations() == null) ? (DatawavePrincipal) principal - : logic.getUserOperations().getRemoteUser((DatawavePrincipal) principal); + DatawavePrincipal queryPrincipal = (DatawavePrincipal) ((logic.getUserOperations() == null) ? 
principal + : logic.getUserOperations().getRemoteUser((DatawavePrincipal) principal)); userAuths = WSAuthorizationsUtil.buildUserAuthorizationString(queryPrincipal); } catch (Exception e) { log.error("Failed to get user query authorizations", e); diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/NoOpQueryPredictor.java b/web-services/query/src/main/java/datawave/webservice/query/runner/NoOpQueryPredictor.java deleted file mode 100644 index 20f0435b71d..00000000000 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/NoOpQueryPredictor.java +++ /dev/null @@ -1,14 +0,0 @@ -package datawave.webservice.query.runner; - -import java.util.Set; - -import datawave.microservice.querymetric.BaseQueryMetric; -import datawave.microservice.querymetric.BaseQueryMetric.Prediction; - -public class NoOpQueryPredictor implements QueryPredictor { - - @Override - public Set predict(BaseQueryMetric query) throws PredictionException { - return null; - } -} diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutor.java b/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutor.java index 2f3a6e1ccc6..56ecc391850 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutor.java +++ b/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutor.java @@ -7,7 +7,7 @@ import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.core.UriInfo; -import datawave.webservice.query.QueryPersistence; +import datawave.microservice.query.QueryPersistence; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.GenericResponse; import datawave.webservice.result.QueryImplListResponse; diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java b/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java index 0e35675483e..4044facb91e 100644 --- 
a/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java +++ b/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java @@ -77,6 +77,8 @@ import org.apache.log4j.Logger; import org.jboss.resteasy.annotations.GZIP; import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; import org.springframework.util.StringUtils; import com.codahale.metrics.annotation.Timed; @@ -97,9 +99,23 @@ import datawave.annotation.Required; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; import datawave.configuration.spring.SpringBean; +import datawave.core.common.audit.PrivateAuditConstants; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.predict.QueryPredictor; +import datawave.core.query.util.QueryUtil; import datawave.interceptor.RequiredInterceptor; import datawave.interceptor.ResponseInterceptor; import datawave.marking.SecurityMarking; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryImpl.Parameter; +import datawave.microservice.query.QueryParameters; +import datawave.microservice.query.QueryPersistence; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.PageMetric; import datawave.microservice.querymetric.BaseQueryMetric.Prediction; @@ -113,8 +129,6 @@ import datawave.webservice.common.audit.AuditBean; import datawave.webservice.common.audit.AuditParameters; import datawave.webservice.common.audit.Auditor.AuditType; -import 
datawave.webservice.common.audit.PrivateAuditConstants; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.BadRequestException; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.common.exception.NoResultsException; @@ -122,18 +136,11 @@ import datawave.webservice.common.exception.PreConditionFailedException; import datawave.webservice.common.exception.QueryCanceledException; import datawave.webservice.common.exception.UnauthorizedException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryImpl.Parameter; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.annotation.EnrichQueryMetrics; import datawave.webservice.query.cache.ClosedQueryCache; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean; import datawave.webservice.query.cache.QueryCache; -import datawave.webservice.query.cache.QueryExpirationConfiguration; import datawave.webservice.query.cache.QueryTraceCache; -import datawave.webservice.query.cache.ResultsPage; import datawave.webservice.query.cache.RunningQueryTimingImpl; import datawave.webservice.query.configuration.LookupUUIDConfiguration; import datawave.webservice.query.exception.BadRequestQueryException; @@ -144,18 +151,15 @@ import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.exception.UnauthorizedQueryException; import datawave.webservice.query.factory.Persister; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; -import datawave.webservice.query.logic.QueryLogicTransformer; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.result.event.ResponseObjectFactory; import 
datawave.webservice.query.result.logic.QueryLogicDescription; import datawave.webservice.query.util.GetUUIDCriteria; import datawave.webservice.query.util.LookupUUIDUtil; +import datawave.webservice.query.util.MapUtils; import datawave.webservice.query.util.NextContentCriteria; import datawave.webservice.query.util.PostUUIDCriteria; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; -import datawave.webservice.query.util.QueryUtil; import datawave.webservice.query.util.UIDQueryCriteria; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.BaseResponse; @@ -214,7 +218,7 @@ public class QueryExecutorBean implements QueryExecutor { @Inject @SpringBean(refreshable = true) - private QueryExpirationConfiguration queryExpirationConf; + private QueryExpirationProperties queryExpirationConf; @Inject private Persister persister; @@ -233,7 +237,6 @@ public class QueryExecutorBean implements QueryExecutor { private SecurityMarking marking; @Inject - @SpringBean(name = "ResponseObjectFactory") private ResponseObjectFactory responseObjectFactory; private LookupUUIDUtil lookupUUIDUtil; @@ -260,7 +263,7 @@ public class QueryExecutorBean implements QueryExecutor { private ClosedQueryCache closedQueryCache; private final int PAGE_TIMEOUT_MIN = 1; - private final int PAGE_TIMEOUT_MAX = QueryExpirationConfiguration.PAGE_TIMEOUT_MIN_DEFAULT; + private final int PAGE_TIMEOUT_MAX = 60; private final String UUID_REGEX_RULE = "[a-fA-F\\d-]+"; private final String INVALID_PAGESIZE = "page.size"; @@ -346,10 +349,10 @@ public QueryLogicResponse listQueryLogic() { if (exampleQueries != null) { d.setExampleQueries(new ArrayList<>(exampleQueries)); } - Set requiredRoles = l.getRoleManager().getRequiredRoles(); + Set requiredRoles = l.getRequiredRoles(); if (requiredRoles != null) { List requiredRolesList = new ArrayList<>(); - requiredRolesList.addAll(l.getRoleManager().getRequiredRoles()); + requiredRolesList.addAll(l.getRequiredRoles()); 
d.setRequiredRoles(requiredRolesList); } @@ -415,6 +418,32 @@ private void handleIncorrectPageSize() { throwBadRequest(DatawaveErrorCode.INVALID_PAGE_SIZE, response); } + /** + * Setup the caller data in the QueryData object + * + * @param p + * @param qd + * @return qd + */ + private QueryData setUserData(Principal p, QueryData qd) { + // Find out who/what called this method + qd.proxyServers = null; + qd.p = p; + qd.userDn = qd.p.getName(); + qd.userid = qd.userDn; + qd.dnList = Collections.singletonList(qd.userid); + if (qd.p instanceof DatawavePrincipal) { + DatawavePrincipal dp = (DatawavePrincipal) qd.p; + qd.userid = dp.getShortName(); + qd.userDn = dp.getUserDN().subjectDN(); + String[] dns = dp.getDNs(); + Arrays.sort(dns); + qd.dnList = Arrays.asList(dns); + qd.proxyServers = dp.getProxyServers(); + } + return qd; + } + /** * This method will provide some initial query validation for the define and create query calls. * @@ -437,7 +466,7 @@ private QueryData validateQuery(String queryLogicName, MultivaluedMap response = new GenericResponse<>(); - response.addException(qe); - throw new UnauthorizedException(qe, response); - } + // Verify that the calling principal has access to the query logic iff being called externally (i.e. Principal instanceof DatawavePrincipal) + if (qd.p instanceof DatawavePrincipal && !qd.logic.containsDNWithAccess(qd.dnList)) { + UnauthorizedQueryException qe = new UnauthorizedQueryException("None of the DNs used have access to this query logic: " + qd.dnList, 401); + GenericResponse response = new GenericResponse<>(); + response.addException(qe); + throw new UnauthorizedException(qe, response); } log.trace(qd.userid + " has authorizations " + ((qd.p instanceof DatawavePrincipal) ? ((DatawavePrincipal) qd.p).getAuthorizations() : "")); @@ -613,9 +631,8 @@ public GenericResponse defineQuery(@Required("logicName") @PathParam("lo // will not exist when reset is called. 
RunningQuery rq; try { - MultivaluedMap optionalQueryParameters = new MultivaluedMapImpl<>(); - optionalQueryParameters.putAll(qp.getUnknownParameters(queryParameters)); - Query q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, optionalQueryParameters); + Map> optionalQueryParameters = qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters)); + Query q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, MapUtils.toMultivaluedMap(optionalQueryParameters)); response.setResult(q.getId().toString()); boolean shouldTraceQuery = shouldTraceQuery(qp.getQuery(), qd.userid, false); if (shouldTraceQuery) { @@ -686,9 +703,8 @@ public GenericResponse createQuery(@Required("logicName") @PathParam("lo AuditType auditType = qd.logic.getAuditType(null); try { - MultivaluedMap optionalQueryParameters = new MultivaluedMapImpl<>(); - optionalQueryParameters.putAll(qp.getUnknownParameters(queryParameters)); - q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, optionalQueryParameters); + Map> optionalQueryParameters = qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters)); + q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, MapUtils.toMultivaluedMap(optionalQueryParameters)); auditType = qd.logic.getAuditType(q); } finally { queryParameters.add(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); @@ -708,7 +724,7 @@ public GenericResponse createQuery(@Required("logicName") @PathParam("lo if (!queryParameters.containsKey(AuditParameters.AUDIT_ID) && q != null) { queryParameters.putSingle(AuditParameters.AUDIT_ID, q.getId().toString()); } - auditor.audit(queryParameters); + auditor.audit(MapUtils.toMultiValueMap(queryParameters)); } catch (IllegalArgumentException e) { log.error("Error validating audit parameters", e); BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.MISSING_REQUIRED_PARAMETER, e); @@ -726,9 +742,9 @@ public GenericResponse 
createQuery(@Required("logicName") @PathParam("lo priority = qd.logic.getConnectionPriority(); Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); q.populateTrackingMap(trackingMap); - accumuloConnectionRequestBean.requestBegin(q.getId().toString()); + accumuloConnectionRequestBean.requestBegin(q.getId().toString(), qd.userDn, trackingMap); try { - client = connectionFactory.getClient(qd.logic.getConnPoolName(), priority, trackingMap); + client = connectionFactory.getClient(qd.userDn, qd.proxyServers, qd.logic.getConnPoolName(), priority, trackingMap); } finally { accumuloConnectionRequestBean.requestEnd(q.getId().toString()); } @@ -857,9 +873,8 @@ public GenericResponse planQuery(@Required("logicName") @PathParam("logi AuditType auditType = qd.logic.getAuditType(null); try { - MultivaluedMap optionalQueryParameters = new MultivaluedMapImpl<>(); - optionalQueryParameters.putAll(qp.getUnknownParameters(queryParameters)); - q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, optionalQueryParameters); + Map> optionalQueryParameters = qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters)); + q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, MapUtils.toMultivaluedMap(optionalQueryParameters)); auditType = qd.logic.getAuditType(q); } finally { queryParameters.add(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); @@ -880,7 +895,7 @@ public GenericResponse planQuery(@Required("logicName") @PathParam("logi if (!queryParameters.containsKey(AuditParameters.AUDIT_ID)) { queryParameters.putSingle(AuditParameters.AUDIT_ID, q.getId().toString()); } - auditor.audit(queryParameters); + auditor.audit(MapUtils.toMultiValueMap(queryParameters)); } catch (IllegalArgumentException e) { log.error("Error validating audit parameters", e); BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.MISSING_REQUIRED_PARAMETER, e); @@ -898,9 +913,9 @@ public GenericResponse 
planQuery(@Required("logicName") @PathParam("logi priority = qd.logic.getConnectionPriority(); Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); q.populateTrackingMap(trackingMap); - accumuloConnectionRequestBean.requestBegin(q.getId().toString()); + accumuloConnectionRequestBean.requestBegin(q.getId().toString(), qd.userDn, trackingMap); try { - client = connectionFactory.getClient(qd.logic.getConnPoolName(), priority, trackingMap); + client = connectionFactory.getClient(qd.userDn, qd.proxyServers, qd.logic.getConnPoolName(), priority, trackingMap); } finally { accumuloConnectionRequestBean.requestEnd(q.getId().toString()); } @@ -911,10 +926,10 @@ public GenericResponse planQuery(@Required("logicName") @PathParam("logi } else { qd.logic.preInitialize(q, WSAuthorizationsUtil.buildAuthorizations(null)); } - DatawavePrincipal queryPrincipal = (qd.logic.getUserOperations() == null) ? (DatawavePrincipal) qd.p - : qd.logic.getUserOperations().getRemoteUser((DatawavePrincipal) qd.p); + DatawavePrincipal queryPrincipal = (DatawavePrincipal) ((qd.logic.getUserOperations() == null) ? 
qd.p + : qd.logic.getUserOperations().getRemoteUser((DatawavePrincipal) qd.p)); // the overall principal (the one with combined auths across remote user operations) is our own user operations bean - DatawavePrincipal overallPrincipal = userOperationsBean.getRemoteUser((DatawavePrincipal) qd.p); + DatawavePrincipal overallPrincipal = (DatawavePrincipal) userOperationsBean.getRemoteUser((DatawavePrincipal) qd.p); Set calculatedAuths = WSAuthorizationsUtil.getDowngradedAuthorizations(qp.getAuths(), overallPrincipal, queryPrincipal); String plan = qd.logic.getPlan(client, q, calculatedAuths, expandFields, expandValues); response.setResult(plan); @@ -992,9 +1007,8 @@ public GenericResponse predictQuery(@Required("logicName") @PathParam("l if (predictor != null) { try { qp.setPersistenceMode(QueryPersistence.TRANSIENT); - MultivaluedMap optionalQueryParameters = new MultivaluedMapImpl<>(); - optionalQueryParameters.putAll(qp.getUnknownParameters(queryParameters)); - Query q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, optionalQueryParameters); + Map> optionalQueryParameters = qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters)); + Query q = persister.create(qd.userDn, qd.dnList, marking, queryLogicName, qp, MapUtils.toMultivaluedMap(optionalQueryParameters)); BaseQueryMetric metric = metricFactory.createMetric(); metric.populate(q); @@ -1082,7 +1096,7 @@ private List getQueryByName(String name) throws Exception { } // will throw IllegalArgumentException if not defined - QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), p); + QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), (DatawavePrincipal) p); AccumuloConnectionFactory.Priority priority = logic.getConnectionPriority(); RunningQuery query = new RunningQuery(metrics, null, priority, logic, q, q.getQueryAuthorizations(), p, new RunningQueryTimingImpl(queryExpirationConf, qp.getPageTimeout()), this.predictor, this.userOperationsBean, 
this.metricFactory); @@ -1119,7 +1133,7 @@ private RunningQuery getQueryById(String id, Principal principal) throws Excepti Query q = queries.get(0); // will throw IllegalArgumentException if not defined - QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), principal); + QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), (DatawavePrincipal) principal); AccumuloConnectionFactory.Priority priority = logic.getConnectionPriority(); query = new RunningQuery(metrics, null, priority, logic, q, q.getQueryAuthorizations(), principal, new RunningQueryTimingImpl(queryExpirationConf, qp.getPageTimeout()), this.predictor, this.userOperationsBean, @@ -1154,7 +1168,8 @@ private RunningQuery adminGetQueryById(String id) throws Exception { final String auths = q.getQueryAuthorizations(); // will throw IllegalArgumentException if not defined - final QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), ctx.getCallerPrincipal()); + Principal principal = ctx.getCallerPrincipal(); + final QueryLogic logic = queryLogicFactory.getQueryLogic(q.getQueryLogicName(), (DatawavePrincipal) principal); final AccumuloConnectionFactory.Priority priority = logic.getConnectionPriority(); query = RunningQuery.createQueryWithAuthorizations(metrics, null, priority, logic, q, auths, new RunningQueryTimingImpl(queryExpirationConf, qp.getPageTimeout()), this.predictor, this.metricFactory); @@ -1222,14 +1237,12 @@ public VoidResponse reset(@Required("id") @PathParam("id") String id) { query.closeConnection(connectionFactory); } else { AuditType auditType = query.getLogic().getAuditType(query.getSettings()); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(query.getSettings().toMap()); - - queryParameters.putSingle(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); - queryParameters.putSingle(PrivateAuditConstants.LOGIC_CLASS, query.getLogic().getLogicName()); - 
queryParameters.putSingle(PrivateAuditConstants.USER_DN, query.getSettings().getUserDN()); - queryParameters.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, query.getSettings().getColumnVisibility()); + MultiValueMap queryParameters = new LinkedMultiValueMap<>(query.getSettings().toMap()); + queryParameters.set(PrivateAuditConstants.AUDIT_TYPE, auditType.name()); + queryParameters.set(PrivateAuditConstants.LOGIC_CLASS, query.getLogic().getLogicName()); + queryParameters.set(PrivateAuditConstants.USER_DN, query.getSettings().getUserDN()); + queryParameters.set(PrivateAuditConstants.COLUMN_VISIBILITY, query.getSettings().getColumnVisibility()); if (!auditType.equals(AuditType.NONE)) { try { try { @@ -1242,7 +1255,7 @@ public VoidResponse reset(@Required("id") @PathParam("id") String id) { } // if the user didn't set an audit id, use the query id if (!queryParameters.containsKey(AuditParameters.AUDIT_ID)) { - queryParameters.putSingle(AuditParameters.AUDIT_ID, id); + queryParameters.set(AuditParameters.AUDIT_ID, id); } auditor.audit(queryParameters); } catch (IllegalArgumentException e) { @@ -1263,9 +1276,10 @@ public VoidResponse reset(@Required("id") @PathParam("id") String id) { priority = query.getConnectionPriority(); Map trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); query.getSettings().populateTrackingMap(trackingMap); - accumuloConnectionRequestBean.requestBegin(id); + QueryData qd = setUserData(ctx.getCallerPrincipal(), new QueryData()); + accumuloConnectionRequestBean.requestBegin(id, qd.userDn, trackingMap); try { - client = connectionFactory.getClient(query.getLogic().getConnPoolName(), priority, trackingMap); + client = connectionFactory.getClient(qd.userDn, qd.proxyServers, query.getLogic().getConnPoolName(), priority, trackingMap); } finally { accumuloConnectionRequestBean.requestEnd(id); } @@ -1466,11 +1480,11 @@ private T lookupContentByUUID(String uuidType, String uuid, MultivaluedMap T 
lookupContentByUUIDBatch(MultivaluedMap queryParamet } // Create the criteria for looking up the respective events, which we need to get the shard IDs and column families // required for the content lookup - final PostUUIDCriteria criteria = new PostUUIDCriteria(uuidPairs, queryParameters); + final PostUUIDCriteria criteria = new PostUUIDCriteria(uuidPairs, MapUtils.toMultiValueMap(queryParameters)); // Set the HTTP headers if a streamed response is required if (streamingOutput) { @@ -1608,11 +1622,11 @@ T lookupUUID(String uuidType, String uuid, MultivaluedMap que try { // Construct the criteria used to perform the query final GetUUIDCriteria criteria; - final String view = (null != matchingType) ? matchingType.getDefinedView(uuidTypeContext) : null; + final String view = (null != matchingType) ? matchingType.getQueryLogic(uuidTypeContext) : null; if ((LookupUUIDUtil.UID_QUERY.equals(view) || LookupUUIDUtil.LOOKUP_UID_QUERY.equals(view))) { - criteria = new UIDQueryCriteria(uuid, uuidType, queryParameters); + criteria = new UIDQueryCriteria(uuid, uuidType, MapUtils.toMultiValueMap(queryParameters)); } else { - criteria = new GetUUIDCriteria(uuid, uuidType, queryParameters); + criteria = new GetUUIDCriteria(uuid, uuidType, MapUtils.toMultiValueMap(queryParameters)); } // Add the HTTP headers in case streaming is required @@ -1679,7 +1693,7 @@ public T lookupUUIDBatch(MultivaluedMap queryParameters, @Req if (!StringUtils.isEmpty(streaming)) { streamingOutput = Boolean.parseBoolean(streaming); } - final PostUUIDCriteria criteria = new PostUUIDCriteria(uuidPairs, queryParameters); + final PostUUIDCriteria criteria = new PostUUIDCriteria(uuidPairs, MapUtils.toMultiValueMap(queryParameters)); if (streamingOutput) { criteria.setStreamingOutputHeaders(httpHeaders); } @@ -2174,8 +2188,9 @@ public VoidResponse close(@Required("id") @PathParam("id") String id) { private VoidResponse close(String id, Principal principal) { VoidResponse response = new VoidResponse(); try { - 
boolean connectionRequestCanceled = accumuloConnectionRequestBean.cancelConnectionRequest(id, principal); - Pair,AccumuloClient> tuple = qlCache.pollIfOwnedBy(id, ((DatawavePrincipal) principal).getShortName()); + QueryData qd = setUserData(ctx.getCallerPrincipal(), new QueryData()); + boolean connectionRequestCanceled = accumuloConnectionRequestBean.cancelConnectionRequest(id, qd.userDn); + Pair,AccumuloClient> tuple = qlCache.pollIfOwnedBy(id, qd.userid); if (!id.matches(UUID_REGEX_RULE)) { log.error("Invalid query id: " + id); GenericResponse genericResponse = new GenericResponse<>(); @@ -2221,6 +2236,8 @@ private VoidResponse close(String id, Principal principal) { response.addException(qe.getBottomQueryException()); int statusCode = qe.getBottomQueryException().getStatusCode(); throw new DatawaveWebApplicationException(qe, response, statusCode); + } catch (Throwable t) { + throw t; } } @@ -2331,7 +2348,8 @@ public VoidResponse cancel(@Required("id") @PathParam("id") String id) { VoidResponse response = new VoidResponse(); try { boolean connectionRequestCanceled = accumuloConnectionRequestBean.cancelConnectionRequest(id); - Pair,AccumuloClient> tuple = qlCache.pollIfOwnedBy(id, ctx.getCallerPrincipal().getName()); + QueryData qd = setUserData(ctx.getCallerPrincipal(), new QueryData()); + Pair,AccumuloClient> tuple = qlCache.pollIfOwnedBy(id, qd.userid); if (tuple == null) { try { @@ -2716,7 +2734,8 @@ public GenericResponse duplicateQuery(@PathParam("id") String id, @Requi // TODO: add validation for all these sets // maybe set variables instead of stuffing in query if (newQueryLogicName != null) { - q.setQueryLogicName(queryLogicFactory.getQueryLogic(newQueryLogicName, ctx.getCallerPrincipal()).getLogicName()); + Principal principal = ctx.getCallerPrincipal(); + q.setQueryLogicName(queryLogicFactory.getQueryLogic(newQueryLogicName, (DatawavePrincipal) principal).getLogicName()); } if (newQuery != null) { q.setQuery(newQuery); @@ -2738,6 +2757,7 @@ public 
GenericResponse duplicateQuery(@PathParam("id") String id, @Requi } if (newMaxResultsOverride != null) { q.setMaxResultsOverride(newMaxResultsOverride); + q.setMaxResultsOverridden(true); } if (newPageTimeout != null) { q.setPageTimeout(newPageTimeout); @@ -2752,8 +2772,7 @@ public GenericResponse duplicateQuery(@PathParam("id") String id, @Requi } } } - MultivaluedMap newSettings = new MultivaluedMapImpl<>(); - newSettings.putAll(q.toMap()); + MultivaluedMap newSettings = MapUtils.toMultivaluedMap(q.toMap()); newSettings.putSingle(QueryParameters.QUERY_PERSISTENCE, persistence.name()); return createQuery(q.getQueryLogicName(), newSettings); } catch (DatawaveWebApplicationException e) { @@ -2799,7 +2818,7 @@ public GenericResponse duplicateQuery(@PathParam("id") String id, @Requi * @param parameters * - optional parameters to the query, a semi-colon separated list name=value pairs (optional, auditing required if changed) * @see datawave.webservice.query.runner.QueryExecutorBean#updateQuery(String, String, String, String, java.util.Date, java.util.Date, String, - * java.util.Date, Integer, Integer, Long, datawave.webservice.query.QueryPersistence, String) + * java.util.Date, Integer, Integer, Long, datawave.microservice.query.QueryPersistence, String) * * @return {@code datawave.webservice.result.GenericResponse} * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user, by specifying a chain of DNs of the identities to proxy @@ -2892,11 +2911,10 @@ private void updateQuery(GenericResponse response, RunningQuery runningQ AuditType auditType = runningQuery.getLogic().getAuditType(runningQuery.getSettings()); if (!auditType.equals(AuditType.NONE)) { try { - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(duplicate.toMap()); + MultiValueMap queryParameters = new LinkedMultiValueMap<>(duplicate.toMap()); // if the user didn't set an audit id, use the query id if 
(!queryParameters.containsKey(AuditParameters.AUDIT_ID)) { - queryParameters.putSingle(AuditParameters.AUDIT_ID, q.getId().toString()); + queryParameters.set(AuditParameters.AUDIT_ID, q.getId().toString()); } auditor.audit(queryParameters); } catch (IllegalArgumentException e) { @@ -2934,11 +2952,11 @@ private void updateQuery(GenericResponse response, RunningQuery runningQ } private void updateQueryParams(Query q, String queryLogicName, String query, Date beginDate, Date endDate, String queryAuthorizations, Date expirationDate, - Integer pagesize, Integer pageTimeout, Long maxResultsOverride, String parameters) throws CloneNotSupportedException { + Integer pagesize, Integer pageTimeout, Long maxResultsOverride, String parameters) throws QueryException, CloneNotSupportedException { Principal p = ctx.getCallerPrincipal(); // TODO: add validation for all these sets if (queryLogicName != null) { - QueryLogic logic = queryLogicFactory.getQueryLogic(queryLogicName, p); + QueryLogic logic = queryLogicFactory.getQueryLogic(queryLogicName, (DatawavePrincipal) p); q.setQueryLogicName(logic.getLogicName()); } if (query != null) { @@ -2964,6 +2982,7 @@ private void updateQueryParams(Query q, String queryLogicName, String query, Dat } if (maxResultsOverride != null) { q.setMaxResultsOverride(maxResultsOverride); + q.setMaxResultsOverridden(true); } if (parameters != null) { Set params = new HashSet<>(); @@ -3273,7 +3292,7 @@ public StreamingOutput execute(@PathParam("logicName") String logicName, Multiva // Find the response class Class responseClass; try { - QueryLogic l = queryLogicFactory.getQueryLogic(logicName, p); + QueryLogic l = queryLogicFactory.getQueryLogic(logicName, (DatawavePrincipal) p); QueryLogicTransformer t = l.getEnrichedTransformer(q); BaseResponse refResponse = t.createResponse(emptyList); responseClass = refResponse.getClass(); diff --git a/web-services/query/src/main/java/datawave/webservice/query/runner/RunningQuery.java 
b/web-services/query/src/main/java/datawave/webservice/query/runner/RunningQuery.java index b9f2a3d8a93..3b44d65d656 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/runner/RunningQuery.java +++ b/web-services/query/src/main/java/datawave/webservice/query/runner/RunningQuery.java @@ -4,7 +4,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.concurrent.ArrayBlockingQueue; @@ -23,6 +22,15 @@ import org.apache.log4j.Logger; import org.jboss.logging.NDC; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.WritesQueryMetrics; +import datawave.core.query.logic.WritesResultCardinalities; +import datawave.core.query.predict.QueryPredictor; +import datawave.microservice.query.Query; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.Prediction; import datawave.microservice.querymetric.QueryMetric; @@ -32,20 +40,11 @@ import datawave.security.authorization.UserOperations; import datawave.security.authorization.remote.RemoteUserOperationsImpl; import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.connection.WrappedAccumuloClient; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; import datawave.webservice.query.cache.AbstractRunningQuery; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.data.ObjectSizeOf; import datawave.webservice.query.exception.DatawaveErrorCode; 
import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.WritesQueryMetrics; -import datawave.webservice.query.logic.WritesResultCardinalities; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; @@ -130,8 +129,8 @@ public RunningQuery(QueryMetricsBean queryMetrics, AccumuloClient client, Accumu } else { logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(null)); } - DatawavePrincipal queryPrincipal = (logic.getUserOperations() == null) ? (DatawavePrincipal) principal - : logic.getUserOperations().getRemoteUser((DatawavePrincipal) principal); + DatawavePrincipal queryPrincipal = (DatawavePrincipal) ((logic.getUserOperations() == null) ? principal + : logic.getUserOperations().getRemoteUser((DatawavePrincipal) principal)); // the overall principal (the one with combined auths across remote user operations) is our own user operations (probably the UserOperationsBean) // don't call remote user operations if it's asked not to DatawavePrincipal overallPrincipal = (userOperations == null @@ -587,7 +586,7 @@ public ResultsPage next() throws Exception { log.info("Returning final empty page"); terminateResultsThread(); // This query is done, we have no more results to return. 
- return new ResultsPage(); + return new ResultsPage(Collections.emptyList(), ResultsPage.Status.NONE); } } } diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/AbstractUUIDLookupCriteria.java b/web-services/query/src/main/java/datawave/webservice/query/util/AbstractUUIDLookupCriteria.java index 6026bcdab5d..2f490dabdfb 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/AbstractUUIDLookupCriteria.java +++ b/web-services/query/src/main/java/datawave/webservice/query/util/AbstractUUIDLookupCriteria.java @@ -1,11 +1,11 @@ package datawave.webservice.query.util; import javax.ws.rs.core.HttpHeaders; -import javax.ws.rs.core.MultivaluedMap; -import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; /** * Abstract implementation of criteria used for UUID lookup queries @@ -14,7 +14,7 @@ public abstract class AbstractUUIDLookupCriteria { private boolean allEventLookup; private boolean contentLookup; private HttpHeaders headersForStreamedResponse; - private MultivaluedMap queryParameters; + private MultiValueMap queryParameters; private String uuidTypeContext; @@ -26,12 +26,12 @@ public abstract class AbstractUUIDLookupCriteria { */ public AbstractUUIDLookupCriteria(final Query settings) { if (null != settings) { - this.queryParameters = new MultivaluedMapImpl<>(); + this.queryParameters = new LinkedMultiValueMap<>(); this.queryParameters.putAll(settings.toMap()); } } - public AbstractUUIDLookupCriteria(final MultivaluedMap queryParameters) { + public AbstractUUIDLookupCriteria(final MultiValueMap queryParameters) { this.queryParameters = queryParameters; } @@ -70,7 +70,7 @@ public void setStreamingOutputHeaders(final HttpHeaders headers) { this.headersForStreamedResponse = headers; } - public MultivaluedMap getQueryParameters() { + public 
MultiValueMap getQueryParameters() { return queryParameters; } diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/GetUUIDCriteria.java b/web-services/query/src/main/java/datawave/webservice/query/util/GetUUIDCriteria.java index 6c581ac376f..6ea639ce523 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/GetUUIDCriteria.java +++ b/web-services/query/src/main/java/datawave/webservice/query/util/GetUUIDCriteria.java @@ -1,6 +1,6 @@ package datawave.webservice.query.util; -import javax.ws.rs.core.MultivaluedMap; +import org.springframework.util.MultiValueMap; /** * Lookup criteria for one and only one UUID @@ -9,7 +9,7 @@ public class GetUUIDCriteria extends AbstractUUIDLookupCriteria { protected final String uuid; protected final String uuidType; - public GetUUIDCriteria(final String uuid, final String uuidType, MultivaluedMap queryParameters) { + public GetUUIDCriteria(final String uuid, final String uuidType, MultiValueMap queryParameters) { super(queryParameters); this.uuid = uuid; diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java b/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java index 79fec7b1250..425476a073c 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java +++ b/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java @@ -24,6 +24,14 @@ import org.apache.log4j.Logger; import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.util.QueryUtil; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; +import datawave.microservice.query.QueryPersistence; import datawave.query.data.UUIDType; import 
datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.UserOperations; @@ -33,16 +41,9 @@ import datawave.webservice.common.audit.AuditParameters; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.common.exception.NoResultsException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.configuration.LookupUUIDConfiguration; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.query.result.event.FieldBase; import datawave.webservice.query.result.event.Metadata; @@ -381,7 +382,7 @@ public T createUUIDQueryAndNext(final AbstractUUIDLookupCriteria unvalidated queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); try { - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(this.beginAsDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(this.beginAsDate)); } catch (ParseException e) { throw new RuntimeException("Unable to format new query begin date: " + this.beginAsDate); } @@ -391,7 +392,7 @@ public T createUUIDQueryAndNext(final AbstractUUIDLookupCriteria unvalidated queryParameters.remove(QueryParameters.QUERY_END); } try { - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); } catch (ParseException e) { throw new RuntimeException("Unable to format new query end date: " 
+ endDate); } @@ -401,7 +402,7 @@ public T createUUIDQueryAndNext(final AbstractUUIDLookupCriteria unvalidated queryParameters.remove(QueryParameters.QUERY_EXPIRATION); } try { - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expireDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expireDate)); } catch (ParseException e) { throw new RuntimeException("Unable to format new query expr date: " + expireDate); } @@ -439,7 +440,7 @@ public T createUUIDQueryAndNext(final AbstractUUIDLookupCriteria unvalidated return response; } - public Query createSettings(MultivaluedMap queryParameters) { + public Query createSettings(Map> queryParameters) { log.debug("Initial query parameters: " + queryParameters); Query query = responseObjectFactory.getQueryImpl(); if (queryParameters != null) { @@ -447,7 +448,11 @@ public Query createSettings(MultivaluedMap queryParameters) { if (defaultOptionalParams != null) { expandedQueryParameters.putAll(defaultOptionalParams); } - String delimitedParams = queryParameters.getFirst(QueryParameters.QUERY_PARAMS); + List params = queryParameters.get(QueryParameters.QUERY_PARAMS); + String delimitedParams = null; + if (params != null && !params.isEmpty()) { + delimitedParams = params.get(0); + } if (delimitedParams != null) { for (QueryImpl.Parameter pm : QueryUtil.parseParameters(delimitedParams)) { expandedQueryParameters.putSingle(pm.getParameterName(), pm.getParameterValue()); @@ -465,25 +470,29 @@ public Query createSettings(MultivaluedMap queryParameters) { return query; } - public String getAuths(String logicName, MultivaluedMap queryParameters, String queryAuths, Principal principal) { + public String getAuths(String logicName, Map> queryParameters, String queryAuths, Principal principal) { String userAuths; try { - QueryLogic logic = queryLogicFactory.getQueryLogic(logicName, principal); + QueryLogic logic = 
queryLogicFactory.getQueryLogic(logicName, (DatawavePrincipal) principal); Query settings = createSettings(queryParameters); if (queryAuths == null) { logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(((DatawavePrincipal) principal).getAuthorizations())); } else { logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(WSAuthorizationsUtil.splitAuths(queryAuths)))); } + // the query principal is our local principal unless the query logic has a different user operations DatawavePrincipal queryPrincipal = (logic.getUserOperations() == null) ? (DatawavePrincipal) principal : logic.getUserOperations().getRemoteUser((DatawavePrincipal) principal); // the overall principal (the one with combined auths across remote user operations) is our own user operations (probably the UserOperationsBean) // don't call remote user operations if it's asked not to - DatawavePrincipal overallPrincipal = (userOperations == null - || "false".equalsIgnoreCase(queryParameters.getFirst(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES))) - ? (DatawavePrincipal) principal - : userOperations.getRemoteUser((DatawavePrincipal) principal); + String includeRemoteServices = "true"; + if (queryParameters.get(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES) != null + && !queryParameters.get(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES).isEmpty()) { + includeRemoteServices = queryParameters.get(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES).get(0); + } + DatawavePrincipal overallPrincipal = (userOperations == null || "false".equalsIgnoreCase(includeRemoteServices)) ? 
(DatawavePrincipal) principal + : userOperations.getRemoteUser((DatawavePrincipal) principal); if (queryAuths != null) { userAuths = WSAuthorizationsUtil.downgradeUserAuths(queryAuths, overallPrincipal, queryPrincipal); } else { @@ -753,18 +762,18 @@ private EventQueryResponseBase lookupPagedContent(final String queryName, final queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_STRING, contentQuery.toString()); try { - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(this.beginAsDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(this.beginAsDate)); } catch (ParseException e1) { throw new RuntimeException("Error formatting begin date: " + this.beginAsDate); } try { - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); } catch (ParseException e1) { throw new RuntimeException("Error formatting end date: " + endDate); } queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, userAuths); try { - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expireDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expireDate)); } catch (ParseException e1) { throw new RuntimeException("Error formatting expr date: " + expireDate); } @@ -868,18 +877,18 @@ private StreamingOutput lookupStreamedContent(final String queryName, final Abst queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_STRING, contentQuery.toString()); try { - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(this.beginAsDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(this.beginAsDate)); 
} catch (ParseException e1) { throw new RuntimeException("Error formatting begin date: " + this.beginAsDate); } try { - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); } catch (ParseException e1) { throw new RuntimeException("Error formatting end date: " + endDate); } queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, userAuths); try { - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expireDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expireDate)); } catch (ParseException e1) { throw new RuntimeException("Error formatting expr date: " + expireDate); } @@ -987,7 +996,7 @@ private AbstractUUIDLookupCriteria validateLookupCriteria(final AbstractUUIDLook if (null != uuidType) { // Assign the query logic name if undefined if (null == logicName) { - logicName = uuidType.getDefinedView(criteria.getUUIDTypeContext()); + logicName = uuidType.getQueryLogic(criteria.getUUIDTypeContext()); } // Increment the UUID type/value count @@ -1047,7 +1056,7 @@ private AbstractUUIDLookupCriteria validateLookupCriteria(final AbstractUUIDLook params = params + ';' + PARAM_HIT_LIST + ':' + true; } - criteria.getQueryParameters().putSingle(QueryParameters.QUERY_PARAMS, params); + criteria.getQueryParameters().set(QueryParameters.QUERY_PARAMS, params); // All is well, so return the validated criteria return criteria; @@ -1112,8 +1121,8 @@ else if ((null == uuid) || uuid.isEmpty()) { throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorReponse); } // Reject conflicting logic name - else if ((null != logicName) && !logicName.equals(matchingUuidType.getDefinedView(lookupContext))) { - final String message = "Multiple UUID types '" + logicName + "' and '" + matchingUuidType.getDefinedView(lookupContext) + 
"' not " + else if ((null != logicName) && !logicName.equals(matchingUuidType.getQueryLogic(lookupContext))) { + final String message = "Multiple UUID types '" + logicName + "' and '" + matchingUuidType.getQueryLogic(lookupContext) + "' not " + " supported within the same lookup request"; final GenericResponse errorReponse = new GenericResponse<>(); errorReponse.addMessage(message); diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/MapUtils.java b/web-services/query/src/main/java/datawave/webservice/query/util/MapUtils.java new file mode 100644 index 00000000000..da2da2bc963 --- /dev/null +++ b/web-services/query/src/main/java/datawave/webservice/query/util/MapUtils.java @@ -0,0 +1,31 @@ +package datawave.webservice.query.util; + +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MultivaluedMap; + +import org.jboss.resteasy.specimpl.MultivaluedMapImpl; +import org.springframework.util.LinkedMultiValueMap; +import org.springframework.util.MultiValueMap; + +public class MapUtils { + + public static MultiValueMap toMultiValueMap(MultivaluedMap multivaluedMap) { + MultiValueMap multiValueMap = null; + if (multivaluedMap != null) { + multiValueMap = new LinkedMultiValueMap<>(); + multivaluedMap.forEach(multiValueMap::put); + } + return multiValueMap; + } + + public static MultivaluedMap toMultivaluedMap(Map> multiValueMap) { + MultivaluedMap multivaluedMap = null; + if (multiValueMap != null) { + multivaluedMap = new MultivaluedMapImpl<>(); + multiValueMap.forEach(multivaluedMap::put); + } + return multivaluedMap; + } +} diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/NextContentCriteria.java b/web-services/query/src/main/java/datawave/webservice/query/util/NextContentCriteria.java index 1166bf62e2c..086aca6022a 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/NextContentCriteria.java +++ 
b/web-services/query/src/main/java/datawave/webservice/query/util/NextContentCriteria.java @@ -1,8 +1,8 @@ package datawave.webservice.query.util; -import javax.ws.rs.core.MultivaluedMap; +import org.springframework.util.MultiValueMap; -import datawave.webservice.query.Query; +import datawave.microservice.query.Query; /** * Lookup criteria for paging through content results @@ -10,7 +10,7 @@ public class NextContentCriteria extends AbstractUUIDLookupCriteria { private final String queryId; - public NextContentCriteria(final String queryId, MultivaluedMap queryParameters) { + public NextContentCriteria(final String queryId, MultiValueMap queryParameters) { super(queryParameters); this.queryId = queryId; } diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/PostUUIDCriteria.java b/web-services/query/src/main/java/datawave/webservice/query/util/PostUUIDCriteria.java index 5e369481a7f..32e3e1ea0d2 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/PostUUIDCriteria.java +++ b/web-services/query/src/main/java/datawave/webservice/query/util/PostUUIDCriteria.java @@ -1,6 +1,6 @@ package datawave.webservice.query.util; -import javax.ws.rs.core.MultivaluedMap; +import org.springframework.util.MultiValueMap; /** * Lookup criteria for one or more UUIDs @@ -8,7 +8,7 @@ public class PostUUIDCriteria extends AbstractUUIDLookupCriteria { private final String uuidPairs; - public PostUUIDCriteria(final String uuidPairs, MultivaluedMap queryParameters) { + public PostUUIDCriteria(final String uuidPairs, MultiValueMap queryParameters) { super(queryParameters); this.uuidPairs = uuidPairs; } diff --git a/web-services/query/src/main/java/datawave/webservice/query/util/UIDQueryCriteria.java b/web-services/query/src/main/java/datawave/webservice/query/util/UIDQueryCriteria.java index e5109f734db..d2ee65cc689 100644 --- a/web-services/query/src/main/java/datawave/webservice/query/util/UIDQueryCriteria.java +++ 
b/web-services/query/src/main/java/datawave/webservice/query/util/UIDQueryCriteria.java @@ -1,12 +1,12 @@ package datawave.webservice.query.util; -import javax.ws.rs.core.MultivaluedMap; +import org.springframework.util.MultiValueMap; /** * Criteria for one and only one UIDQuery-based lookup */ public class UIDQueryCriteria extends GetUUIDCriteria { - public UIDQueryCriteria(final String uuid, final String uuidType, MultivaluedMap queryParameters) { + public UIDQueryCriteria(final String uuid, final String uuidType, MultiValueMap queryParameters) { super(uuid, uuidType, queryParameters); } diff --git a/web-services/query/src/test/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBeanTest.java index 0de451eb3ec..cd1963d5655 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/cache/CreatedQueryLogicCacheBeanTest.java @@ -17,8 +17,8 @@ import com.google.common.collect.Sets; +import datawave.core.query.logic.QueryLogic; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean.Triple; -import datawave.webservice.query.logic.QueryLogic; /** * diff --git a/web-services/query/src/test/java/datawave/webservice/query/cache/QueryCacheBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/cache/QueryCacheBeanTest.java index 6d7b4565a7d..bf35cd3316f 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/cache/QueryCacheBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/cache/QueryCacheBeanTest.java @@ -20,11 +20,11 @@ import org.powermock.api.easymock.annotation.Mock; import org.powermock.modules.junit4.PowerMockRunner; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.logic.QueryLogic; +import 
datawave.microservice.authorization.util.AuthorizationsUtil; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; -import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.logic.QueryLogic; import datawave.webservice.query.runner.RunningQuery; @RunWith(PowerMockRunner.class) @@ -126,7 +126,7 @@ public void testGetRunningQueries() throws Exception { expect(logic.isLongRunningQuery()).andReturn(false); expect(logic.getResultLimit(q)).andReturn(-1L); expect(logic.getMaxResults()).andReturn(-1L); - logic.preInitialize(q, WSAuthorizationsUtil.buildAuthorizations(null)); + logic.preInitialize(q, AuthorizationsUtil.buildAuthorizations(null)); expect(logic.getUserOperations()).andReturn(null); PowerMock.replayAll(); diff --git a/web-services/query/src/test/java/datawave/webservice/query/cache/QueryExpirationBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/cache/QueryExpirationBeanTest.java index 0b47891b7fe..31856b666bd 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/cache/QueryExpirationBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/cache/QueryExpirationBeanTest.java @@ -15,9 +15,10 @@ import com.google.common.cache.Cache; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.QueryMetricFactoryImpl; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; import datawave.webservice.query.runner.RunningQuery; @RunWith(PowerMockRunner.class) @@ -70,11 +71,11 @@ public void testRemoveIdleOrExpired() throws Exception { private QueryExpirationBean createBean(int 
expireTime) throws IllegalArgumentException, IllegalAccessException { QueryExpirationBean bean = new QueryExpirationBean(); - QueryExpirationConfiguration expirationConfiguration = new QueryExpirationConfiguration(); - setInternalState(expirationConfiguration, "idleTimeMinutes", expireTime); - setInternalState(expirationConfiguration, "callTimeMinutes", expireTime); + QueryExpirationProperties expirationConfiguration = new QueryExpirationProperties(); + setInternalState(expirationConfiguration, "idleTimeout", expireTime); + setInternalState(expirationConfiguration, "callTimeout", expireTime); - setInternalState(bean, QueryExpirationConfiguration.class, expirationConfiguration); + setInternalState(bean, QueryExpirationProperties.class, expirationConfiguration); setInternalState(bean, QueryCache.class, queryCache); setInternalState(bean, CreatedQueryLogicCacheBean.class, qlCache); setInternalState(bean, AccumuloConnectionFactory.class, connFactory); diff --git a/web-services/query/src/test/java/datawave/webservice/query/cache/RunningQueryTimingImplTest.java b/web-services/query/src/test/java/datawave/webservice/query/cache/RunningQueryTimingImplTest.java index 6de7b91f018..fc42b757d02 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/cache/RunningQueryTimingImplTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/cache/RunningQueryTimingImplTest.java @@ -8,6 +8,8 @@ import org.junit.Before; import org.junit.Test; +import datawave.microservice.query.config.QueryExpirationProperties; + /** * */ @@ -26,8 +28,8 @@ public void setUp() throws Exception {} public void tearDown() throws Exception {} @Test - public void testQueryExpirationConfigurationDefaults() { - QueryExpirationConfiguration conf = new QueryExpirationConfiguration(); + public void testQueryExpirationPropertiesDefaults() { + QueryExpirationProperties conf = new QueryExpirationProperties(); RunningQueryTimingImpl timing = new RunningQueryTimingImpl(conf, -1); 
assertEquals(60 * 60 * 1000, timing.getMaxCallMs()); @@ -36,11 +38,11 @@ public void testQueryExpirationConfigurationDefaults() { } @Test - public void testQueryExpirationConfiguration() { - QueryExpirationConfiguration conf = new QueryExpirationConfiguration(); - conf.setCallTime(10); - conf.setPageShortCircuitTimeout(9); - conf.setPageSizeShortCircuitCheckTime(5); + public void testQueryExpirationProperties() { + QueryExpirationProperties conf = new QueryExpirationProperties(); + conf.setCallTimeout(10); + conf.setShortCircuitTimeout(9); + conf.setShortCircuitCheckTime(5); RunningQueryTimingImpl timing = new RunningQueryTimingImpl(conf, -1); assertEquals(10 * 60 * 1000, timing.getMaxCallMs()); @@ -49,8 +51,8 @@ public void testQueryExpirationConfiguration() { } @Test - public void testQueryExpirationConfigurationWithTimeout() { - QueryExpirationConfiguration conf = new QueryExpirationConfiguration(); + public void testQueryExpirationPropertiesWithTimeout() { + QueryExpirationProperties conf = new QueryExpirationProperties(); RunningQueryTimingImpl timing = new RunningQueryTimingImpl(conf, 20); assertEquals(20 * 60 * 1000, timing.getMaxCallMs()); diff --git a/web-services/query/src/test/java/datawave/webservice/query/cache/TestQueryLogic.java b/web-services/query/src/test/java/datawave/webservice/query/cache/TestQueryLogic.java index 2accc32ff35..15c72cbac69 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/cache/TestQueryLogic.java +++ b/web-services/query/src/test/java/datawave/webservice/query/cache/TestQueryLogic.java @@ -5,11 +5,11 @@ import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.security.Authorizations; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; -import 
datawave.webservice.query.logic.QueryLogicTransformer; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; public class TestQueryLogic extends BaseQueryLogic { @@ -28,8 +28,8 @@ public String getPlan(AccumuloClient client, Query settings, Set } @Override - public Priority getConnectionPriority() { - return Priority.NORMAL; + public AccumuloConnectionFactory.Priority getConnectionPriority() { + return AccumuloConnectionFactory.Priority.NORMAL; } @Override diff --git a/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationMockTest.java b/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationMockTest.java index 528e6c38a60..e6e46656383 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationMockTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationMockTest.java @@ -19,7 +19,9 @@ import org.powermock.api.easymock.annotation.Mock; import org.powermock.modules.junit4.PowerMockRunner; -import datawave.webservice.query.logic.BaseQueryLogic; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.configuration.QueryData; +import datawave.core.query.logic.BaseQueryLogic; @RunWith(PowerMockRunner.class) public class GenericQueryConfigurationMockTest { @@ -40,8 +42,8 @@ public class GenericQueryConfigurationMockTest { public void setup() { this.config = new GenericQueryConfiguration() { @Override - public Iterator getQueries() { - return super.getQueries(); + public Iterator getQueriesIter() { + return super.getQueriesIter(); } }; } @@ -68,6 +70,8 @@ public void testConstructor_WithConfiguredLogic() { @Test 
public void testCanRunQuery_HappyPath() { + expect(this.authorizations.getAuthorizations()).andReturn(Collections.emptyList()); + // Run the test PowerMock.replayAll(); GenericQueryConfiguration subject = new GenericQueryConfiguration() {}; diff --git a/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationTest.java b/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationTest.java index 93e2f403c44..1e9afaaa5e3 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/configuration/GenericQueryConfigurationTest.java @@ -8,6 +8,7 @@ import org.apache.accumulo.core.security.Authorizations; import org.junit.Test; +import datawave.core.query.configuration.GenericQueryConfiguration; import datawave.util.TableName; public class GenericQueryConfigurationTest { diff --git a/web-services/query/src/test/java/datawave/webservice/query/configuration/QueryDataTest.java b/web-services/query/src/test/java/datawave/webservice/query/configuration/QueryDataTest.java index 4445284b079..8e42387ebf2 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/configuration/QueryDataTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/configuration/QueryDataTest.java @@ -12,19 +12,23 @@ import org.apache.accumulo.core.data.Range; import org.junit.Test; +import datawave.core.query.configuration.QueryData; + public class QueryDataTest { @Test public void testCopyConstructor() { + String tableName = "SHARD"; String query = "FOO == 'bar'"; Collection ranges = Collections.singleton(new Range(new Key("row"), true, new Key("row\0"), false)); Collection columnFamilies = Collections.singleton("FOO"); List settings = new ArrayList<>(); settings.add(new IteratorSetting(20, "iterator", "QueryIterator.class")); - QueryData original = new QueryData(query, 
ranges, columnFamilies, settings); + QueryData original = new QueryData(tableName, query, ranges, columnFamilies, settings); QueryData copy = new QueryData(original); + assertEquals(original.getTableName(), copy.getTableName()); assertEquals(original.getQuery(), copy.getQuery()); assertEquals(original.getRanges(), copy.getRanges()); assertEquals(original.getColumnFamilies(), copy.getColumnFamilies()); diff --git a/web-services/query/src/test/java/datawave/webservice/query/configuration/TestBaseQueryLogic.java b/web-services/query/src/test/java/datawave/webservice/query/configuration/TestBaseQueryLogic.java index c0b6228eddf..e903fbdd148 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/configuration/TestBaseQueryLogic.java +++ b/web-services/query/src/test/java/datawave/webservice/query/configuration/TestBaseQueryLogic.java @@ -24,14 +24,15 @@ import com.google.common.collect.Sets; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.security.authorization.DatawavePrincipal; +import datawave.security.authorization.ProxiedUserDetails; import datawave.webservice.common.audit.Auditor; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.EasyRoleManager; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.RoleManager; @RunWith(PowerMockRunner.class) public class TestBaseQueryLogic { @@ -57,15 +58,17 @@ public void testConstructor_Copy() throws Exception { expect(this.copy.getPageByteTrigger()).andReturn(1024L); 
expect(this.copy.getCollectQueryMetrics()).andReturn(false); expect(this.copy.getConnPoolName()).andReturn("connPool1"); - expect(this.copy.getPrincipal()).andReturn(null); - RoleManager roleManager = new EasyRoleManager(); - expect(this.copy.getRoleManager()).andReturn(roleManager); + expect(this.copy.getRequiredRoles()).andReturn(null); expect(this.copy.getSelectorExtractor()).andReturn(null); + expect(this.copy.getCurrentUser()).andReturn(null); + expect(this.copy.getServerUser()).andReturn(null); expect(this.copy.getResponseEnricherBuilder()).andReturn(null); - DatawavePrincipal principal = new DatawavePrincipal(); - expect(this.copy.getPrincipal()).andReturn(principal).anyTimes(); + ProxiedUserDetails principal = new DatawavePrincipal(); + expect(this.copy.getCurrentUser()).andReturn(principal).anyTimes(); // setup expectations for GenericQueryConfig + expect(config.getQuery()).andReturn(new QueryImpl()); + expect(config.isCheckpointable()).andReturn(false); expect(config.getAuthorizations()).andReturn(null).anyTimes(); expect(config.getQueryString()).andReturn("FOO == 'bar'").anyTimes(); expect(config.getBeginDate()).andReturn(null).anyTimes(); @@ -75,8 +78,10 @@ public void testConstructor_Copy() throws Exception { expect(config.getTableName()).andReturn("tableName").anyTimes(); expect(config.getBypassAccumulo()).andReturn(false).anyTimes(); expect(config.getAccumuloPassword()).andReturn("env:PASS").anyTimes(); + expect(config.isReduceResults()).andReturn(false).anyTimes(); expect(config.getClient()).andReturn(null).anyTimes(); - expect(config.getQueries()).andReturn(Collections.emptyIterator()).anyTimes(); + expect(config.getQueries()).andReturn(Collections.emptyList()).anyTimes(); + expect(config.getQueriesIter()).andReturn(Collections.emptyIterator()).anyTimes(); expect(this.copy.getConfig()).andReturn(config).anyTimes(); // Run the test diff --git 
a/web-services/query/src/test/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptorTest.java b/web-services/query/src/test/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptorTest.java index dcde8af0c33..1f8646b757c 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptorTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/interceptor/QueryMetricsEnrichmentInterceptorTest.java @@ -45,13 +45,13 @@ import com.google.common.io.CountingOutputStream; +import datawave.core.query.logic.BaseQueryLogic; import datawave.microservice.querymetric.BaseQueryMetric.PageMetric; import datawave.microservice.querymetric.QueryMetric; import datawave.security.util.DnUtils; import datawave.webservice.query.annotation.EnrichQueryMetrics; import datawave.webservice.query.cache.QueryCache; import datawave.webservice.query.interceptor.QueryMetricsEnrichmentInterceptor.QueryCall; -import datawave.webservice.query.logic.BaseQueryLogic; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.runner.RunningQuery; import datawave.webservice.result.BaseQueryResponse; diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/BaseQueryLogicTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/BaseQueryLogicTest.java index a70bcbb9306..27b0a83d7ef 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/BaseQueryLogicTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/BaseQueryLogicTest.java @@ -9,11 +9,13 @@ import org.apache.accumulo.core.security.Authorizations; import org.junit.Test; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import 
datawave.microservice.query.Query; import datawave.security.authorization.DatawavePrincipal; import datawave.webservice.common.audit.Auditor; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; public class BaseQueryLogicTest { @@ -30,7 +32,7 @@ public void testCopyConstructor() { original.setPageByteTrigger(123456L); original.setCollectQueryMetrics(false); original.setAuthorizedDNs(Collections.singleton("dn=authorized1")); - original.setPrincipal(new DatawavePrincipal("user")); + original.setCurrentUser(new DatawavePrincipal("user")); BaseQueryLogicImpl copy = new BaseQueryLogicImpl(original); assertEquals(original.getLogicName(), copy.getLogicName()); @@ -43,7 +45,7 @@ public void testCopyConstructor() { assertEquals(original.getPageByteTrigger(), copy.getPageByteTrigger()); assertEquals(original.getCollectQueryMetrics(), copy.getCollectQueryMetrics()); assertEquals(original.getAuthorizedDNs(), copy.getAuthorizedDNs()); - assertEquals(original.getPrincipal(), copy.getPrincipal()); + assertEquals(original.getCurrentUser(), copy.getCurrentUser()); } class BaseQueryLogicImpl extends BaseQueryLogic { diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/ConfiguredQueryLogicFactoryBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/ConfiguredQueryLogicFactoryBeanTest.java index 1215653bfeb..f50d1276a7a 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/ConfiguredQueryLogicFactoryBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/ConfiguredQueryLogicFactoryBeanTest.java @@ -11,6 +11,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.TreeMap; import javax.ejb.EJBContext; @@ -28,6 +29,8 @@ import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import 
org.springframework.context.support.ClassPathXmlApplicationContext; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.DatawaveUser.UserType; @@ -102,8 +105,10 @@ public void testGetQueryLogic_HasRequiredRoles() throws Exception { QueryLogicFactoryConfiguration qlfc = new QueryLogicFactoryConfiguration(); qlfc.setMaxPageSize(25); qlfc.setPageByteTrigger(1024L); - this.logic.setPrincipal(altPrincipal); + this.logic.setServerUser(altPrincipal); this.logic.setLogicName(queryName); + expect(altPrincipal.getPrimaryUser()).andReturn( + new DatawaveUser(SubjectIssuerDNPair.of("CN=Poe Edgar Allan eapoe, OU=acme", ""), UserType.USER, null, null, null, 0L)); expect(this.logic.getMaxPageSize()).andReturn(25); expect(this.logic.getPageByteTrigger()).andReturn(1024L); expect(this.applicationContext.getBean(mappedQueryName)).andReturn(this.logic); @@ -134,8 +139,10 @@ public void testGetQueryLogic_propertyOverride() throws Exception { Map> rolesMap = new HashMap<>(); rolesMap.put(queryName, roles); - this.logic.setPrincipal(altPrincipal); + this.logic.setServerUser(altPrincipal); this.logic.setLogicName(queryName); + expect(altPrincipal.getPrimaryUser()).andReturn( + new DatawaveUser(SubjectIssuerDNPair.of("CN=Poe Edgar Allan eapoe, OU=acme", ""), UserType.USER, null, null, null, 0L)); expect(this.logic.getMaxPageSize()).andReturn(0); expect(this.logic.getPageByteTrigger()).andReturn(0L); this.logic.setMaxPageSize(25); diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/DatawaveRoleManagerTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/DatawaveRoleManagerTest.java deleted file mode 100644 index 4f35d637577..00000000000 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/DatawaveRoleManagerTest.java +++ /dev/null @@ 
-1,139 +0,0 @@ -package datawave.webservice.query.logic; - -import java.security.Principal; -import java.util.Collection; -import java.util.HashSet; -import java.util.Set; - -import org.junit.Assert; -import org.junit.Before; -import org.junit.Test; - -import com.google.common.collect.Lists; - -import datawave.security.authorization.DatawavePrincipal; -import datawave.security.authorization.DatawaveUser; -import datawave.security.authorization.DatawaveUser.UserType; -import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.security.util.DnUtils; - -public class DatawaveRoleManagerTest { - - private DatawaveRoleManager drm; - private DatawavePrincipal datawavePrincipal; - private Principal p; - - @Before - public void beforeEachTest() { - System.setProperty(DnUtils.NPE_OU_PROPERTY, "iamnotaperson"); - System.setProperty("dw.metadatahelper.all.auths", "A,B,C,D"); - createAndSetWithSingleRole(); - } - - private void createAndSetWithSingleRole() { - - String dn = "dn1"; - String issuerDN = "idn"; - SubjectIssuerDNPair combinedDN = SubjectIssuerDNPair.of(dn, issuerDN); - Collection roles = Lists.newArrayList("REQ_ROLE_1"); - - DatawaveUser user = new DatawaveUser(combinedDN, UserType.USER, null, roles, null, System.currentTimeMillis()); - datawavePrincipal = new DatawavePrincipal(Lists.newArrayList(user)); - } - - private void createAndSetWithTwoRoles() { - - String dn = "dn1"; - String issuerDN = "idn"; - SubjectIssuerDNPair combinedDn1 = SubjectIssuerDNPair.of(dn, issuerDN); - String combinedDN = dn + "<" + issuerDN + ">"; - String dn2 = "dn2"; - String combinedDN2 = dn2 + "<" + issuerDN + ">"; - SubjectIssuerDNPair combinedDn2 = SubjectIssuerDNPair.of(dn2, issuerDN); - - DatawaveUser u1 = new DatawaveUser(combinedDn1, UserType.USER, null, getFirstRole(), null, System.currentTimeMillis()); - DatawaveUser u2 = new DatawaveUser(combinedDn2, UserType.SERVER, null, getSecondRole(), null, System.currentTimeMillis()); - - datawavePrincipal = new 
DatawavePrincipal(Lists.newArrayList(u1, u2)); - } - - public Set getFirstRole() { - Set datawaveRoles = new HashSet<>(); - datawaveRoles.add("REQ_ROLE_1"); - return datawaveRoles; - } - - public Set getSecondRole() { - Set datawaveRoles = new HashSet<>(); - datawaveRoles.add("REQ_ROLE_2"); - return datawaveRoles; - } - - public Set getAllRoles() { - Set datawaveRoles = new HashSet<>(); - datawaveRoles.add("REQ_ROLE_1"); - datawaveRoles.add("REQ_ROLE_2"); - return datawaveRoles; - } - - @Test - public void testEmptyConstructor() { - - drm = new DatawaveRoleManager(); - - Set gottenRoles = drm.getRequiredRoles(); - Assert.assertNull(gottenRoles); - - drm.setRequiredRoles(getFirstRole()); - gottenRoles = drm.getRequiredRoles(); - - Assert.assertTrue(gottenRoles.contains("REQ_ROLE_1")); - Assert.assertFalse(gottenRoles.contains("REQ_ROLE_2")); - } - - @Test - public void testBasicsLoadedConstructor() { - - drm = new DatawaveRoleManager(getFirstRole()); - - Set gottenRoles = drm.getRequiredRoles(); - Assert.assertTrue(gottenRoles.contains("REQ_ROLE_1")); - Assert.assertFalse(gottenRoles.contains("REQ_ROLE_2")); - } - - @Test - public void testCanRunQuery() { - - drm = new DatawaveRoleManager(getFirstRole()); - - // Expect false when passing in a null Principal object - boolean canRun = drm.canRunQuery(null, null); - Assert.assertFalse(canRun); - - // Modify the principal and set the required roles to null - p = datawavePrincipal; - Assert.assertNotEquals(null, p); - drm.setRequiredRoles(null); - - // This test should pass when setting requiredRoles to null - canRun = drm.canRunQuery(null, p); - Assert.assertTrue(canRun); - - // Now set up a test that requires roles to run - drm.setRequiredRoles(getFirstRole()); - canRun = drm.canRunQuery(null, p); - Assert.assertTrue(canRun); - - // Now add a second required role check - drm.setRequiredRoles(getAllRoles()); - canRun = drm.canRunQuery(null, p); - Assert.assertFalse(canRun); - - // Recreate the principal with two roles 
and check - createAndSetWithTwoRoles(); - p = datawavePrincipal; - drm.setRequiredRoles(getFirstRole()); - canRun = drm.canRunQuery(null, p); - Assert.assertTrue(canRun); - } -} diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/QueryLogicFactoryBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/QueryLogicFactoryBeanTest.java index 06867401594..757523b6ab3 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/QueryLogicFactoryBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/QueryLogicFactoryBeanTest.java @@ -30,6 +30,8 @@ import org.springframework.beans.factory.xml.XmlBeanDefinitionReader; import org.springframework.context.support.ClassPathXmlApplicationContext; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.DatawaveUser.UserType; @@ -103,8 +105,10 @@ public void testGetQueryLogic_HasRequiredRoles() throws Exception { QueryLogicFactoryConfiguration qlfc = new QueryLogicFactoryConfiguration(); qlfc.setMaxPageSize(25); qlfc.setPageByteTrigger(1024L); - this.logic.setPrincipal(altPrincipal); + this.logic.setCurrentUser(altPrincipal); this.logic.setLogicName(queryName); + expect(altPrincipal.getPrimaryUser()).andReturn( + new DatawaveUser(SubjectIssuerDNPair.of("CN=Poe Edgar Allan eapoe, OU=acme", ""), UserType.USER, null, null, null, 0L)); expect(this.logic.getMaxPageSize()).andReturn(25); expect(this.logic.getPageByteTrigger()).andReturn(1024L); expect(this.applicationContext.getBean(queryName)).andReturn(this.logic); @@ -135,8 +139,10 @@ public void testGetQueryLogic_propertyOverride() throws Exception { Map> rolesMap = new HashMap<>(); rolesMap.put(queryName, roles); - this.logic.setPrincipal(altPrincipal); + this.logic.setServerUser(altPrincipal); 
this.logic.setLogicName(queryName); + expect(altPrincipal.getPrimaryUser()).andReturn( + new DatawaveUser(SubjectIssuerDNPair.of("CN=Poe Edgar Allan eapoe, OU=acme", ""), UserType.USER, null, null, null, 0L)); expect(this.logic.getMaxPageSize()).andReturn(0); expect(this.logic.getPageByteTrigger()).andReturn(0L); this.logic.setMaxPageSize(25); diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/TestLegacyBaseQueryLogicTransformer.java b/web-services/query/src/test/java/datawave/webservice/query/logic/TestLegacyBaseQueryLogicTransformer.java index 97a648c225a..0a8a5353a79 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/TestLegacyBaseQueryLogicTransformer.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/TestLegacyBaseQueryLogicTransformer.java @@ -15,8 +15,9 @@ import org.powermock.api.easymock.annotation.Mock; import org.powermock.modules.junit4.PowerMockRunner; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.logic.BaseQueryLogicTransformer; import datawave.marking.MarkingFunctions; -import datawave.webservice.query.cache.ResultsPage; import datawave.webservice.query.result.event.EventBase; import datawave.webservice.result.BaseQueryResponse; diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/TestQueryLogic.java b/web-services/query/src/test/java/datawave/webservice/query/logic/TestQueryLogic.java index 9255f0c30bf..0869d891fa9 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/TestQueryLogic.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/TestQueryLogic.java @@ -6,9 +6,11 @@ import org.apache.accumulo.core.security.Authorizations; import org.junit.Ignore; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.Query; -import datawave.webservice.query.configuration.GenericQueryConfiguration; +import 
datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.microservice.query.Query; @Ignore public class TestQueryLogic extends BaseQueryLogic { diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTest.java index 8901b62a511..985609f1c48 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/composite/CompositeQueryLogicTest.java @@ -27,31 +27,32 @@ import com.google.common.collect.HashMultimap; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.cache.ResultsPage.Status; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.exception.EmptyObjectException; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.BaseQueryLogicTransformer; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicTransformer; +import datawave.core.query.logic.composite.CompositeLogicException; +import datawave.core.query.logic.composite.CompositeQueryLogic; +import datawave.core.query.logic.filtered.FilteredQueryLogic; import datawave.marking.MarkingFunctions; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.DatawaveUser.UserType; +import 
datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.authorization.UserOperations; import datawave.security.util.DnUtils; import datawave.user.AuthorizationsListBase; import datawave.user.DefaultAuthorizationsList; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.cache.ResultsPage.Status; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.exception.EmptyObjectException; import datawave.webservice.query.exception.QueryException; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.BaseQueryLogicTransformer; -import datawave.webservice.query.logic.DatawaveRoleManager; -import datawave.webservice.query.logic.EasyRoleManager; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.filtered.FilteredQueryLogic; import datawave.webservice.query.result.EdgeQueryResponseBase; import datawave.webservice.query.result.edge.EdgeBase; import datawave.webservice.result.BaseQueryResponse; @@ -347,7 +348,7 @@ public Set getExampleQueries() { public static class TestUserOperations implements UserOperations { @Override - public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException { + public AuthorizationsListBase listEffectiveAuthorizations(ProxiedUserDetails callerObject) throws AuthorizationException { DatawavePrincipal p = (DatawavePrincipal) callerObject; DefaultAuthorizationsList authList = new DefaultAuthorizationsList(); DatawaveUser primaryUser = p.getPrimaryUser(); @@ -362,7 +363,7 @@ public AuthorizationsListBase 
listEffectiveAuthorizations(Object callerObject) t } @Override - public GenericResponse flushCachedCredentials(Object callerObject) { + public GenericResponse flushCachedCredentials(ProxiedUserDetails callerObject) { return new GenericResponse<>(); } } @@ -493,7 +494,7 @@ public void testClone() throws Exception { c.setQueryLogics(logics); c = (CompositeQueryLogic) c.clone(); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -517,7 +518,7 @@ public void testInitializeOKWithSameQueryLogicAndTableNames() throws Exception { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -551,7 +552,7 @@ public String getTableName() { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -576,7 +577,7 @@ public void testInitialize() throws Exception { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -607,7 +608,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); Assert.assertEquals(1, c.getInitializedLogics().size()); @@ -630,7 +631,7 @@ public void testInitializeOKWithFilter() throws Exception { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); 
c.initialize(null, settings, Collections.singleton(auths)); Assert.assertEquals(1, c.getInitializedLogics().size()); @@ -664,7 +665,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set // testing that we fail despite allMustInitialize to false because the filtered logic does not count c.setAllMustInitialize(false); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); } @@ -692,7 +693,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set c.setAllMustInitialize(true); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); } @@ -726,7 +727,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); } @@ -761,7 +762,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set c.setAllMustInitialize(true); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -798,7 +799,7 @@ public GenericQueryConfiguration initialize(AccumuloClient connection, Query set c.setAllMustInitialize(true); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); try { c.initialize(null, settings, Collections.singleton(auths)); @@ -825,7 +826,7 @@ public void testInitializeWithDifferentResponseTypes() throws Exception { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -848,7 +849,7 @@ public void 
testCloseWithNoSetup() throws Exception { CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.getTransformer(settings); @@ -888,7 +889,7 @@ public void testQueryLogic() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -952,7 +953,7 @@ public void testQueryLogicWithEmptyEvent() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize((AccumuloClient) null, (Query) settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator((Query) settings); @@ -1016,7 +1017,7 @@ public void testQueryLogicShortCircuitExecution() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.setShortCircuitExecution(true); c.initialize((AccumuloClient) null, (Query) settings, Collections.singleton(auths)); c.setupQuery(null); @@ -1084,7 +1085,7 @@ public void testQueryLogicShortCircuitExecutionWithEmptyEvent() throws Exception * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.setShortCircuitExecution(true); c.initialize((AccumuloClient) null, (Query) settings, Collections.singleton(auths)); c.setupQuery(null); @@ -1149,7 +1150,7 @@ public void testQueryLogicShortCircuitExecutionHitsSecondLogic() throws Exceptio * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.setShortCircuitExecution(true); c.initialize((AccumuloClient) null, (Query) 
settings, Collections.singleton(auths)); c.setupQuery(null); @@ -1218,7 +1219,7 @@ public void testQueryLogicWithNextFailure() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -1269,7 +1270,7 @@ public void testQueryLogicWithMaxResultsOverride() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -1333,7 +1334,7 @@ public void testQueryLogicNoDataLogic1() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -1397,7 +1398,7 @@ public void testQueryLogicNoDataLogic2() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -1449,7 +1450,7 @@ public void testQueryLogicNoData() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); @@ -1476,20 +1477,16 @@ public void testCanRunQueryLogic() throws Exception { TestQueryLogic logic1 = new TestQueryLogic(); HashSet roles = new HashSet<>(); roles.add("TESTROLE"); - logic1.setRoleManager(new 
DatawaveRoleManager(roles)); + logic1.setRequiredRoles(roles); TestQueryLogic2 logic2 = new TestQueryLogic2(); - logic2.setRoleManager(new EasyRoleManager()); + logic2.setRequiredRoles(Collections.emptySet()); logics.put("TestQueryLogic", logic1); logics.put("TestQueryLogic2", logic2); CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - DatawaveUser u = new DatawaveUser(SubjectIssuerDNPair.of("CN=Other User Name ouser, OU=acme", "CN=ca, OU=acme"), UserType.USER, null, - Collections.singleton("TESTROLE"), null, 0L); - DatawavePrincipal p = new DatawavePrincipal(Collections.singletonList(u)); - - Assert.assertTrue(c.canRunQuery(p)); + Assert.assertTrue(c.canRunQuery(Collections.singleton("TESTROLE"))); Assert.assertEquals(2, c.getQueryLogics().size()); } @@ -1499,22 +1496,18 @@ public void testCanRunQueryLogic2() throws Exception { TestQueryLogic logic1 = new TestQueryLogic(); HashSet roles = new HashSet<>(); roles.add("TESTROLE"); - logic1.setRoleManager(new DatawaveRoleManager(roles)); + logic1.setRequiredRoles(roles); TestQueryLogic2 logic2 = new TestQueryLogic2(); HashSet roles2 = new HashSet<>(); roles2.add("NONTESTROLE"); - logic2.setRoleManager(new DatawaveRoleManager(roles2)); + logic2.setRequiredRoles(roles2); logics.put("TestQueryLogic", logic1); logics.put("TestQueryLogic2", logic2); CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - DatawaveUser u = new DatawaveUser(SubjectIssuerDNPair.of("CN=Other User Name ouser, OU=acme", "CN=ca, OU=acme"), UserType.USER, null, - Collections.singleton("TESTROLE"), null, 0L); - DatawavePrincipal p = new DatawavePrincipal(Collections.singletonList(u)); - - Assert.assertTrue(c.canRunQuery(p)); + Assert.assertTrue(c.canRunQuery(Collections.singleton("TESTROLE"))); Assert.assertEquals(1, c.getQueryLogics().size()); } @@ -1524,22 +1517,18 @@ public void testCannotRunQueryLogic2() throws Exception { TestQueryLogic logic1 = new TestQueryLogic(); HashSet roles = new 
HashSet<>(); roles.add("NONTESTROLE"); - logic1.setRoleManager(new DatawaveRoleManager(roles)); + logic1.setRequiredRoles(roles); TestQueryLogic2 logic2 = new TestQueryLogic2(); HashSet roles2 = new HashSet<>(); roles2.add("NONTESTROLE"); - logic2.setRoleManager(new DatawaveRoleManager(roles2)); + logic2.setRequiredRoles(roles2); logics.put("TestQueryLogic", logic1); logics.put("TestQueryLogic2", logic2); CompositeQueryLogic c = new CompositeQueryLogic(); c.setQueryLogics(logics); - DatawaveUser u = new DatawaveUser(SubjectIssuerDNPair.of("CN=Other User Name ouser, OU=acme", "CN=ca, OU=acme"), UserType.USER, null, - Collections.singleton("TESTROLE"), null, 0L); - DatawavePrincipal p = new DatawavePrincipal(Collections.singletonList(u)); - - Assert.assertFalse(c.canRunQuery(p)); + Assert.assertFalse(c.canRunQuery(Collections.singleton("TESTROLE"))); Assert.assertEquals(0, c.getQueryLogics().size()); } @@ -1602,7 +1591,7 @@ public void testAuthorizationsUpdate() throws Exception { * RunningQuery.setupConnection() */ c.setQueryLogics(logics); - c.setPrincipal(principal); + c.setCurrentUser(principal); c.initialize(null, settings, Collections.singleton(auths)); c.setupQuery(null); TransformIterator iter = c.getTransformIterator(settings); diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/FilteredQueryLogicTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/FilteredQueryLogicTest.java index e58770424d3..7d8ac10a341 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/FilteredQueryLogicTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/FilteredQueryLogicTest.java @@ -12,10 +12,12 @@ import org.junit.Test; import org.powermock.api.easymock.PowerMock; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import 
datawave.webservice.query.logic.QueryLogic; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.filtered.FilteredQueryLogic; +import datawave.core.query.logic.filtered.QueryLogicFilterByAuth; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; public class FilteredQueryLogicTest { diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuthTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuthTest.java index 80f4305191e..47ce96a8b5b 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuthTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByAuthTest.java @@ -9,6 +9,8 @@ import org.apache.accumulo.core.security.Authorizations; import org.junit.Test; +import datawave.core.query.logic.filtered.QueryLogicFilterByAuth; + public class QueryLogicFilterByAuthTest { @Test public void testDefaults() { diff --git a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameterTest.java b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameterTest.java index 33da980f9f9..53f86d087f2 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameterTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/logic/filtered/QueryLogicFilterByParameterTest.java @@ -5,7 +5,8 @@ import org.junit.Test; -import datawave.webservice.query.QueryImpl; +import datawave.core.query.logic.filtered.QueryLogicFilterByParameter; +import datawave.microservice.query.QueryImpl; public class QueryLogicFilterByParameterTest { @Test diff --git 
a/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedQueryExecutorBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedQueryExecutorBeanTest.java index 9623ea3d322..7782e671a5a 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedQueryExecutorBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedQueryExecutorBeanTest.java @@ -21,6 +21,7 @@ import java.util.Collections; import java.util.Date; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -65,8 +66,23 @@ import com.google.common.collect.Multimap; import com.google.common.collect.Sets; +import datawave.core.common.audit.PrivateAuditConstants; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.logic.QueryLogicTransformer; import datawave.marking.ColumnVisibilitySecurityMarking; import datawave.marking.SecurityMarking; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; +import datawave.microservice.query.QueryPersistence; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.QueryMetric; import datawave.microservice.querymetric.QueryMetricFactory; import datawave.microservice.querymetric.QueryMetricFactoryImpl; @@ -74,51 +90,33 @@ import datawave.security.authorization.DatawavePrincipal; import 
datawave.security.authorization.DatawaveUser; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.security.authorization.UserOperations; import datawave.security.authorization.remote.RemoteUserOperationsImpl; import datawave.security.user.UserOperationsBean; import datawave.security.util.WSAuthorizationsUtil; import datawave.webservice.common.audit.AuditBean; import datawave.webservice.common.audit.AuditParameters; import datawave.webservice.common.audit.Auditor.AuditType; -import datawave.webservice.common.audit.PrivateAuditConstants; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; import datawave.webservice.common.exception.BadRequestException; import datawave.webservice.common.exception.DatawaveWebApplicationException; import datawave.webservice.common.exception.NoResultsException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; -import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.cache.ClosedQueryCache; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean; import datawave.webservice.query.cache.QueryCache; -import datawave.webservice.query.cache.QueryExpirationConfiguration; import datawave.webservice.query.cache.QueryTraceCache; import datawave.webservice.query.cache.QueryTraceCache.CacheListener; import datawave.webservice.query.cache.QueryTraceCache.PatternWrapper; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.configuration.LookupUUIDConfiguration; import datawave.webservice.query.exception.BadRequestQueryException; import datawave.webservice.query.exception.DatawaveErrorCode; import 
datawave.webservice.query.exception.NoResultsQueryException; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.factory.Persister; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.DatawaveRoleManager; -import datawave.webservice.query.logic.EasyRoleManager; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.logic.QueryLogicFactoryImpl; -import datawave.webservice.query.logic.QueryLogicTransformer; -import datawave.webservice.query.logic.RoleManager; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.query.util.GetUUIDCriteria; import datawave.webservice.query.util.LookupUUIDUtil; +import datawave.webservice.query.util.MapUtils; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; import datawave.webservice.result.BaseQueryResponse; import datawave.webservice.result.DefaultEventQueryResponse; @@ -232,7 +230,7 @@ public class ExtendedQueryExecutorBeanTest { @Mock UserOperationsBean userOperations; - QueryExpirationConfiguration queryExpirationConf; + QueryExpirationProperties queryExpirationConf; @BeforeClass public static void setup() throws Exception {} @@ -242,10 +240,10 @@ public void setupBefore() throws Exception { queryLogic2 = PowerMock.createMock(QuerySyntaxParserQueryLogic.class); - queryExpirationConf = new QueryExpirationConfiguration(); - queryExpirationConf.setPageSizeShortCircuitCheckTime(45); - queryExpirationConf.setPageShortCircuitTimeout(58); - queryExpirationConf.setCallTime(60); + queryExpirationConf = new QueryExpirationProperties(); + queryExpirationConf.setShortCircuitCheckTime(45); + queryExpirationConf.setShortCircuitTimeout(58); + queryExpirationConf.setIdleTimeout(60); } @SuppressWarnings({"rawtypes", "unchecked"}) @@ -398,8 +396,8 @@ public 
void testAdminCancel_LookupAccumuloQuery() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -476,13 +474,18 @@ public void testAdminClose_NullTupleReturnedAndQueryExceptionThrown() throws Exc public void testCancel_HappyPath() throws Exception { // Set local test input String userName = "userName"; + String userSid = "userSid"; UUID queryId = UUID.randomUUID(); // Set expectations of the create logic expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString())).andReturn(false); expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); expect(this.principal.getName()).andReturn(userName); - expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userName)).andReturn(this.tuple); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getShortName()).andReturn(userSid); + expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); + expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(this.tuple); this.closedCache.remove(queryId.toString()); expect(this.tuple.getFirst()).andReturn((QueryLogic) this.queryLogic1); this.queryLogic1.close(); @@ -520,7 
+523,10 @@ public void testCancel_NullTupleReturnedAndQueryExceptionThrown() throws Excepti expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString())).andReturn(false); expect(this.context.getCallerPrincipal()).andReturn(this.principal).times(2); expect(this.principal.getName()).andReturn(userName); - expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userName)).andReturn(null); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getShortName()).andReturn(userSid); + expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(null); expect(this.closedCache.exists(queryId.toString())).andReturn(false); expect(this.principal.getName()).andReturn(userName); expect(this.principal.getShortName()).andReturn(userSid); @@ -563,9 +569,11 @@ public void testCancel_RunningQueryFoundInCache() throws Exception { expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString())).andReturn(false); expect(this.context.getCallerPrincipal()).andReturn(this.principal).times(2); expect(this.principal.getName()).andReturn(userName); - expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userName)).andReturn(null); + expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(null); expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))); expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); @@ -606,10 +614,12 
@@ public void testClose_NullTupleReturnedFromQueryLogicCache() throws Exception { String queryAuthorizations = "AUTH_1"; // Set expectations - expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), this.principal)).andReturn(false); + expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), userName.toLowerCase())).andReturn(false); expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid).times(2); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))); expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(null); @@ -641,14 +651,18 @@ public void testClose_NullTupleReturnedFromQueryLogicCache() throws Exception { public void testClose_UncheckedException() throws Exception { // Set local test input String userSid = "userSid"; + String userName = "userName"; UUID queryId = UUID.randomUUID(); // Set expectations - expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), this.principal)).andReturn(false); + expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), userName.toLowerCase())).andReturn(false); expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); + expect(this.principal.getName()).andReturn(userName); expect(this.principal.getShortName()).andReturn(userSid); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); 
expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(this.tuple); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); expect(this.tuple.getFirst()).andReturn((QueryLogic) this.queryLogic1); this.queryLogic1.close(); PowerMock.expectLastCall().andThrow(ILLEGAL_STATE_EXCEPTION); @@ -692,7 +706,7 @@ public void testCreateQueryAndNext_HappyPath() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); @@ -701,33 +715,31 @@ public void testCreateQueryAndNext_HappyPath() throws Exception { HashMap> authsMap = new HashMap<>(); authsMap.put("userdn", Arrays.asList(queryAuthorizations)); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putSingle(QueryParameters.QUERY_STRING, query); - queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); - queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); - queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); - queryParameters.putSingle(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - queryParameters.putSingle(QueryParameters.QUERY_TRACE, String.valueOf(trace)); - 
queryParameters.putSingle(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); - queryParameters.putSingle("valid", "param"); + MultiValueMap queryParameters = new LinkedMultiValueMap<>(); + queryParameters.add(QueryParameters.QUERY_STRING, query); + queryParameters.add(QueryParameters.QUERY_NAME, queryName); + queryParameters.add(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); + queryParameters.add(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.add(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.add(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); + queryParameters.add(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); + queryParameters.add(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); + queryParameters.add(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); + queryParameters.add(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); + queryParameters.add(QueryParameters.QUERY_TRACE, String.valueOf(trace)); + queryParameters.add(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); + queryParameters.add("valid", "param"); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); - // op.putSingle(PrivateAuditConstants.AUDIT_TYPE, AuditType.NONE.name()); - op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); - op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); - op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); + MultiValueMap op = qp.getUnknownParameters(queryParameters); + op.add(PrivateAuditConstants.LOGIC_CLASS, 
queryLogicName); + op.add(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); + op.add(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); // Set expectations of the create logic queryLogic1.validate(queryParameters); @@ -742,15 +754,16 @@ public void testCreateQueryAndNext_HappyPath() throws Exception { expect(this.queryLogic1.containsDNWithAccess(Collections.singletonList(userDN))).andReturn(true); expect(this.queryLogic1.getAuditType(null)).andReturn(AuditType.NONE); expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))); - expect(persister.create(eq(userDNpair.subjectDN()), eq(dnList), eq(marking), eq(queryLogicName), eq(qp), eq(op))).andReturn(this.query); + expect(persister.create(eq(userDNpair.subjectDN()), eq(dnList), eq(marking), eq(queryLogicName), eq(qp), eq(MapUtils.toMultivaluedMap(op)))) + .andReturn(this.query); expect(this.queryLogic1.getAuditType(this.query)).andReturn(AuditType.NONE); expect(this.queryLogic1.getConnectionPriority()).andReturn(Priority.NORMAL); expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.queryLogic1.isLongRunningQuery()).andReturn(false); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.traceInfos.get(userSid)).andReturn(new ArrayList<>(0)); expect(this.traceInfos.get(null)).andReturn(Arrays.asList(PatternWrapper.wrap("NONMATCHING_REGEX"))); @@ -840,16 +853,16 @@ public void 
testCreateQueryAndNext_HappyPath() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); - BaseQueryResponse result1 = subject.createQueryAndNext(queryLogicName, queryParameters); + BaseQueryResponse result1 = subject.createQueryAndNext(queryLogicName, MapUtils.toMultivaluedMap(queryParameters)); PowerMock.verifyAll(); // Verify results @@ -887,9 +900,9 @@ public void testCreateQueryAndNext_BadID() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, 
DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -901,7 +914,7 @@ public void testCreateQueryAndNext_BadID() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); MultivaluedMap op = new MultivaluedMapImpl<>(); @@ -930,8 +943,8 @@ public void testCreateQueryAndNext_BadID() throws Exception { expect(this.queryLogic1.isLongRunningQuery()).andReturn(false); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.traceInfos.get(userSid)).andReturn(new ArrayList<>(0)); expect(this.traceInfos.get(null)).andReturn(Arrays.asList(PatternWrapper.wrap("NONMATCHING_REGEX"))); @@ -999,12 +1012,12 @@ public void testCreateQueryAndNext_BadID() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, 
Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -1054,9 +1067,9 @@ public void testCreateQueryAndNext_PageSizeParam() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, 
String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -1069,7 +1082,7 @@ public void testCreateQueryAndNext_PageSizeParam() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); MultivaluedMap op = new MultivaluedMapImpl<>(); @@ -1097,8 +1110,8 @@ public void testCreateQueryAndNext_PageSizeParam() throws Exception { expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.queryLogic1.isLongRunningQuery()).andReturn(false); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient("connPool1", new ArrayList<>(), Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.traceInfos.get(userSid)).andReturn(new ArrayList<>(0)); expect(this.traceInfos.get(null)).andReturn(Arrays.asList(PatternWrapper.wrap("NONMATCHING_REGEX"))); @@ -1158,12 +1171,12 @@ public void testCreateQueryAndNext_PageSizeParam() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, 
metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -1213,9 +1226,9 @@ public void testCreateQueryAndNext_PageSizeParamTwo() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); // If the wrong page size parameter is added here, it should be dropped automatically by the QueryImpl @@ -1229,7 +1242,7 @@ public void testCreateQueryAndNext_PageSizeParamTwo() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new 
QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); MultivaluedMap op = new MultivaluedMapImpl<>(); @@ -1258,8 +1271,8 @@ public void testCreateQueryAndNext_PageSizeParamTwo() throws Exception { expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.queryLogic1.isLongRunningQuery()).andReturn(false); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient("connPool1", new ArrayList<>(), Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.traceInfos.get(userSid)).andReturn(new ArrayList<>(0)); expect(this.traceInfos.get(null)).andReturn(Arrays.asList(PatternWrapper.wrap("NONMATCHING_REGEX"))); @@ -1340,12 +1353,12 @@ public void testCreateQueryAndNext_PageSizeParamTwo() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, 
QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -1397,9 +1410,9 @@ public void testCreateQueryAndNext_DoubleAuditValues() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -1414,11 +1427,10 @@ public void testCreateQueryAndNext_DoubleAuditValues() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); // 
op.putSingle(PrivateAuditConstants.AUDIT_TYPE, AuditType.NONE.name()); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); @@ -1447,7 +1459,7 @@ public void testCreateQueryAndNext_DoubleAuditValues() throws Exception { expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); expect(this.traceInfos.get(userSid)).andReturn(new ArrayList<>(0)); expect(this.traceInfos.get(null)).andReturn(Arrays.asList(PatternWrapper.wrap("NONMATCHING_REGEX"))); expect(this.qlCache.add(queryId.toString(), userSid, this.queryLogic1, this.client)).andReturn(true); @@ -1493,7 +1505,7 @@ public void testCreateQueryAndNext_DoubleAuditValues() throws Exception { expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); expect(cache.lock(queryId.toString())).andReturn(true); expect(this.runningQuery.getSettings()).andReturn(this.query); - this.connectionRequestBean.requestBegin(queryId.toString()); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); expect(this.runningQuery.getClient()).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); @@ -1538,12 +1550,12 @@ public void testCreateQueryAndNext_DoubleAuditValues() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, 
QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -1578,29 +1590,28 @@ public void testCreateQueryAndNext_AddToCacheException() throws Exception { List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putSingle(QueryParameters.QUERY_STRING, query); - queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); - queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); - queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); - queryParameters.putSingle(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - queryParameters.putSingle(QueryParameters.QUERY_TRACE, String.valueOf(trace)); - queryParameters.putSingle("valid", "param"); - 
queryParameters.putSingle(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); + MultiValueMap queryParameters = new LinkedMultiValueMap<>(); + queryParameters.set(QueryParameters.QUERY_STRING, query); + queryParameters.set(QueryParameters.QUERY_NAME, queryName); + queryParameters.set(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); + queryParameters.set(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.set(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.set(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); + queryParameters.set(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); + queryParameters.set(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); + queryParameters.set(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); + queryParameters.set(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); + queryParameters.set(QueryParameters.QUERY_TRACE, String.valueOf(trace)); + queryParameters.set("valid", "param"); + queryParameters.set(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(queryParameters)); // op.putSingle(PrivateAuditConstants.AUDIT_TYPE, AuditType.ACTIVE.name()); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); @@ -1623,13 +1634,13 @@ public void testCreateQueryAndNext_AddToCacheException() throws Exception { 
expect(persister.create(eq(userDNpair.subjectDN()), eq(dnList), eq(marking), eq(queryLogicName), eq(qp), eq(op))).andReturn(this.query); expect(this.queryLogic1.getAuditType(this.query)).andReturn(AuditType.ACTIVE); expect(this.queryLogic1.getSelectors(this.query)).andReturn(null); - expect(auditor.audit(eq(queryParameters))).andReturn(null); + expect(auditor.audit(anyObject())).andReturn(null); expect(this.queryLogic1.getConnectionPriority()).andReturn(Priority.NORMAL); expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.traceInfos.get(userSid)).andReturn(Arrays.asList(PatternWrapper.wrap(query))); expect(this.qlCache.add(queryId.toString(), userSid, this.queryLogic1, this.client)) @@ -1649,13 +1660,13 @@ public void testCreateQueryAndNext_AddToCacheException() throws Exception { PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, EJBContext.class, context); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, AccumuloConnectionFactory.class, connectionFactory); setInternalState(subject, ResponseObjectFactory.class, responseObjectFactory); setInternalState(subject, CreatedQueryLogicCacheBean.class, qlCache); setInternalState(subject, Persister.class, persister); 
setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, marking); @@ -1664,7 +1675,7 @@ public void testCreateQueryAndNext_AddToCacheException() throws Exception { setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); Throwable result1 = null; try { - subject.createQueryAndNext(queryLogicName, queryParameters); + subject.createQueryAndNext(queryLogicName, MapUtils.toMultivaluedMap(queryParameters)); } catch (DatawaveWebApplicationException e) { result1 = e.getCause(); } @@ -1694,7 +1705,7 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); @@ -1707,9 +1718,9 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, 
DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -1721,11 +1732,10 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); @@ -1795,7 +1805,7 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); expect(cache.lock(queryId.toString())).andReturn(true); expect(this.runningQuery.getSettings()).andReturn(this.query).anyTimes(); - this.connectionRequestBean.requestBegin(queryId.toString()); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); expect(this.runningQuery.getClient()).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); this.runningQuery.setActiveCall(true); @@ -1831,13 +1841,15 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - 
expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid).times(2); - expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))); - expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), this.principal)).andReturn(false); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userDN)); + expect(this.principal.getDNs()).andReturn(new String[] {userDN}); + expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))).anyTimes(); + expect(this.connectionRequestBean.cancelConnectionRequest(queryId.toString(), userDN.toLowerCase())).andReturn(false); expect(this.qlCache.pollIfOwnedBy(queryId.toString(), userSid)).andReturn(null); expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.runningQuery.closeConnection(this.connectionFactory); this.cache.remove(queryId.toString()); this.closedCache.add(queryId.toString()); @@ -1861,12 +1873,12 @@ public void testCreateQueryAndNext_ButNoResults() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, 
metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -1897,14 +1909,14 @@ public void testCreateQueryAndNext_InvalidExpirationDate() throws Exception { String parameters = null; boolean trace = false; - MultivaluedMap p = new MultivaluedMapImpl<>(); - p.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, expirationDate, - pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap p = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); // Run the test PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -1941,14 +1953,14 @@ public void testCreateQueryAndNext_InvalidPageSize() throws Exception { String parameters = null; boolean trace = false; - MultivaluedMap p = new MultivaluedMapImpl<>(); - p.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, expirationDate, 
- pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap p = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); // Run the test PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -1986,18 +1998,17 @@ public void testCreateQueryAndNext_PageSizeExceedsConfiguredMax() throws Excepti boolean trace = false; // Set expectations - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); 
op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); @@ -2018,9 +2029,9 @@ public void testCreateQueryAndNext_PageSizeExceedsConfiguredMax() throws Excepti QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, EJBContext.class, context); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -2065,14 +2076,14 @@ public void testCreateQueryAndNext_UndefinedQueryLogic() throws Exception { QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); setInternalState(subject, EJBContext.class, context); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, 
queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); subject.createQueryAndNext(queryLogicName, queryParameters); @@ -2110,13 +2121,13 @@ public void testDefineQuery_InvalidExpirationDate() throws Exception { // Run the test PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); subject.defineQuery(queryLogicName, queryParameters); @@ -2154,13 +2165,13 @@ public void testDefineQuery_InvalidPageSize() throws Exception { // Run the test PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); 
setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); subject.defineQuery(queryLogicName, queryParameters); } catch (DatawaveWebApplicationException e) { @@ -2192,25 +2203,24 @@ public void testDefineQuery_UncheckedException() throws Exception { QueryPersistence persistenceMode = QueryPersistence.PERSISTENT; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); boolean trace = false; List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(null, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, expirationDate, - pagesize, pageTimeout, maxResultsOverride, persistenceMode, null, null, trace)); + MultivaluedMap queryParameters = MapUtils + .toMultivaluedMap(DefaultQueryParameters.paramsToMap(null, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, null, null, trace)); queryParameters.putSingle("valid", "param"); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); 
marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); @@ -2239,10 +2249,10 @@ public void testDefineQuery_UncheckedException() throws Exception { setInternalState(subject, EJBContext.class, context); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); subject.defineQuery(queryLogicName, queryParameters); } finally { @@ -2336,9 +2346,9 @@ public void testDuplicateQuery_HappyPath() throws Exception { MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, newQueryName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - 
queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PARAMS, parameters); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); @@ -2350,11 +2360,10 @@ public void testDuplicateQuery_HappyPath() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogic1.getClass().getSimpleName()); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDN); @@ -2392,7 +2401,7 @@ public void testDuplicateQuery_HappyPath() throws Exception { expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); 
expect(this.qlCache.add(newQuery1.getId().toString(), userSid, this.queryLogic1, this.client)).andReturn(true); expect(this.queryLogic1.getCollectQueryMetrics()).andReturn(false); expect(this.queryLogic1.initialize(eq(this.client), isA(Query.class), isA(Set.class))).andReturn(this.genericConfiguration); @@ -2413,10 +2422,10 @@ public void testDuplicateQuery_HappyPath() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); GenericResponse result1 = subject.duplicateQuery(queryId.toString(), newQueryName, queryLogicName, query, queryVisibility, beginDate, endDate, queryAuthorizations, expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, parameters, trace); @@ -2466,8 +2475,8 @@ public void testDuplicateQuery_FindByIDReturnsNull() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); 
setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); try { @@ -2542,10 +2551,10 @@ public void testDuplicateQuery_UncheckedExceptionThrownDuringCreateQuery() throw setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); try { @@ -2783,7 +2792,7 @@ public void testInit() throws Exception { setInternalState(subject, QueryTraceCache.class, traceCache); setInternalState(subject, LookupUUIDConfiguration.class, lookupUUIDConfiguration); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); try { @@ -2851,8 +2860,8 @@ public void testList_HappyPath() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); + setInternalState(subject, 
QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(subject, UserOperationsBean.class, userOperations); QueryImplListResponse result1 = subject.list(queryName); @@ -2928,7 +2937,6 @@ public void testListQueriesForUser_UncheckedException() throws Exception { @Test public void testListQueryLogic() throws Exception { // Set expectations - RoleManager roleManager = new EasyRoleManager(); expect(this.queryLogicFactory.getQueryLogicList()).andReturn(Arrays.asList(this.queryLogic1, this.queryLogic2)); expect(this.queryLogic1.getLogicName()).andReturn("logic1").times(1); // Begin 1st loop expect(this.queryLogic1.getAuditType(null)).andReturn(AuditType.LOCALONLY); @@ -2936,17 +2944,16 @@ public void testListQueryLogic() throws Exception { expect(this.queryLogic1.getOptionalQueryParameters()).andReturn(new TreeSet<>()); expect(this.queryLogic1.getRequiredQueryParameters()).andReturn(new TreeSet<>()); expect(this.queryLogic1.getExampleQueries()).andReturn(new TreeSet<>()); - expect(this.queryLogic1.getRoleManager()).andReturn(roleManager).anyTimes(); - expect(this.queryLogic1.getResponseClass(EasyMock.anyObject(Query.class))).andThrow(ILLEGAL_STATE_EXCEPTION); + expect(this.queryLogic1.getRequiredRoles()).andReturn(new HashSet<>()).anyTimes(); + expect(this.queryLogic1.getResponseClass(anyObject(Query.class))).andThrow(ILLEGAL_STATE_EXCEPTION); expect(this.queryLogic2.getLogicName()).andReturn("logic2").times(1); // Begin 1st loop expect(this.queryLogic2.getAuditType(null)).andReturn(AuditType.LOCALONLY); expect(this.queryLogic2.getLogicDescription()).andReturn("description2"); expect(this.queryLogic2.getOptionalQueryParameters()).andReturn(new TreeSet<>()); expect(this.queryLogic2.getRequiredQueryParameters()).andReturn(new TreeSet<>()); expect(this.queryLogic2.getExampleQueries()).andReturn(new TreeSet<>()); - RoleManager roleManager2 = new 
DatawaveRoleManager(Arrays.asList("ROLE_1", "ROLE_2")); - expect(this.queryLogic2.getRoleManager()).andReturn(roleManager2).times(2); - expect(this.queryLogic2.getResponseClass(EasyMock.anyObject(Query.class))).andReturn(this.baseResponse.getClass().getCanonicalName()); + expect(this.queryLogic2.getRequiredRoles()).andReturn(new HashSet<>(Arrays.asList("ROLE_1", "ROLE_2"))).times(2); + expect(this.queryLogic2.getResponseClass(anyObject(Query.class))).andReturn(this.baseResponse.getClass().getCanonicalName()); expect(this.responseObjectFactory.getQueryImpl()).andReturn(new QueryImpl()); Map parsers = new HashMap<>(); parsers.put("PARSER1", null); @@ -2956,7 +2963,7 @@ public void testListQueryLogic() throws Exception { PowerMock.replayAll(); QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(subject, ResponseObjectFactory.class, responseObjectFactory); QueryLogicResponse result1 = subject.listQueryLogic(); @@ -3041,9 +3048,11 @@ public void testNext_QueryExceptionDueToCacheLock() throws Exception { // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid); - expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getProxyServers()).andReturn(new 
ArrayList<>(0)).anyTimes(); expect(this.context.getUserTransaction()).andReturn(this.transaction).anyTimes(); this.transaction.begin(); expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); @@ -3092,9 +3101,11 @@ public void testNext_UncheckedException() throws Exception { // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid); - expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.context.getUserTransaction()).andReturn(this.transaction).anyTimes(); this.transaction.begin(); expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); @@ -3138,9 +3149,11 @@ public void testNext_UserNotOwner() throws Exception { // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(otherSid); - expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(otherSid).anyTimes(); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.context.getUserTransaction()).andReturn(this.transaction).anyTimes(); this.transaction.begin(); 
expect(this.cache.get(queryId.toString())).andReturn(this.runningQuery); @@ -3193,9 +3206,11 @@ public void testNext_NullQueryReturnedFromCache() throws Exception { // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(userSid); - expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(userSid).anyTimes(); + expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userName)); + expect(this.principal.getDNs()).andReturn(new String[] {userName}); + expect(this.principal.getProxyServers()).andReturn(new ArrayList<>(0)).anyTimes(); expect(this.context.getUserTransaction()).andReturn(this.transaction).anyTimes(); this.transaction.begin(); expect(this.cache.get(queryId.toString())).andReturn(null); @@ -3310,16 +3325,19 @@ public void testReset_NoPreexistingRunningQuery() throws Exception { map.set(PrivateAuditConstants.COLUMN_VISIBILITY, authorization); map.set(PrivateAuditConstants.USER_DN, userDN); map.set(AuditParameters.AUDIT_ID, queryName); - MultivaluedMap auditMap = new MultivaluedMapImpl(); + MultiValueMap auditMap = new LinkedMultiValueMap(); auditMap.putAll(map); // Set expectations expect(this.context.getUserTransaction()).andReturn(this.transaction).anyTimes(); this.transaction.begin(); expect(this.transaction.getStatus()).andReturn(Status.STATUS_ACTIVE).anyTimes(); - expect(this.context.getCallerPrincipal()).andReturn(this.principal); - expect(this.principal.getName()).andReturn(userName); - expect(this.principal.getShortName()).andReturn(sid); + expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); + expect(this.principal.getName()).andReturn(userName).anyTimes(); + expect(this.principal.getShortName()).andReturn(sid).anyTimes(); + 
expect(this.principal.getUserDN()).andReturn(SubjectIssuerDNPair.of(userDN)); + expect(this.principal.getDNs()).andReturn(new String[] {userDN}); + expect(this.principal.getProxyServers()).andReturn(new ArrayList<>()); expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(authorization))); expect(this.principal.getPrimaryUser()).andReturn(dwUser).anyTimes(); expect(this.dwUser.getAuths()).andReturn(Collections.singleton(authorization)).anyTimes(); @@ -3337,6 +3355,7 @@ public void testReset_NoPreexistingRunningQuery() throws Exception { expect(this.query.getOwner()).andReturn(sid).anyTimes(); expect(this.query.getId()).andReturn(queryId).anyTimes(); expect(this.query.getQuery()).andReturn(queryName).anyTimes(); + expect(this.query.getQueryName()).andReturn(queryName).anyTimes(); this.cache.put(eq(queryId.toString()), isA(RunningQuery.class)); expect(this.cache.lock(queryName)).andReturn(true); expect(this.queryLogic1.getAuditType(this.query)).andReturn(AuditType.PASSIVE); @@ -3351,7 +3370,6 @@ public void testReset_NoPreexistingRunningQuery() throws Exception { expect(this.query.getColumnVisibility()).andReturn(authorization); expect(this.query.getBeginDate()).andReturn(null); expect(this.query.getEndDate()).andReturn(null); - expect(this.query.getQueryName()).andReturn(queryName); expect(this.query.getParameters()).andReturn((Set) Collections.emptySet()); expect(this.query.findParameter(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES)) .andReturn(new QueryImpl.Parameter(RemoteUserOperationsImpl.INCLUDE_REMOTE_SERVICES, "true")).anyTimes(); @@ -3382,8 +3400,9 @@ public void testReset_NoPreexistingRunningQuery() throws Exception { this.query.populateTrackingMap(new HashMap<>()); expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.queryLogic1.getLogicName()).andReturn(queryLogicName); - connectionRequestBean.requestBegin(queryName); - expect(this.connectionFactory.getClient(eq("connPool1"), 
eq(Priority.NORMAL), isA(Map.class))).andReturn(this.client); + connectionRequestBean.requestBegin(queryName, userDN.toLowerCase(), new HashMap<>()); + expect(this.connectionFactory.getClient(eq(userDN.toLowerCase()), eq(new ArrayList<>()), eq("connPool1"), eq(Priority.NORMAL), eq(new HashMap<>()))) + .andReturn(this.client); connectionRequestBean.requestEnd(queryName); expect(this.queryLogic1.initialize(eq(this.client), eq(this.query), isA(Set.class))).andReturn(this.genericConfiguration); this.queryLogic1.setupQuery(this.genericConfiguration); @@ -3405,8 +3424,8 @@ public void testReset_NoPreexistingRunningQuery() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); @@ -3458,7 +3477,7 @@ public void testReset_PreexistingRunningQueryWithCloseConnectionException() thro setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, QueryMetricFactory.class, new 
QueryMetricFactoryImpl()); @@ -3512,7 +3531,7 @@ public void testReset_PreexistingRunningQueryWithLockException() throws Exceptio setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); @@ -3555,9 +3574,9 @@ public void testUpdateQuery_PersistentMode() throws Exception { MultiValueMap p = new LinkedMultiValueMap<>(); p.set(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - p.set(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - p.set(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - p.set(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + p.set(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + p.set(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + p.set(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); p.set(QueryParameters.QUERY_NAME, queryName); p.set(QueryParameters.QUERY_PAGESIZE, Integer.toString(pagesize)); p.set(QueryParameters.QUERY_PAGETIMEOUT, Integer.toString(pageTimeout)); @@ -3570,9 +3589,9 @@ public void testUpdateQuery_PersistentMode() throws Exception { p.set(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); p.set(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); p.set(PrivateAuditConstants.USER_DN, userDN); - MultivaluedMap auditMap = new MultivaluedMapImpl(); + MultiValueMap auditMap = new LinkedMultiValueMap(); auditMap.putAll(p); - 
auditMap.putSingle(AuditParameters.AUDIT_ID, queryId.toString()); + auditMap.set(AuditParameters.AUDIT_ID, queryId.toString()); // Set expectations expect(this.context.getCallerPrincipal()).andReturn(this.principal).times(4); @@ -3621,7 +3640,7 @@ public void testUpdateQuery_PersistentMode() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); GenericResponse result1 = subject.updateQuery(queryId.toString(), queryLogicName, query, queryVisibility, beginDate, endDate, @@ -3670,11 +3689,10 @@ public void testExecute_HappyPath() throws Exception { qp.setPageTimeout(pageTimeout); qp.setColumnVisibility(queryAuthorizations); - MultivaluedMap params = new MultivaluedMapImpl<>(); - params.putAll(qp.toMap()); - params.putSingle(QueryParameters.QUERY_TRACE, Boolean.toString(trace)); - params.putSingle(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - params.putSingle(QueryParameters.QUERY_PARAMS, parameters); + MultiValueMap params = new LinkedMultiValueMap<>(qp.toMap()); + params.set(QueryParameters.QUERY_TRACE, Boolean.toString(trace)); + params.set(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); + params.set(QueryParameters.QUERY_PARAMS, parameters); QueryExecutorBean subject = PowerMock.createPartialMock(QueryExecutorBean.class, "createQuery"); @@ -3685,7 +3703,7 @@ public void testExecute_HappyPath() throws Exception { expect(this.queryLogicFactory.getQueryLogic(queryLogicName, principal)).andReturn((QueryLogic) this.queryLogic1); 
expect(this.queryLogic1.getEnrichedTransformer(isA(Query.class))).andReturn(this.transformer); expect(this.transformer.createResponse(isA(ResultsPage.class))).andReturn(this.baseResponse); - expect(subject.createQuery(queryLogicName, params, httpHeaders)).andReturn(createResponse); + expect(subject.createQuery(queryLogicName, MapUtils.toMultivaluedMap(params), httpHeaders)).andReturn(createResponse); expect(this.cache.get(eq(queryId.toString()))).andReturn(this.runningQuery); expect(this.runningQuery.getMetric()).andReturn(this.queryMetric); expect(this.responseObjectFactory.getQueryImpl()).andReturn(new QueryImpl()); @@ -3702,12 +3720,12 @@ public void testExecute_HappyPath() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); - StreamingOutput result1 = subject.execute(queryLogicName, params, httpHeaders); + StreamingOutput result1 = subject.execute(queryLogicName, MapUtils.toMultivaluedMap(params), httpHeaders); PowerMock.verifyAll(); // Verify results @@ -3759,16 +3777,16 @@ public void testExecute_InvalidMediaType() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); 
setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); StreamingOutput result1 = null; try { - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); result1 = subject.execute(queryLogicName, queryParameters, httpHeaders); @@ -3792,7 +3810,7 @@ public void testLookupUUID_happyPath() { expect(uriInfo.getQueryParameters()).andReturn(new MultivaluedHashMap<>()); expect(lookupUUIDUtil.getUUIDType("uuidType")).andReturn(uuidType); - expect(uuidType.getDefinedView(null)).andReturn("abc"); + expect(uuidType.getQueryLogic(null)).andReturn("abc"); expect(lookupUUIDUtil.createUUIDQueryAndNext(isA(GetUUIDCriteria.class))).andReturn(response); expect(response.getQueryId()).andReturn("11111"); expect(context.getCallerPrincipal()).andReturn(principal); @@ -3806,7 +3824,7 @@ public void testLookupUUID_happyPath() { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); 
setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(subject, LookupUUIDUtil.class, lookupUUIDUtil); @@ -3826,7 +3844,7 @@ public void testLookupUUID_closeFail() { expect(uriInfo.getQueryParameters()).andReturn(new MultivaluedHashMap<>()); expect(lookupUUIDUtil.getUUIDType("uuidType")).andReturn(uuidType); - expect(uuidType.getDefinedView(null)).andReturn("abc"); + expect(uuidType.getQueryLogic(null)).andReturn("abc"); expect(lookupUUIDUtil.createUUIDQueryAndNext(isA(GetUUIDCriteria.class))).andReturn(response); expect(response.getQueryId()).andReturn("11111"); expect(context.getCallerPrincipal()).andReturn(principal); @@ -3840,7 +3858,7 @@ public void testLookupUUID_closeFail() { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(subject, LookupUUIDUtil.class, lookupUUIDUtil); @@ -3870,7 +3888,7 @@ public void testPlanQuery() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); @@ -3882,9 +3900,9 @@ public void testPlanQuery() throws Exception { queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, 
QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -3896,11 +3914,10 @@ public void testPlanQuery() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); @@ -3924,8 +3941,8 @@ public void testPlanQuery() throws Exception { expect(this.queryLogic1.getConnPoolName()).andReturn("connPool1"); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, 
null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.principal.getPrimaryUser()).andReturn(dwUser).anyTimes(); expect(this.dwUser.getAuths()).andReturn(Collections.singleton(queryAuthorizations)).anyTimes(); @@ -3973,12 +3990,12 @@ public void testPlanQuery() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -4008,7 +4025,7 @@ public void testPlanQueryWithValues() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); List dnList = Collections.singletonList(userDN); UUID queryId = UUID.randomUUID(); @@ -4016,30 +4033,28 @@ public void 
testPlanQueryWithValues() throws Exception { HashMap> authsMap = new HashMap<>(); authsMap.put("userdn", Arrays.asList(queryAuthorizations)); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_STRING, query); - queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); - queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); - queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); - queryParameters.putSingle(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - queryParameters.putSingle(QueryParameters.QUERY_TRACE, String.valueOf(trace)); - queryParameters.putSingle(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); - queryParameters.putSingle("valid", "param"); - queryParameters.putSingle(QueryExecutorBean.EXPAND_VALUES, "true"); - + MultiValueMap queryParameters = new LinkedMultiValueMap<>(); + queryParameters.set(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); + queryParameters.set(QueryParameters.QUERY_STRING, query); + queryParameters.set(QueryParameters.QUERY_NAME, queryName); + queryParameters.set(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.set(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.set(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); + queryParameters.set(QueryParameters.QUERY_AUTHORIZATIONS, 
queryAuthorizations); + queryParameters.set(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); + queryParameters.set(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); + queryParameters.set(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); + queryParameters.set(QueryParameters.QUERY_TRACE, String.valueOf(trace)); + queryParameters.set(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); + queryParameters.set("valid", "param"); + queryParameters.set(QueryExecutorBean.EXPAND_VALUES, "true"); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(queryParameters)); // op.putSingle(PrivateAuditConstants.AUDIT_TYPE, AuditType.NONE.name()); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); @@ -4047,7 +4062,6 @@ public void testPlanQueryWithValues() throws Exception { // Set expectations of the create logic queryLogic1.validate(queryParameters); - // this.query.populateMetric(isA(QueryMetric.class)); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); expect(this.queryLogic1.getMaxPageSize()).andReturn(1000).times(2); expect(this.context.getCallerPrincipal()).andReturn(this.principal).anyTimes(); @@ -4059,7 +4073,7 @@ public void testPlanQueryWithValues() throws Exception { expect(this.queryLogic1.containsDNWithAccess(Collections.singletonList(userDN))).andReturn(true); expect(this.queryLogic1.getAuditType(null)).andReturn(AuditType.PASSIVE); expect(this.queryLogic1.getSelectors(this.query)).andReturn(null); 
- expect(auditor.audit(eq(queryParameters))).andReturn(null); + expect(auditor.audit(anyObject())).andReturn(null); expect(this.principal.getAuthorizations()).andReturn((Collection) Arrays.asList(Arrays.asList(queryAuthorizations))); expect(persister.create(eq(userDNpair.subjectDN()), eq(dnList), eq(marking), eq(queryLogicName), eq(qp), eq(op))).andReturn(this.query); expect(this.queryLogic1.getAuditType(this.query)).andReturn(AuditType.PASSIVE); @@ -4069,8 +4083,8 @@ public void testPlanQueryWithValues() throws Exception { expect(this.queryLogic1.getUserOperations()).andReturn(null); expect(this.connectionFactory.getTrackingMap(isA(StackTraceElement[].class))).andReturn(null); this.query.populateTrackingMap(null); - this.connectionRequestBean.requestBegin(queryId.toString()); - expect(this.connectionFactory.getClient("connPool1", Priority.NORMAL, null)).andReturn(this.client); + this.connectionRequestBean.requestBegin(queryId.toString(), userDN.toLowerCase(), null); + expect(this.connectionFactory.getClient(userDN.toLowerCase(), new ArrayList<>(0), "connPool1", Priority.NORMAL, null)).andReturn(this.client); this.connectionRequestBean.requestEnd(queryId.toString()); expect(this.principal.getPrimaryUser()).andReturn(dwUser).anyTimes(); @@ -4121,16 +4135,16 @@ public void testPlanQueryWithValues() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, 
QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); - GenericResponse result1 = subject.planQuery(queryLogicName, queryParameters); + GenericResponse result1 = subject.planQuery(queryLogicName, MapUtils.toMultivaluedMap(queryParameters)); PowerMock.verifyAll(); // Verify results @@ -4162,19 +4176,19 @@ public void testCreateQuery_auditException() throws Exception { HashMap> authsMap = new HashMap<>(); authsMap.put("USERDN", Arrays.asList(queryAuthorizations)); - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putSingle(QueryParameters.QUERY_STRING, query); - queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); - queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); - queryParameters.putSingle(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); - queryParameters.putSingle(QueryParameters.QUERY_TRACE, String.valueOf(trace)); - queryParameters.putSingle(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); - queryParameters.putSingle("valid", "param"); + MultiValueMap queryParameters = new LinkedMultiValueMap<>(); + queryParameters.set(QueryParameters.QUERY_STRING, query); + 
queryParameters.set(QueryParameters.QUERY_NAME, queryName); + queryParameters.set(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); + queryParameters.set(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.set(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.set(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); + queryParameters.set(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); + queryParameters.set(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); + queryParameters.set(QueryParameters.QUERY_PERSISTENCE, persistenceMode.name()); + queryParameters.set(QueryParameters.QUERY_TRACE, String.valueOf(trace)); + queryParameters.set(ColumnVisibilitySecurityMarking.VISIBILITY_MARKING, queryVisibility); + queryParameters.set("valid", "param"); expect(context.getCallerPrincipal()).andReturn(principal).anyTimes(); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); @@ -4191,7 +4205,7 @@ public void testCreateQuery_auditException() throws Exception { expect(queryLogic1.getSelectors(null)).andReturn(null); expect(this.responseObjectFactory.getQueryImpl()).andReturn(new QueryImpl()); expect(queryLogic1.getResultLimit(anyObject(QueryImpl.class))).andReturn(-1L); - expect(auditor.audit(queryParameters)).andThrow(new JMSRuntimeException("EXPECTED TESTING EXCEPTION")); + expect(auditor.audit(EasyMock.anyObject())).andThrow(new JMSRuntimeException("EXPECTED TESTING EXCEPTION")); queryLogic1.close(); PowerMock.replayAll(); @@ -4205,17 +4219,17 @@ public void testCreateQuery_auditException() throws Exception { setInternalState(executor, ClosedQueryCache.class, closedCache); setInternalState(executor, Persister.class, persister); setInternalState(executor, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(executor, QueryExpirationConfiguration.class, 
queryExpirationConf); + setInternalState(executor, QueryExpirationProperties.class, queryExpirationConf); setInternalState(executor, AuditBean.class, auditor); setInternalState(executor, QueryMetricsBean.class, metrics); setInternalState(executor, Multimap.class, traceInfos); setInternalState(executor, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(executor, QueryParameters.class, new QueryParametersImpl()); + setInternalState(executor, QueryParameters.class, new DefaultQueryParameters()); setInternalState(executor, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(executor, AccumuloConnectionRequestBean.class, connectionRequestBean); - executor.createQuery(queryLogicName, queryParameters); + executor.createQuery(queryLogicName, MapUtils.toMultivaluedMap(queryParameters)); PowerMock.verifyAll(); } @@ -4245,13 +4259,13 @@ public void testReset_auditException() throws Exception { qp.setUserDN(userDN); qp.setDnList(Collections.singletonList(userDN)); - MultiValueMap map = new LinkedMultiValueMap<>(); + MultiValueMap map = new LinkedMultiValueMap<>(qp.toMap()); map.set(PrivateAuditConstants.AUDIT_TYPE, AuditType.PASSIVE.name()); map.set(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); map.set(PrivateAuditConstants.COLUMN_VISIBILITY, authorization); map.set(PrivateAuditConstants.USER_DN, userDN); map.set(AuditParameters.AUDIT_ID, queryName); - MultivaluedMap auditMap = new MultivaluedMapImpl(); + MultiValueMap auditMap = new LinkedMultiValueMap(); auditMap.putAll(map); // Set expectations @@ -4299,7 +4313,7 @@ public void testReset_auditException() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + 
setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); @@ -4336,9 +4350,9 @@ public void testUpdateQuery_auditException() throws Exception { MultiValueMap p = new LinkedMultiValueMap<>(); p.set(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); - p.set(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - p.set(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - p.set(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + p.set(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + p.set(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + p.set(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); p.set(QueryParameters.QUERY_NAME, queryName); p.set(QueryParameters.QUERY_PAGESIZE, Integer.toString(pagesize)); p.set(QueryParameters.QUERY_PAGETIMEOUT, Integer.toString(pageTimeout)); @@ -4352,7 +4366,7 @@ public void testUpdateQuery_auditException() throws Exception { p.set(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); p.set(PrivateAuditConstants.USER_DN, userDN); p.set(AuditParameters.AUDIT_ID, queryId.toString()); - MultivaluedMap auditMap = new MultivaluedMapImpl(); + MultiValueMap auditMap = new LinkedMultiValueMap(); auditMap.putAll(p); // Set expectations @@ -4396,7 +4410,7 @@ public void testUpdateQuery_auditException() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, 
queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); @@ -4426,18 +4440,17 @@ public void testDefineQuery_userNotInAllowedDNs() throws Exception { boolean trace = false; // Set expectations - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); @@ -4456,9 +4469,9 @@ public void testDefineQuery_userNotInAllowedDNs() throws Exception { QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, EJBContext.class, context); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, 
queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -4497,18 +4510,17 @@ public void testCreateQuery_userNotInAllowedDNs() throws Exception { boolean trace = false; // Set expectations - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); @@ -4527,9 +4539,9 @@ public void 
testCreateQuery_userNotInAllowedDNs() throws Exception { QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, EJBContext.class, context); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -4568,18 +4580,17 @@ public void testCreateQueryAndNext_userNotInAllowedDNs() throws Exception { boolean trace = false; // Set expectations - MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); - queryParameters.putAll(QueryParametersImpl.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, - expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); + MultivaluedMap queryParameters = MapUtils.toMultivaluedMap( + DefaultQueryParameters.paramsToMap(queryLogicName, query, queryName, queryVisibility, beginDate, endDate, queryAuthorizations, + expirationDate, pagesize, pageTimeout, maxResultsOverride, persistenceMode, systemFrom, parameters, trace)); ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = 
MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); expect(this.queryLogicFactory.getQueryLogic(queryLogicName, this.principal)).andReturn((QueryLogic) this.queryLogic1); @@ -4598,9 +4609,9 @@ public void testCreateQueryAndNext_userNotInAllowedDNs() throws Exception { QueryExecutorBean subject = new QueryExecutorBean(); setInternalState(subject, EJBContext.class, context); setInternalState(subject, QueryLogicFactory.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); Throwable result1 = null; try { @@ -4635,16 +4646,16 @@ public void testPlanQuery_userNotInAllowedDNs() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, 
QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -4656,11 +4667,10 @@ public void testPlanQuery_userNotInAllowedDNs() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); @@ -4687,12 +4697,12 @@ public void testPlanQuery_userNotInAllowedDNs() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, 
Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -4707,7 +4717,7 @@ public void testPlanQuery_userNotInAllowedDNs() throws Exception { // Verify results assertTrue("QueryException expected to have been thrown", result1 instanceof QueryException); assertEquals("Thrown exception expected to have been due to access denied", "401", ((QueryException) result1).getErrorCode()); - assertEquals("Thrown exception expected to detail reason for access denial", "None of the DNs used have access to this query logic: [userdn]", + assertEquals("Thrown exception expected to detail reason for access denial", "None of the DNs used have access to this query logic: [userDN]", result1.getMessage()); } @@ -4729,16 +4739,16 @@ public void testPredictQuery_userNotInAllowedDNs() throws Exception { boolean trace = false; String userName = "userName"; String userSid = "userSid"; - String userDN = "userdn"; + String userDN = "userDN"; SubjectIssuerDNPair userDNpair = SubjectIssuerDNPair.of(userDN); MultivaluedMap queryParameters = new MultivaluedMapImpl<>(); queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, queryLogicName); queryParameters.putSingle(QueryParameters.QUERY_STRING, query); queryParameters.putSingle(QueryParameters.QUERY_NAME, queryName); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, 
QueryParametersImpl.formatDate(expirationDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); queryParameters.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, queryAuthorizations); queryParameters.putSingle(QueryParameters.QUERY_PAGESIZE, String.valueOf(pagesize)); queryParameters.putSingle(QueryParameters.QUERY_PAGETIMEOUT, String.valueOf(pageTimeout)); @@ -4750,11 +4760,10 @@ public void testPredictQuery_userNotInAllowedDNs() throws Exception { ColumnVisibilitySecurityMarking marking = new ColumnVisibilitySecurityMarking(); marking.validate(queryParameters); - QueryParameters qp = new QueryParametersImpl(); + QueryParameters qp = new DefaultQueryParameters(); qp.validate(queryParameters); - MultivaluedMap op = new MultivaluedMapImpl<>(); - op.putAll(qp.getUnknownParameters(queryParameters)); + MultivaluedMap op = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(queryParameters))); op.putSingle(PrivateAuditConstants.LOGIC_CLASS, queryLogicName); op.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, queryVisibility); op.putSingle(PrivateAuditConstants.USER_DN, userDNpair.subjectDN()); @@ -4781,12 +4790,12 @@ public void testPredictQuery_userNotInAllowedDNs() throws Exception { setInternalState(subject, ClosedQueryCache.class, closedCache); setInternalState(subject, Persister.class, persister); setInternalState(subject, QueryLogicFactoryImpl.class, queryLogicFactory); - setInternalState(subject, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(subject, QueryExpirationProperties.class, queryExpirationConf); setInternalState(subject, AuditBean.class, auditor); setInternalState(subject, QueryMetricsBean.class, metrics); setInternalState(subject, 
Multimap.class, traceInfos); setInternalState(subject, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(subject, QueryParameters.class, new QueryParametersImpl()); + setInternalState(subject, QueryParameters.class, new DefaultQueryParameters()); setInternalState(subject, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(connectionRequestBean, EJBContext.class, context); setInternalState(subject, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -4801,7 +4810,7 @@ public void testPredictQuery_userNotInAllowedDNs() throws Exception { // Verify results assertTrue("QueryException expected to have been thrown", result1 instanceof QueryException); assertEquals("Thrown exception expected to have been due to access denied", "401", ((QueryException) result1).getErrorCode()); - assertEquals("Thrown exception expected to detail reason for access denial", "None of the DNs used have access to this query logic: [userdn]", + assertEquals("Thrown exception expected to detail reason for access denial", "None of the DNs used have access to this query logic: [userDN]", result1.getMessage()); } @@ -4878,4 +4887,20 @@ public Set getExampleQueries() { return Collections.emptySet(); } } + + public void populateMetric(Query query, QueryMetric qm) { + qm.setQueryType(query.getClass()); + qm.setQueryId(query.getId().toString()); + qm.setUser(query.getOwner()); + qm.setUserDN(query.getUserDN()); + qm.setQuery(query.getQuery()); + qm.setQueryLogic(query.getQueryLogicName()); + qm.setBeginDate(query.getBeginDate()); + qm.setEndDate(query.getEndDate()); + qm.setQueryAuthorizations(query.getQueryAuthorizations()); + qm.setQueryName(query.getQueryName()); + qm.setParameters(query.getParameters()); + qm.setColumnVisibility(query.getColumnVisibility()); + } + } diff --git a/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedRunningQueryTest.java 
b/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedRunningQueryTest.java index 774f5017323..51cbf1e5378 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedRunningQueryTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/runner/ExtendedRunningQueryTest.java @@ -33,6 +33,13 @@ import com.google.common.collect.Lists; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.connection.AccumuloConnectionFactory.Priority; +import datawave.core.query.cache.ResultsPage; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.QueryLogic; +import datawave.microservice.authorization.util.AuthorizationsUtil; +import datawave.microservice.query.Query; import datawave.microservice.querymetric.QueryMetric; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.security.authorization.DatawavePrincipal; @@ -40,13 +47,6 @@ import datawave.security.authorization.DatawaveUser.UserType; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.util.DnUtils; -import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.common.connection.AccumuloConnectionFactory.Priority; -import datawave.webservice.query.Query; -import datawave.webservice.query.cache.ResultsPage; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.QueryLogic; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.util.QueryUncaughtExceptionHandler; @@ -181,7 +181,7 @@ public void testNext_HappyPathUsingDeprecatedConstructor() throws Exception { expect(this.genericConfiguration.getQueryString()).andReturn(query).once(); expect(this.queryLogic.isLongRunningQuery()).andReturn(false); 
expect(this.queryLogic.getResultLimit(eq(this.query))).andReturn(maxResults); - this.queryLogic.preInitialize(this.query, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); + this.queryLogic.preInitialize(this.query, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); expect(this.queryLogic.getUserOperations()).andReturn(null); this.queryLogic.setPageProcessingStartTime(anyLong()); @@ -272,7 +272,7 @@ public void testNextMaxResults_HappyPathUsingDeprecatedConstructor() throws Exce expect(this.queryLogic.getPageByteTrigger()).andReturn(pageByteTrigger).anyTimes(); expect(this.queryLogic.getMaxWork()).andReturn(maxWork).anyTimes(); expect(this.queryLogic.getMaxResults()).andReturn(maxResults).anyTimes(); - this.queryLogic.preInitialize(this.query, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); + this.queryLogic.preInitialize(this.query, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); expect(this.queryLogic.getUserOperations()).andReturn(null); expect(this.genericConfiguration.getQueryString()).andReturn(query).once(); this.queryLogic.setPageProcessingStartTime(anyLong()); @@ -342,7 +342,7 @@ public void testNext_NoResultsAfterCancellationUsingDeprecatedConstructor() thro expect(this.queryLogic.isLongRunningQuery()).andReturn(false); expect(this.queryLogic.getResultLimit(eq(this.query))).andReturn(maxResults); expect(this.queryLogic.getMaxResults()).andReturn(maxResults); - this.queryLogic.preInitialize(this.query, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); + this.queryLogic.preInitialize(this.query, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); expect(this.queryLogic.getUserOperations()).andReturn(null); this.queryLogic.setPageProcessingStartTime(anyLong()); @@ -397,7 
+397,7 @@ public void testCloseConnection_HappyPath() throws Exception { expect(this.queryLogic.isLongRunningQuery()).andReturn(false); expect(this.queryLogic.getResultLimit(eq(this.query))).andReturn(maxResults); expect(this.queryLogic.getMaxResults()).andReturn(maxResults); - this.queryLogic.preInitialize(this.query, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); + this.queryLogic.preInitialize(this.query, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); expect(this.queryLogic.getUserOperations()).andReturn(null); this.queryLogic.setupQuery(this.genericConfiguration); this.queryMetrics.updateMetric(isA(QueryMetric.class)); @@ -486,7 +486,7 @@ public void testNextWithDnResultLimit_HappyPathUsingDeprecatedConstructor() thro expect(this.queryLogic.getPageByteTrigger()).andReturn(pageByteTrigger).anyTimes(); expect(this.queryLogic.getMaxWork()).andReturn(maxWork).anyTimes(); expect(this.queryLogic.getMaxResults()).andReturn(maxResults).anyTimes(); - this.queryLogic.preInitialize(this.query, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); + this.queryLogic.preInitialize(this.query, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Collections.singleton("AUTH_1")))); expect(this.queryLogic.getUserOperations()).andReturn(null); expect(this.genericConfiguration.getQueryString()).andReturn(query).once(); this.queryLogic.setPageProcessingStartTime(anyLong()); diff --git a/web-services/query/src/test/java/datawave/webservice/query/runner/QueryExecutorBeanTest.java b/web-services/query/src/test/java/datawave/webservice/query/runner/QueryExecutorBeanTest.java index 964dfc83880..29efa06e89c 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/runner/QueryExecutorBeanTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/runner/QueryExecutorBeanTest.java @@ -61,14 +61,26 @@ import 
org.xml.sax.SAXException; import com.google.common.collect.HashMultimap; -import com.google.common.collect.Maps; import com.google.common.collect.Multimap; import com.google.common.collect.Sets; import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.audit.PrivateAuditConstants; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.core.query.predict.QueryPredictor; import datawave.marking.ColumnVisibilitySecurityMarking; import datawave.marking.SecurityMarking; +import datawave.microservice.query.DefaultQueryParameters; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryParameters; +import datawave.microservice.query.QueryPersistence; +import datawave.microservice.query.config.QueryExpirationProperties; import datawave.microservice.querymetric.BaseQueryMetric; import datawave.microservice.querymetric.BaseQueryMetric.Lifecycle; import datawave.microservice.querymetric.BaseQueryMetric.Prediction; @@ -87,32 +99,21 @@ import datawave.webservice.common.audit.AuditService; import datawave.webservice.common.audit.Auditor.AuditType; import datawave.webservice.common.audit.DefaultAuditParameterBuilder; -import datawave.webservice.common.audit.PrivateAuditConstants; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.BadRequestException; import datawave.webservice.common.exception.DatawaveWebApplicationException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryParameters; -import datawave.webservice.query.QueryParametersImpl; 
-import datawave.webservice.query.QueryPersistence; import datawave.webservice.query.cache.ClosedQueryCache; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean; import datawave.webservice.query.cache.CreatedQueryLogicCacheBean.Triple; import datawave.webservice.query.cache.QueryCache; -import datawave.webservice.query.cache.QueryExpirationConfiguration; import datawave.webservice.query.cache.QueryTraceCache; -import datawave.webservice.query.configuration.GenericQueryConfiguration; import datawave.webservice.query.configuration.LookupUUIDConfiguration; import datawave.webservice.query.exception.DatawaveErrorCode; import datawave.webservice.query.exception.QueryException; import datawave.webservice.query.factory.Persister; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.QueryLogic; -import datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.logic.QueryLogicFactoryImpl; import datawave.webservice.query.metric.QueryMetricsBean; import datawave.webservice.query.result.event.ResponseObjectFactory; +import datawave.webservice.query.util.MapUtils; import datawave.webservice.result.GenericResponse; @RunWith(PowerMockRunner.class) @@ -145,7 +146,7 @@ public class QueryExecutorBeanTest { private AuditService auditService; private QueryMetricsBean metrics; private QueryLogicFactoryImpl queryLogicFactory; - private QueryExpirationConfiguration queryExpirationConf; + private QueryExpirationProperties queryExpirationConf; private Persister persister; private QueryPredictor predictor; private ResponseObjectFactory responseObjectFactory; @@ -179,10 +180,10 @@ public void setup() throws Exception { predictor = createStrictMock(QueryPredictor.class); ctx = createStrictMock(EJBContext.class); qlCache = new CreatedQueryLogicCacheBean(); - queryExpirationConf = new QueryExpirationConfiguration(); - queryExpirationConf.setPageSizeShortCircuitCheckTime(45); - 
queryExpirationConf.setPageShortCircuitTimeout(58); - queryExpirationConf.setCallTime(60); + queryExpirationConf = new QueryExpirationProperties(); + queryExpirationConf.setShortCircuitCheckTime(45); + queryExpirationConf.setShortCircuitTimeout(58); + queryExpirationConf.setCallTimeout(60); connectionRequestBean = createStrictMock(AccumuloConnectionRequestBean.class); responseObjectFactory = createStrictMock(ResponseObjectFactory.class); setInternalState(auditor, AuditService.class, auditService); @@ -195,7 +196,7 @@ public void setup() throws Exception { setInternalState(bean, AuditBean.class, auditor); setInternalState(bean, QueryMetricsBean.class, metrics); setInternalState(bean, QueryLogicFactory.class, queryLogicFactory); - setInternalState(bean, QueryExpirationConfiguration.class, queryExpirationConf); + setInternalState(bean, QueryExpirationProperties.class, queryExpirationConf); setInternalState(bean, Persister.class, persister); setInternalState(bean, QueryPredictor.class, predictor); setInternalState(bean, EJBContext.class, ctx); @@ -204,7 +205,7 @@ public void setup() throws Exception { setInternalState(bean, Multimap.class, HashMultimap.create()); setInternalState(bean, LookupUUIDConfiguration.class, new LookupUUIDConfiguration()); setInternalState(bean, SecurityMarking.class, new ColumnVisibilitySecurityMarking()); - setInternalState(bean, QueryParameters.class, new QueryParametersImpl()); + setInternalState(bean, QueryParameters.class, new DefaultQueryParameters()); setInternalState(bean, QueryMetricFactory.class, new QueryMetricFactoryImpl()); setInternalState(bean, AccumuloConnectionRequestBean.class, connectionRequestBean); @@ -246,9 +247,9 @@ private MultivaluedMap createNewQueryParameterMap() throws Except p.putSingle(QueryParameters.QUERY_STRING, "foo == 'bar'"); p.putSingle(QueryParameters.QUERY_NAME, "query name"); p.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, StringUtils.join(auths, ",")); - p.putSingle(QueryParameters.QUERY_BEGIN, 
QueryParametersImpl.formatDate(beginDate)); - p.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); - p.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + p.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + p.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); + p.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); p.putSingle(QueryParameters.QUERY_NAME, queryName); p.putSingle(QueryParameters.QUERY_PAGESIZE, Integer.toString(pagesize)); p.putSingle(QueryParameters.QUERY_STRING, query); @@ -259,9 +260,8 @@ private MultivaluedMap createNewQueryParameterMap() throws Except } private MultivaluedMap createNewQueryParameters(QueryImpl q, MultivaluedMap p) { - QueryParameters qp = new QueryParametersImpl(); - MultivaluedMap optionalParameters = new MultivaluedMapImpl<>(); - optionalParameters.putAll(qp.getUnknownParameters(p)); + QueryParameters qp = new DefaultQueryParameters(); + MultivaluedMap optionalParameters = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(p))); optionalParameters.putSingle(PrivateAuditConstants.USER_DN, userDN.toLowerCase()); optionalParameters.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, "PRIVATE|PUBLIC"); optionalParameters.putSingle(PrivateAuditConstants.LOGIC_CLASS, q.getQueryLogicName()); @@ -285,10 +285,9 @@ private void defineTestRunner(QueryImpl q, MultivaluedMap p) thro PowerMock.resetAll(); EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); - suppress(constructor(QueryParametersImpl.class)); + suppress(constructor(DefaultQueryParameters.class)); EasyMock.expect(persister.create(principal.getUserDN().subjectDN(), dnList, (SecurityMarking) Whitebox.getField(bean.getClass(), "marking").get(bean), queryLogicName, (QueryParameters) Whitebox.getField(bean.getClass(), "qp").get(bean), 
optionalParameters)).andReturn(q); - EasyMock.expect(queryLogicFactory.getQueryLogic(queryLogicName, principal)).andReturn(logic); EasyMock.expect(logic.getRequiredQueryParameters()).andReturn(Collections.EMPTY_SET); EasyMock.expect(logic.getConnectionPriority()).andReturn(AccumuloConnectionFactory.Priority.NORMAL); @@ -348,9 +347,9 @@ public void testCreateWithNoSelectedAuths() throws Exception { MultivaluedMap p = new MultivaluedMapImpl<>(); p.putSingle(QueryParameters.QUERY_AUTHORIZATIONS, ""); - p.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - p.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(beginDate)); - p.putSingle(QueryParameters.QUERY_EXPIRATION, QueryParametersImpl.formatDate(expirationDate)); + p.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + p.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(beginDate)); + p.putSingle(QueryParameters.QUERY_EXPIRATION, DefaultQueryParameters.formatDate(expirationDate)); p.putSingle(QueryParameters.QUERY_NAME, queryName); p.putSingle(QueryParameters.QUERY_PAGESIZE, Integer.toString(pagesize)); p.putSingle(QueryParameters.QUERY_STRING, query); @@ -360,9 +359,8 @@ public void testCreateWithNoSelectedAuths() throws Exception { InMemoryInstance instance = new InMemoryInstance(); AccumuloClient client = new InMemoryAccumuloClient("root", instance); - QueryParameters qp = new QueryParametersImpl(); - MultivaluedMap optionalParameters = new MultivaluedMapImpl<>(); - optionalParameters.putAll(qp.getUnknownParameters(p)); + QueryParameters qp = new DefaultQueryParameters(); + MultivaluedMap optionalParameters = MapUtils.toMultivaluedMap(qp.getUnknownParameters(MapUtils.toMultiValueMap(p))); DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of(userDN, ""), UserType.USER, Arrays.asList(auths), null, null, 0L); @@ -373,7 +371,7 @@ public void testCreateWithNoSelectedAuths() throws Exception { 
PowerMock.resetAll(); EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); - suppress(constructor(QueryParametersImpl.class)); + suppress(constructor(DefaultQueryParameters.class)); EasyMock.expect(persister.create(userDN, dnList, (SecurityMarking) Whitebox.getField(bean.getClass(), "marking").get(bean), queryLogicName, (QueryParameters) Whitebox.getField(bean.getClass(), "qp").get(bean), optionalParameters)).andReturn(q); @@ -434,7 +432,7 @@ public void testPredict() throws Exception { PowerMock.resetAll(); EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes(); - suppress(constructor(QueryParametersImpl.class)); + suppress(constructor(DefaultQueryParameters.class)); EasyMock.expect(persister.create(principal.getUserDN().subjectDN(), dnList, (SecurityMarking) Whitebox.getField(bean.getClass(), "marking").get(bean), queryLogicName, (QueryParameters) Whitebox.getField(bean.getClass(), "qp").get(bean), optionalParameters)).andReturn(q); @@ -628,8 +626,8 @@ public void testBeginDateAfterEndDate() throws Exception { final MultivaluedMap queryParameters = createNewQueryParameterMap(); queryParameters.remove(QueryParameters.QUERY_BEGIN); queryParameters.remove(QueryParameters.QUERY_END); - queryParameters.putSingle(QueryParameters.QUERY_BEGIN, QueryParametersImpl.formatDate(beginDate)); - queryParameters.putSingle(QueryParameters.QUERY_END, QueryParametersImpl.formatDate(endDate)); + queryParameters.putSingle(QueryParameters.QUERY_BEGIN, DefaultQueryParameters.formatDate(beginDate)); + queryParameters.putSingle(QueryParameters.QUERY_END, DefaultQueryParameters.formatDate(endDate)); try { queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, "EventQueryLogic"); @@ -684,22 +682,21 @@ public void testCloseActuallyCloses() throws Exception { EasyMock.expect(persister.findById(EasyMock.anyString())).andReturn(null).anyTimes(); EasyMock.expect(responseObjectFactory.getQueryImpl()).andReturn(new QueryImpl()); 
EasyMock.expect(logic.getResultLimit(anyObject(QueryImpl.class))).andReturn(-1L); - EasyMock.expect(connectionFactory.getTrackingMap(anyObject())).andReturn(Maps.newHashMap()).anyTimes(); + EasyMock.expect(connectionFactory.getTrackingMap(anyObject())).andReturn(null).anyTimes(); BaseQueryMetric metric = new QueryMetricFactoryImpl().createMetric(); metric.populate(q); EasyMock.expectLastCall(); metric.setQueryType(RunningQuery.class.getSimpleName()); metric.setLifecycle(Lifecycle.DEFINED); - System.out.println(metric); Set predictions = new HashSet<>(); predictions.add(new Prediction("source", 1)); EasyMock.expect(predictor.predict(metric)).andReturn(predictions); - connectionRequestBean.requestBegin(q.getId().toString()); + connectionRequestBean.requestBegin(q.getId().toString(), userDN.toLowerCase(), null); EasyMock.expectLastCall(); - EasyMock.expect(connectionFactory.getClient(eq("connPool1"), anyObject(), anyObject())).andReturn(c).anyTimes(); + EasyMock.expect(connectionFactory.getClient(eq(userDN.toLowerCase()), eq(null), eq("connPool1"), anyObject(), anyObject())).andReturn(c).anyTimes(); connectionRequestBean.requestEnd(q.getId().toString()); EasyMock.expectLastCall(); connectionFactory.returnClient(c); @@ -717,7 +714,7 @@ public void testCloseActuallyCloses() throws Exception { logic.preInitialize(q, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Sets.newHashSet("PUBLIC", "PRIVATE")))); EasyMock.expect(logic.getUserOperations()).andReturn(null); - EasyMock.expect(connectionRequestBean.cancelConnectionRequest(q.getId().toString(), principal)).andReturn(false).anyTimes(); + EasyMock.expect(connectionRequestBean.cancelConnectionRequest(q.getId().toString(), userDN.toLowerCase())).andReturn(false).anyTimes(); connectionFactory.returnClient(EasyMock.isA(AccumuloClient.class)); final AtomicBoolean initializeLooping = new AtomicBoolean(false); diff --git a/web-services/query/src/test/java/datawave/webservice/query/runner/RunningQueryTest.java 
b/web-services/query/src/test/java/datawave/webservice/query/runner/RunningQueryTest.java index b67228d7b55..cb3dc873a16 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/runner/RunningQueryTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/runner/RunningQueryTest.java @@ -32,6 +32,13 @@ import datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.query.configuration.GenericQueryConfiguration; +import datawave.core.query.logic.BaseQueryLogic; +import datawave.core.query.logic.QueryLogic; +import datawave.core.query.logic.composite.CompositeQueryLogic; +import datawave.microservice.authorization.util.AuthorizationsUtil; +import datawave.microservice.query.QueryImpl; import datawave.microservice.querymetric.QueryMetricFactoryImpl; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.DatawavePrincipal; @@ -39,15 +46,7 @@ import datawave.security.authorization.DatawaveUser.UserType; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.util.DnUtils; -import datawave.security.util.WSAuthorizationsUtil; -import datawave.webservice.common.connection.AccumuloConnectionFactory; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.configuration.GenericQueryConfiguration; -import datawave.webservice.query.logic.BaseQueryLogic; -import datawave.webservice.query.logic.DatawaveRoleManager; -import datawave.webservice.query.logic.QueryLogic; import datawave.webservice.query.logic.TestQueryLogic; -import datawave.webservice.query.logic.composite.CompositeQueryLogic; import datawave.webservice.query.logic.composite.CompositeQueryLogicTest; public class RunningQueryTest { @@ -118,7 +117,7 @@ public void testConstructorSetsConnection() throws Exception { 
expect(logic.isLongRunningQuery()).andReturn(false); expect(logic.getResultLimit(settings)).andReturn(-1L); expect(logic.getMaxResults()).andReturn(-1L); - logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(null)); + logic.preInitialize(settings, AuthorizationsUtil.buildAuthorizations(null)); expect(logic.getUserOperations()).andReturn(null); replay(logic); @@ -141,7 +140,7 @@ public void testConstructorWithNullConnector() throws Exception { expect(logic.isLongRunningQuery()).andReturn(false); expect(logic.getResultLimit(settings)).andReturn(-1L); expect(logic.getMaxResults()).andReturn(-1L); - logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(null)); + logic.preInitialize(settings, AuthorizationsUtil.buildAuthorizations(null)); expect(logic.getUserOperations()).andReturn(null); replay(logic); @@ -163,7 +162,7 @@ public void testConstructorShouldNotMergeAuths() throws Exception { Authorizations expected = new Authorizations(auths); expect(logic.getCollectQueryMetrics()).andReturn(false); - logic.preInitialize(settings, WSAuthorizationsUtil.buildAuthorizations(Collections.singleton(Sets.newHashSet("A", "B", "C")))); + logic.preInitialize(settings, AuthorizationsUtil.buildAuthorizations(Collections.singleton(Sets.newHashSet("A", "B", "C")))); expect(logic.getUserOperations()).andReturn(null); replay(logic); @@ -189,21 +188,21 @@ public void testWithCompositeQueryLogic() throws Exception { HashSet roles = new HashSet<>(); roles.add("NONTESTROLE"); logic1.setTableName("thisTable"); - logic1.setRoleManager(new DatawaveRoleManager(roles)); + logic1.setRequiredRoles(roles); CompositeQueryLogicTest.TestQueryLogic2 logic2 = new CompositeQueryLogicTest.TestQueryLogic2(); HashSet roles2 = new HashSet<>(); roles2.add("NONTESTROLE"); logic2.setTableName("thatTable"); - logic2.setRoleManager(new DatawaveRoleManager(roles2)); - logics.put("TestQuery1", logic1); - logics.put("TestQuery2", logic2); + logic2.setRequiredRoles(roles2); + 
logics.put("TestQueryLogic", logic1); + logics.put("TestQueryLogic2", logic2); CompositeQueryLogic compositeQueryLogic = new CompositeQueryLogic(); compositeQueryLogic.setQueryLogics(logics); DatawaveUser user = new DatawaveUser(userDN, UserType.USER, Arrays.asList(auths), null, null, 0L); DatawavePrincipal principal = new DatawavePrincipal(Collections.singletonList(user)); - compositeQueryLogic.setPrincipal(principal); + compositeQueryLogic.setCurrentUser(principal); try { RunningQuery query = new RunningQuery(client, connectionPriority, compositeQueryLogic, settings, null, principal, new QueryMetricFactoryImpl()); } catch (NullPointerException npe) { diff --git a/web-services/query/src/test/java/datawave/webservice/query/util/LookupUUIDUtilTest.java b/web-services/query/src/test/java/datawave/webservice/query/util/LookupUUIDUtilTest.java index b61eec6caf8..ea6ba2e886a 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/util/LookupUUIDUtilTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/util/LookupUUIDUtilTest.java @@ -17,12 +17,12 @@ import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; +import datawave.core.query.logic.QueryLogicFactory; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; import datawave.query.data.UUIDType; import datawave.security.authorization.UserOperations; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; import datawave.webservice.query.configuration.LookupUUIDConfiguration; -import datawave.webservice.query.logic.QueryLogicFactory; import datawave.webservice.query.result.event.ResponseObjectFactory; import datawave.webservice.query.runner.QueryExecutor; diff --git a/web-services/query/src/test/java/datawave/webservice/query/util/QueryUtilTest.java b/web-services/query/src/test/java/datawave/webservice/query/util/QueryUtilTest.java index 
56874dbad22..ed52bad46b6 100644 --- a/web-services/query/src/test/java/datawave/webservice/query/util/QueryUtilTest.java +++ b/web-services/query/src/test/java/datawave/webservice/query/util/QueryUtilTest.java @@ -14,9 +14,10 @@ import com.google.protobuf.InvalidProtocolBufferException; -import datawave.webservice.query.Query; -import datawave.webservice.query.QueryImpl; -import datawave.webservice.query.QueryImpl.Parameter; +import datawave.core.query.util.QueryUtil; +import datawave.microservice.query.Query; +import datawave.microservice.query.QueryImpl; +import datawave.microservice.query.QueryImpl.Parameter; public class QueryUtilTest { diff --git a/web-services/query/src/test/resources/TestConfiguredQueryLogicFactory.xml b/web-services/query/src/test/resources/TestConfiguredQueryLogicFactory.xml index 075857b342f..c042a99cdb6 100644 --- a/web-services/query/src/test/resources/TestConfiguredQueryLogicFactory.xml +++ b/web-services/query/src/test/resources/TestConfiguredQueryLogicFactory.xml @@ -46,7 +46,6 @@ - @@ -59,12 +58,5 @@ - - - - - - - - \ No newline at end of file + diff --git a/web-services/query/src/test/resources/TestQueryLogicFactory.xml b/web-services/query/src/test/resources/TestQueryLogicFactory.xml index f1bab540051..a7391d162e0 100644 --- a/web-services/query/src/test/resources/TestQueryLogicFactory.xml +++ b/web-services/query/src/test/resources/TestQueryLogicFactory.xml @@ -21,32 +21,17 @@ - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index e759765db33..5fa492c4279 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-rest-api war diff --git a/web-services/rest-api/src/main/webapp/WEB-INF/web.xml b/web-services/rest-api/src/main/webapp/WEB-INF/web.xml index 0279c35a755..e6ee4223324 100644 --- 
a/web-services/rest-api/src/main/webapp/WEB-INF/web.xml +++ b/web-services/rest-api/src/main/webapp/WEB-INF/web.xml @@ -54,7 +54,7 @@ datawave.configuration.ConfigurationBean, - datawave.webservice.common.cache.AccumuloTableCache, + datawave.webservice.common.cache.AccumuloTableCacheBean, datawave.webservice.common.connection.AccumuloConnectionFactoryBean, datawave.webservice.common.audit.AuditBean, datawave.webservice.common.health.HealthBean, diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index 6743dc68b15..d4cc3923f57 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-security ejb @@ -100,6 +100,10 @@ org.powermock powermock-module-junit4 + + org.powermock + powermock-reflect + com.fasterxml.jackson.module jackson-module-jaxb-annotations @@ -261,9 +265,6 @@ true src/main/resources - - source-templates/** - test-classes @@ -307,45 +308,6 @@ - - maven-resources-plugin - - - copy-templated-sources - - copy-resources - - validate - - ${project.build.directory}/generated-sources/templated-sources - - - src/main/resources/source-templates - true - - - - - - - - org.codehaus.mojo - build-helper-maven-plugin - - - add-source - - add-source - - generate-sources - - - target/generated-sources/templated-sources - - - - - diff --git a/web-services/security/src/main/java/datawave/security/authorization/remote/ConditionalRemoteUserOperations.java b/web-services/security/src/main/java/datawave/security/authorization/remote/ConditionalRemoteUserOperations.java deleted file mode 100644 index 788407c714c..00000000000 --- a/web-services/security/src/main/java/datawave/security/authorization/remote/ConditionalRemoteUserOperations.java +++ /dev/null @@ -1,109 +0,0 @@ -package datawave.security.authorization.remote; - -import java.util.Collections; -import java.util.function.Function; - -import org.slf4j.Logger; -import 
org.slf4j.LoggerFactory; - -import datawave.security.authorization.AuthorizationException; -import datawave.security.authorization.DatawavePrincipal; -import datawave.security.authorization.UserOperations; -import datawave.user.AuthorizationsListBase; -import datawave.webservice.query.result.event.ResponseObjectFactory; -import datawave.webservice.result.GenericResponse; - -/** - * A conditional remote user operations will only invoke the delegate remote service base on a specified function of the specified principal. For example we may - * only need to invoke the remote user operations if we know the remote system will have additional auths that this user will need for the query logic being - * invoked. - * - * An example may be a composite query that call a local and a remote query logic. Perhaps we can already tell that the user will not be able to get any - * additional authorities from the remote system and hence the remote call will not be required. - */ -public class ConditionalRemoteUserOperations implements UserOperations { - private static final Logger log = LoggerFactory.getLogger(ConditionalRemoteUserOperations.class); - - private UserOperations delegate; - private Function condition; - private ResponseObjectFactory responseObjectFactory; - - private static final GenericResponse EMPTY_RESPONSE = new GenericResponse<>(); - - public boolean isFiltered(DatawavePrincipal principal) { - if (!condition.apply(principal)) { - if (log.isDebugEnabled()) { - log.debug("Filter " + condition + " blocking " + principal.getName() + " from " + delegate + " user operations"); - } - return true; - } else { - if (log.isDebugEnabled()) { - log.debug("Passing through filter " + condition + " for " + principal.getName() + " for " + delegate + " user operations"); - } - return false; - } - } - - @Override - public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException { - assert (delegate != null); - assert (condition != null); - 
assert (responseObjectFactory != null); - - final DatawavePrincipal principal = getDatawavePrincipal(callerObject); - - if (!isFiltered(principal)) { - return delegate.listEffectiveAuthorizations(callerObject); - } else { - AuthorizationsListBase response = responseObjectFactory.getAuthorizationsList(); - response.setUserAuths(principal.getUserDN().subjectDN(), principal.getUserDN().issuerDN(), Collections.EMPTY_LIST); - return response; - } - } - - @Override - public GenericResponse flushCachedCredentials(Object callerObject) throws AuthorizationException { - assert (delegate != null); - assert (condition != null); - assert (responseObjectFactory != null); - - final DatawavePrincipal principal = getDatawavePrincipal(callerObject); - - if (!isFiltered(principal)) { - return delegate.flushCachedCredentials(callerObject); - } else { - return EMPTY_RESPONSE; - } - } - - private DatawavePrincipal getDatawavePrincipal(Object callerObject) { - if (callerObject instanceof DatawavePrincipal) { - return (DatawavePrincipal) callerObject; - } - throw new RuntimeException("Cannot handle a " + callerObject.getClass() + ". 
Only DatawavePrincipal is accepted"); - } - - public UserOperations getDelegate() { - return delegate; - } - - public void setDelegate(UserOperations delegate) { - this.delegate = delegate; - } - - public Function getCondition() { - return condition; - } - - public void setCondition(Function condition) { - this.condition = condition; - } - - public ResponseObjectFactory getResponseObjectFactory() { - return responseObjectFactory; - } - - public void setResponseObjectFactory(ResponseObjectFactory responseObjectFactory) { - this.responseObjectFactory = responseObjectFactory; - } -} diff --git a/web-services/security/src/main/java/datawave/security/authorization/remote/RemoteUserOperationsImpl.java b/web-services/security/src/main/java/datawave/security/authorization/remote/RemoteUserOperationsImpl.java index 50f4a8cf544..42772e14c27 100644 --- a/web-services/security/src/main/java/datawave/security/authorization/remote/RemoteUserOperationsImpl.java +++ b/web-services/security/src/main/java/datawave/security/authorization/remote/RemoteUserOperationsImpl.java @@ -14,6 +14,7 @@ import datawave.security.auth.DatawaveAuthenticationMechanism; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.DatawavePrincipal; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.user.AuthorizationsListBase; import datawave.webservice.common.remote.RemoteHttpService; @@ -51,14 +52,14 @@ public void init() { @Override @Cacheable(value = "getRemoteUser", key = "{#principal}", cacheManager = "remoteOperationsCacheManager") - public DatawavePrincipal getRemoteUser(DatawavePrincipal principal) throws AuthorizationException { - log.info("Cache fault: Retrieving user for " + principal.getPrimaryUser().getDn()); - return UserOperations.super.getRemoteUser(principal); + public ProxiedUserDetails getRemoteUser(ProxiedUserDetails currentUser) throws AuthorizationException { 
+ log.info("Cache fault: Retrieving user for " + currentUser.getPrimaryUser().getDn()); + return UserOperations.super.getRemoteUser(currentUser); } @Override @Cacheable(value = "listEffectiveAuthorizations", key = "{#callerObject}", cacheManager = "remoteOperationsCacheManager") - public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException { + public AuthorizationsListBase listEffectiveAuthorizations(ProxiedUserDetails callerObject) throws AuthorizationException { init(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); log.info("Cache fault: Retrieving effective auths for " + principal.getPrimaryUser().getDn()); @@ -76,7 +77,7 @@ public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) t } @Override - public GenericResponse flushCachedCredentials(Object callerObject) throws AuthorizationException { + public GenericResponse flushCachedCredentials(ProxiedUserDetails callerObject) throws AuthorizationException { init(); final DatawavePrincipal principal = getDatawavePrincipal(callerObject); final String suffix = FLUSH_CREDS; diff --git a/web-services/security/src/main/java/datawave/security/authorization/test/TestDatawaveUserService.java b/web-services/security/src/main/java/datawave/security/authorization/test/TestDatawaveUserService.java index 4b5ba7d542d..ae8d4ecbcdc 100644 --- a/web-services/security/src/main/java/datawave/security/authorization/test/TestDatawaveUserService.java +++ b/web-services/security/src/main/java/datawave/security/authorization/test/TestDatawaveUserService.java @@ -32,13 +32,13 @@ import datawave.configuration.RefreshableScope; import datawave.configuration.spring.SpringBean; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.CachedDatawaveUserService; import datawave.security.authorization.DatawaveUser; import 
datawave.security.authorization.DatawaveUserInfo; import datawave.security.authorization.DatawaveUserService; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.util.NotEqualPropertyExpressionInterpreter; /** @@ -206,7 +206,7 @@ protected void readTestUsers() { protected List readAccumuloAuthorizations() { try { - AccumuloClient client = accumuloConnectionFactory.getClient(null, AccumuloConnectionFactory.Priority.ADMIN, new HashMap<>()); + AccumuloClient client = accumuloConnectionFactory.getClient(null, null, AccumuloConnectionFactory.Priority.ADMIN, new HashMap<>()); Authorizations auths = client.securityOperations().getUserAuthorizations(client.whoami()); return Arrays.asList(auths.toString().split("\\s*,\\s*")); } catch (Exception e) { diff --git a/web-services/security/src/main/java/datawave/security/cache/CredentialsCacheBean.java b/web-services/security/src/main/java/datawave/security/cache/CredentialsCacheBean.java index 2d86e03e6d5..45b1fb1e725 100644 --- a/web-services/security/src/main/java/datawave/security/cache/CredentialsCacheBean.java +++ b/web-services/security/src/main/java/datawave/security/cache/CredentialsCacheBean.java @@ -39,13 +39,13 @@ import datawave.configuration.ConfigurationEvent; import datawave.configuration.DatawaveEmbeddedProjectStageHolder; import datawave.configuration.RefreshLifecycle; +import datawave.core.common.connection.AccumuloConnectionFactory; import datawave.security.DnList; import datawave.security.authorization.CachedDatawaveUserService; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.DatawaveUserInfo; import datawave.security.system.AuthorizationCache; -import datawave.webservice.common.connection.AccumuloConnectionFactory; import datawave.webservice.common.exception.DatawaveWebApplicationException; import 
datawave.webservice.query.exception.QueryException; import datawave.webservice.result.GenericResponse; @@ -311,7 +311,7 @@ public GenericResponse reloadAccumuloAuthorizations() { private void retrieveAccumuloAuthorizations() throws Exception { Map trackingMap = accumuloConnectionFactory.getTrackingMap(Thread.currentThread().getStackTrace()); - AccumuloClient c = accumuloConnectionFactory.getClient(AccumuloConnectionFactory.Priority.ADMIN, trackingMap); + AccumuloClient c = accumuloConnectionFactory.getClient(null, null, AccumuloConnectionFactory.Priority.ADMIN, trackingMap); try { Authorizations auths = c.securityOperations().getUserAuthorizations(c.whoami()); HashSet authSet = new HashSet<>(); diff --git a/web-services/security/src/main/java/datawave/security/user/UserOperationsBean.java b/web-services/security/src/main/java/datawave/security/user/UserOperationsBean.java index 3b3a380c34f..e31df6d475a 100644 --- a/web-services/security/src/main/java/datawave/security/user/UserOperationsBean.java +++ b/web-services/security/src/main/java/datawave/security/user/UserOperationsBean.java @@ -28,6 +28,7 @@ import datawave.configuration.spring.SpringBean; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.UserOperations; import datawave.security.cache.CredentialsCacheBean; import datawave.security.util.WSAuthorizationsUtil; @@ -53,7 +54,6 @@ public class UserOperationsBean implements UserOperations { private CredentialsCacheBean credentialsCache; @Inject - @SpringBean(name = "ResponseObjectFactory") private ResponseObjectFactory responseObjectFactory; @Inject @@ -116,7 +116,7 @@ public AuthorizationsListBase listEffectiveAuthorizations(@DefaultValue("true") } @Override - public AuthorizationsListBase listEffectiveAuthorizations(Object p) { + public AuthorizationsListBase 
listEffectiveAuthorizations(ProxiedUserDetails p) { return listEffectiveAuthorizations(p, true); } @@ -175,15 +175,15 @@ private AuthorizationsListBase listEffectiveAuthorizations(Object p, boolean inc "application/x-protostuff"}) @PermitAll public GenericResponse flushCachedCredentials(@DefaultValue("true") @QueryParam("includeRemoteServices") boolean includeRemoteServices) { - return flushCachedCredentials(context.getCallerPrincipal(), includeRemoteServices); + return flushCachedCredentials((ProxiedUserDetails) context.getCallerPrincipal(), includeRemoteServices); } @Override - public GenericResponse flushCachedCredentials(Object callerPrincipal) { + public GenericResponse flushCachedCredentials(ProxiedUserDetails callerPrincipal) { return flushCachedCredentials(callerPrincipal, true); } - private GenericResponse flushCachedCredentials(Object callerPrincipal, boolean includeRemoteServices) { + private GenericResponse flushCachedCredentials(ProxiedUserDetails callerPrincipal, boolean includeRemoteServices) { GenericResponse response = new GenericResponse<>(); log.info("Flushing credentials for " + callerPrincipal + " from the cache."); diff --git a/web-services/security/src/test/java/datawave/security/authorization/remote/ConditionalRemoteUserOperationsTest.java b/web-services/security/src/test/java/datawave/security/authorization/remote/ConditionalRemoteUserOperationsTest.java index 7a3c3af3b8b..bac8351be2e 100644 --- a/web-services/security/src/test/java/datawave/security/authorization/remote/ConditionalRemoteUserOperationsTest.java +++ b/web-services/security/src/test/java/datawave/security/authorization/remote/ConditionalRemoteUserOperationsTest.java @@ -3,15 +3,19 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.function.Supplier; import org.junit.Test; import org.wildfly.common.Assert; import com.google.common.collect.HashMultimap; +import datawave.microservice.query.Query; import 
datawave.security.authorization.AuthorizationException; +import datawave.security.authorization.ConditionalRemoteUserOperations; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; +import datawave.security.authorization.ProxiedUserDetails; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.authorization.UserOperations; import datawave.user.AuthorizationsListBase; @@ -20,7 +24,6 @@ import datawave.webservice.dictionary.data.DescriptionBase; import datawave.webservice.dictionary.data.FieldsBase; import datawave.webservice.metadata.MetadataFieldBase; -import datawave.webservice.query.Query; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.result.EdgeQueryResponseBase; import datawave.webservice.query.result.edge.EdgeBase; @@ -41,13 +44,13 @@ private static class MockRemoteUserOperations implements UserOperations { boolean invoked = false; @Override - public AuthorizationsListBase listEffectiveAuthorizations(Object callerObject) throws AuthorizationException { + public AuthorizationsListBase listEffectiveAuthorizations(ProxiedUserDetails callerObject) throws AuthorizationException { invoked = true; return new DefaultAuthorizationsList(); } @Override - public GenericResponse flushCachedCredentials(Object callerObject) { + public GenericResponse flushCachedCredentials(ProxiedUserDetails callerObject) { invoked = true; return new GenericResponse<>(); } @@ -58,7 +61,7 @@ public void testConditional() throws AuthorizationException { MockRemoteUserOperations testOperations = new MockRemoteUserOperations(); ConditionalRemoteUserOperations testObj = new ConditionalRemoteUserOperations(); testObj.setDelegate(testOperations); - testObj.setResponseObjectFactory(new MockResponseObjectFactory()); + testObj.setAuthorizationsListBaseSupplier(() -> new MockResponseObjectFactory().getAuthorizationsList()); testObj.setCondition(a -> 
a.getProxiedUsers().size() == 1); List users = new ArrayList<>(); diff --git a/web-services/security/src/test/java/datawave/security/authorization/remote/RemoteUserOperationsImplHttpTest.java b/web-services/security/src/test/java/datawave/security/authorization/remote/RemoteUserOperationsImplHttpTest.java index b9f51f47596..a742f5a1c20 100644 --- a/web-services/security/src/test/java/datawave/security/authorization/remote/RemoteUserOperationsImplHttpTest.java +++ b/web-services/security/src/test/java/datawave/security/authorization/remote/RemoteUserOperationsImplHttpTest.java @@ -30,6 +30,7 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; +import datawave.microservice.query.Query; import datawave.security.authorization.DatawavePrincipal; import datawave.security.util.DnUtils; import datawave.user.AuthorizationsListBase; @@ -40,7 +41,6 @@ import datawave.webservice.dictionary.data.DescriptionBase; import datawave.webservice.dictionary.data.FieldsBase; import datawave.webservice.metadata.MetadataFieldBase; -import datawave.webservice.query.Query; import datawave.webservice.query.cachedresults.CacheableQueryRow; import datawave.webservice.query.result.EdgeQueryResponseBase; import datawave.webservice.query.result.edge.EdgeBase; diff --git a/web-services/security/src/test/java/datawave/security/authorization/test/TestDatawaveUserServiceTest.java b/web-services/security/src/test/java/datawave/security/authorization/test/TestDatawaveUserServiceTest.java index 519625aeffe..9d138e37deb 100644 --- a/web-services/security/src/test/java/datawave/security/authorization/test/TestDatawaveUserServiceTest.java +++ b/web-services/security/src/test/java/datawave/security/authorization/test/TestDatawaveUserServiceTest.java @@ -6,6 +6,7 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -28,6 +29,8 @@ import 
datawave.accumulo.inmemory.InMemoryAccumuloClient; import datawave.accumulo.inmemory.InMemoryInstance; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.result.ConnectionPool; import datawave.security.authorization.AuthorizationException; import datawave.security.authorization.CachedDatawaveUserService; import datawave.security.authorization.DatawaveUser; @@ -35,7 +38,6 @@ import datawave.security.authorization.DatawaveUserInfo; import datawave.security.authorization.DatawaveUserService; import datawave.security.authorization.SubjectIssuerDNPair; -import datawave.webservice.common.connection.AccumuloConnectionFactory; @RunWith(Enclosed.class) public class TestDatawaveUserServiceTest { @@ -241,28 +243,44 @@ public MockAccumuloConnectionFactory() { } @Override - public String getConnectionUserName(String poolName) { - return "test"; + public AccumuloClient getClient(String userDN, Collection proxiedDNs, Priority priority, Map trackingMap) throws Exception { + return new InMemoryAccumuloClient("root", inMemoryInstance); } @Override - public AccumuloClient getClient(Priority priority, Map trackingMap) throws Exception { + public AccumuloClient getClient(String userDN, Collection proxiedDNs, String poolName, Priority priority, Map trackingMap) + throws Exception { return new InMemoryAccumuloClient("root", inMemoryInstance); } @Override - public AccumuloClient getClient(String poolName, Priority priority, Map trackingMap) throws Exception { - return new InMemoryAccumuloClient("root", inMemoryInstance); + public void returnClient(AccumuloClient client) { + } @Override - public void returnClient(AccumuloClient client) { + public String report() { + return null; + } + + @Override + public List getConnectionPools() { + return null; + } + @Override + public int getConnectionUsagePercent() { + return 0; } @Override public Map getTrackingMap(StackTraceElement[] stackTrace) { return new HashMap<>(); } + + @Override + public void 
close() throws Exception { + + } } } diff --git a/web-services/security/src/test/java/datawave/security/cache/CredentialsCacheBeanTest.java b/web-services/security/src/test/java/datawave/security/cache/CredentialsCacheBeanTest.java index b1fd6699a53..3a3d7c6c7c6 100644 --- a/web-services/security/src/test/java/datawave/security/cache/CredentialsCacheBeanTest.java +++ b/web-services/security/src/test/java/datawave/security/cache/CredentialsCacheBeanTest.java @@ -7,8 +7,10 @@ import java.security.Principal; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.Collections; import java.util.HashSet; +import java.util.List; import java.util.Map; import java.util.Set; @@ -33,13 +35,14 @@ import com.google.common.collect.Lists; import datawave.configuration.spring.BeanProvider; +import datawave.core.common.connection.AccumuloConnectionFactory; +import datawave.core.common.result.ConnectionPool; import datawave.security.DnList; import datawave.security.authorization.DatawavePrincipal; import datawave.security.authorization.DatawaveUser; import datawave.security.authorization.DatawaveUser.UserType; import datawave.security.authorization.SubjectIssuerDNPair; import datawave.security.system.AuthorizationCache; -import datawave.webservice.common.connection.AccumuloConnectionFactory; @RunWith(Arquillian.class) public class CredentialsCacheBeanTest { @@ -170,28 +173,43 @@ public Set getCachedKeys() { private static class MockAccumuloConnectionFactory implements AccumuloConnectionFactory { @Override - public String getConnectionUserName(String poolName) { + public AccumuloClient getClient(String userDN, Collection proxiedDNs, Priority priority, Map trackingMap) { return null; } @Override - public AccumuloClient getClient(Priority priority, Map trackingMap) { + public AccumuloClient getClient(String userDN, Collection proxiedDNs, String poolName, Priority priority, Map trackingMap) { return null; } @Override - public AccumuloClient 
getClient(String poolName, Priority priority, Map trackingMap) { + public void returnClient(AccumuloClient client) { + + } + + @Override + public String report() { return null; } @Override - public void returnClient(AccumuloClient client) { + public List getConnectionPools() { + return null; + } + @Override + public int getConnectionUsagePercent() { + return 0; } @Override public Map getTrackingMap(StackTraceElement[] stackTrace) { return null; } + + @Override + public void close() throws Exception { + + } } } diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index cba4bae2e02..d6c269e5220 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 6.14.0-SNAPSHOT + 7.0.0-SNAPSHOT datawave-ws-web-root war From ee894a416913bedd9ed9c59a2d88e16a657764ed Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Mon, 20 May 2024 16:45:50 +0000 Subject: [PATCH 05/20] update submodule commits --- contrib/datawave-utils | 2 +- core/base-rest-responses | 2 +- core/in-memory-accumulo | 2 +- core/metrics-reporter | 2 +- core/utils/accumulo-utils | 2 +- core/utils/common-utils | 2 +- core/utils/metadata-utils | 2 +- core/utils/type-utils | 2 +- microservices/microservice-parent | 2 +- microservices/microservice-service-parent | 2 +- microservices/services/accumulo | 2 +- microservices/services/audit | 2 +- microservices/services/authorization | 2 +- microservices/services/config | 2 +- microservices/services/dictionary | 2 +- microservices/services/hazelcast | 2 +- microservices/services/mapreduce-query | 2 +- microservices/services/modification | 2 +- microservices/services/query | 2 +- microservices/services/query-executor | 2 +- microservices/services/query-metric | 2 +- microservices/starters/audit | 2 +- microservices/starters/cache | 2 +- microservices/starters/cached-results | 2 +- microservices/starters/datawave | 2 +- microservices/starters/metadata | 2 +- 
microservices/starters/query | 2 +- microservices/starters/query-metric | 2 +- 28 files changed, 28 insertions(+), 28 deletions(-) diff --git a/contrib/datawave-utils b/contrib/datawave-utils index 8438bce9c0b..4348fc36a35 160000 --- a/contrib/datawave-utils +++ b/contrib/datawave-utils @@ -1 +1 @@ -Subproject commit 8438bce9c0ba7f163704838d52927d547ee6efcf +Subproject commit 4348fc36a3519ca9e5d1b96ac47c3f0b64abe34b diff --git a/core/base-rest-responses b/core/base-rest-responses index f0941b049b0..9f64203a210 160000 --- a/core/base-rest-responses +++ b/core/base-rest-responses @@ -1 +1 @@ -Subproject commit f0941b049b0e1a6cef6dd82dd9f2fb86fbefbe78 +Subproject commit 9f64203a2109dd43cc0b423713ce388e7fa6c46d diff --git a/core/in-memory-accumulo b/core/in-memory-accumulo index ab81e784581..1999eb55b63 160000 --- a/core/in-memory-accumulo +++ b/core/in-memory-accumulo @@ -1 +1 @@ -Subproject commit ab81e784581d6cef04622fbf8f9a689d5aa4b616 +Subproject commit 1999eb55b635d97b098b00fe5b7a77a9729a4bae diff --git a/core/metrics-reporter b/core/metrics-reporter index cdf1d8d474a..5f3b44493cc 160000 --- a/core/metrics-reporter +++ b/core/metrics-reporter @@ -1 +1 @@ -Subproject commit cdf1d8d474a98faa592c9acac8e7817f1faf2af2 +Subproject commit 5f3b44493ccefd2a39bf365a9324496f1b02c40c diff --git a/core/utils/accumulo-utils b/core/utils/accumulo-utils index 087a6590bb8..9d6c66e5ae9 160000 --- a/core/utils/accumulo-utils +++ b/core/utils/accumulo-utils @@ -1 +1 @@ -Subproject commit 087a6590bb8d0e0b6c783ce5f22db242a42480ee +Subproject commit 9d6c66e5ae906653137b6dd7a9a4671c844c5653 diff --git a/core/utils/common-utils b/core/utils/common-utils index 69dee4a5391..a571f462c67 160000 --- a/core/utils/common-utils +++ b/core/utils/common-utils @@ -1 +1 @@ -Subproject commit 69dee4a5391895297247e261b00f98cee7833339 +Subproject commit a571f462c67f7abd9d4d7ba85b4b114c5c0b6aae diff --git a/core/utils/metadata-utils b/core/utils/metadata-utils index 1c90817bb03..570d8a933c1 160000 --- 
a/core/utils/metadata-utils +++ b/core/utils/metadata-utils @@ -1 +1 @@ -Subproject commit 1c90817bb03f491b3dd24c7ade2cbb3c6bf605cc +Subproject commit 570d8a933c1fc39d2f7a7a9735f0dcd51e97e557 diff --git a/core/utils/type-utils b/core/utils/type-utils index f821e78309b..45badd393b6 160000 --- a/core/utils/type-utils +++ b/core/utils/type-utils @@ -1 +1 @@ -Subproject commit f821e78309b6331024fda04afcc09a84f34c1e3f +Subproject commit 45badd393b6db2e2c48a03943335f0527e0f3c3f diff --git a/microservices/microservice-parent b/microservices/microservice-parent index df0eccee14c..9b9cc24e9e4 160000 --- a/microservices/microservice-parent +++ b/microservices/microservice-parent @@ -1 +1 @@ -Subproject commit df0eccee14c4c906c14679fad4948be915a5ae69 +Subproject commit 9b9cc24e9e49159e1d5130cc4267f7cd43d3999f diff --git a/microservices/microservice-service-parent b/microservices/microservice-service-parent index ebf58213c95..c7007341a45 160000 --- a/microservices/microservice-service-parent +++ b/microservices/microservice-service-parent @@ -1 +1 @@ -Subproject commit ebf58213c955214329faeb1efe85bb07e05ded8c +Subproject commit c7007341a458c18aab21914093e043dc4e0ea45c diff --git a/microservices/services/accumulo b/microservices/services/accumulo index d8586d45996..ce0008ee183 160000 --- a/microservices/services/accumulo +++ b/microservices/services/accumulo @@ -1 +1 @@ -Subproject commit d8586d459969504430779b4c958ec20f6fc2f5c2 +Subproject commit ce0008ee18387fd09f8c23daf9f85ec69485322c diff --git a/microservices/services/audit b/microservices/services/audit index 94eb6ba81e0..f2263032929 160000 --- a/microservices/services/audit +++ b/microservices/services/audit @@ -1 +1 @@ -Subproject commit 94eb6ba81e06d348ecf5a01b4c0e19bc8cd8d2ca +Subproject commit f2263032929489bdce47fbfa150f8f9d7cff5dd4 diff --git a/microservices/services/authorization b/microservices/services/authorization index 5c92e1b7665..77b999a68de 160000 --- a/microservices/services/authorization +++ 
b/microservices/services/authorization @@ -1 +1 @@ -Subproject commit 5c92e1b766587b27358b5a695103475d7f1270b3 +Subproject commit 77b999a68de843cc514bd9e332331a4d0d9eabd1 diff --git a/microservices/services/config b/microservices/services/config index 25e03153aa6..9ba7c7c877e 160000 --- a/microservices/services/config +++ b/microservices/services/config @@ -1 +1 @@ -Subproject commit 25e03153aa6a2dbf0b7496ec12f9ea2d1bb7eee4 +Subproject commit 9ba7c7c877e4b3b03e1d5f044d3a14b6232aa007 diff --git a/microservices/services/dictionary b/microservices/services/dictionary index d09519d054e..bc520ef9c11 160000 --- a/microservices/services/dictionary +++ b/microservices/services/dictionary @@ -1 +1 @@ -Subproject commit d09519d054e759760f5c1044ed3b19c53ab3470f +Subproject commit bc520ef9c11160e6aeba29c1b59b9666a011ec50 diff --git a/microservices/services/hazelcast b/microservices/services/hazelcast index 51679472b7f..3ddfa15b393 160000 --- a/microservices/services/hazelcast +++ b/microservices/services/hazelcast @@ -1 +1 @@ -Subproject commit 51679472b7fbd6fc73aec09d8c3397ea75f8dc88 +Subproject commit 3ddfa15b3933fd061e72a3e19cd85168ee2bdd6e diff --git a/microservices/services/mapreduce-query b/microservices/services/mapreduce-query index deefe1ebd6c..bdbd80ffd63 160000 --- a/microservices/services/mapreduce-query +++ b/microservices/services/mapreduce-query @@ -1 +1 @@ -Subproject commit deefe1ebd6c7450d65af478526f97ea5dc3df8ff +Subproject commit bdbd80ffd6380bc63ce83a8feda7fa0cec7c4939 diff --git a/microservices/services/modification b/microservices/services/modification index c2791ee6061..140d6534ca5 160000 --- a/microservices/services/modification +++ b/microservices/services/modification @@ -1 +1 @@ -Subproject commit c2791ee6061d811768c9d315e1574dd573271c2c +Subproject commit 140d6534ca5e7f7fcfe1f571953cb009a18058d2 diff --git a/microservices/services/query b/microservices/services/query index 85cd4e8ec57..1bb3158ce57 160000 --- a/microservices/services/query +++ 
b/microservices/services/query @@ -1 +1 @@ -Subproject commit 85cd4e8ec57a8e1f9eb5a2870959b08d3b39f058 +Subproject commit 1bb3158ce575dd8cd95aed984fde7d49b59afb48 diff --git a/microservices/services/query-executor b/microservices/services/query-executor index 2970bd342c2..50dc2eab150 160000 --- a/microservices/services/query-executor +++ b/microservices/services/query-executor @@ -1 +1 @@ -Subproject commit 2970bd342c28cd20ed93784e1da74cca601e0d23 +Subproject commit 50dc2eab1505472eeea14c578503e86311f63e90 diff --git a/microservices/services/query-metric b/microservices/services/query-metric index 251b0836642..4049b6489f1 160000 --- a/microservices/services/query-metric +++ b/microservices/services/query-metric @@ -1 +1 @@ -Subproject commit 251b0836642ef2aab454b4278429bb7318e7fd8c +Subproject commit 4049b6489f187743ef77d665e9c0139ad7675b3f diff --git a/microservices/starters/audit b/microservices/starters/audit index 42d04f6da3d..2001eaa2387 160000 --- a/microservices/starters/audit +++ b/microservices/starters/audit @@ -1 +1 @@ -Subproject commit 42d04f6da3dfa4f41921a9b4d26eaea2b0eabadc +Subproject commit 2001eaa238773dd5e3d2a948b70ab7f761a0b698 diff --git a/microservices/starters/cache b/microservices/starters/cache index 98bc3137442..754fa5a612d 160000 --- a/microservices/starters/cache +++ b/microservices/starters/cache @@ -1 +1 @@ -Subproject commit 98bc3137442eea99c36173d2e598076024d1b96b +Subproject commit 754fa5a612d86e72bcdeab4b478f89f1982aeabf diff --git a/microservices/starters/cached-results b/microservices/starters/cached-results index 098d6c0d56e..231741c8412 160000 --- a/microservices/starters/cached-results +++ b/microservices/starters/cached-results @@ -1 +1 @@ -Subproject commit 098d6c0d56ebeef1ea421903963aec8e8d3b1818 +Subproject commit 231741c841226425467fdf992734baa20e497298 diff --git a/microservices/starters/datawave b/microservices/starters/datawave index 30d55cd22e0..583209a53f1 160000 --- a/microservices/starters/datawave +++ 
b/microservices/starters/datawave @@ -1 +1 @@ -Subproject commit 30d55cd22e00c0478783265ab76886c9f0a4c5d9 +Subproject commit 583209a53f171b5717d8260aa5971cb0acedb6b4 diff --git a/microservices/starters/metadata b/microservices/starters/metadata index df0232ab5e3..360a8e8a73a 160000 --- a/microservices/starters/metadata +++ b/microservices/starters/metadata @@ -1 +1 @@ -Subproject commit df0232ab5e3d399a219f87c2e65e66f85bef5fdc +Subproject commit 360a8e8a73ace5a428d4a217fdf71c9cf4f93df6 diff --git a/microservices/starters/query b/microservices/starters/query index aa88a610f76..103d6e259a4 160000 --- a/microservices/starters/query +++ b/microservices/starters/query @@ -1 +1 @@ -Subproject commit aa88a610f767c97f16b4137c84f9dff61e9f239c +Subproject commit 103d6e259a45a4fc7956fd6be5126ea895099f8b diff --git a/microservices/starters/query-metric b/microservices/starters/query-metric index 9ae002a7927..cc6808372a8 160000 --- a/microservices/starters/query-metric +++ b/microservices/starters/query-metric @@ -1 +1 @@ -Subproject commit 9ae002a79276bc804272e93ddf239938bbaeb8e0 +Subproject commit cc6808372a8e36f7d51b40195136ed539b671225 From 2de7fdf1b860e13efe3b0382ff3318945e16326b Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Mon, 20 May 2024 19:45:48 +0000 Subject: [PATCH 06/20] updated to tagged release --- common-test/pom.xml | 2 +- contrib/datawave-quickstart/docker/pom.xml | 2 +- core/base-rest-responses | 2 +- core/cached-results/pom.xml | 2 +- core/common-util/pom.xml | 2 +- core/common/pom.xml | 2 +- core/connection-pool/pom.xml | 2 +- core/in-memory-accumulo | 2 +- core/map-reduce/pom.xml | 2 +- core/metrics-reporter | 2 +- core/modification/pom.xml | 2 +- core/pom.xml | 2 +- core/query/pom.xml | 2 +- core/utils/accumulo-utils | 2 +- core/utils/common-utils | 2 +- core/utils/metadata-utils | 2 +- core/utils/pom.xml | 2 +- core/utils/type-utils | 2 +- docker/docker-compose.yml | 26 ++++++++-------- docs/pom.xml | 2 +- microservices/configcheck/pom.xml | 2 +- 
microservices/microservice-parent | 2 +- microservices/microservice-service-parent | 2 +- microservices/pom.xml | 2 +- microservices/services/accumulo | 2 +- microservices/services/audit | 2 +- microservices/services/authorization | 2 +- microservices/services/config | 2 +- microservices/services/dictionary | 2 +- microservices/services/hazelcast | 2 +- microservices/services/mapreduce-query | 2 +- microservices/services/modification | 2 +- microservices/services/pom.xml | 2 +- microservices/services/query | 2 +- microservices/services/query-executor | 2 +- microservices/services/query-metric | 2 +- microservices/starters/audit | 2 +- microservices/starters/cache | 2 +- microservices/starters/cached-results | 2 +- microservices/starters/datawave | 2 +- microservices/starters/metadata | 2 +- microservices/starters/pom.xml | 2 +- microservices/starters/query | 2 +- microservices/starters/query-metric | 2 +- pom.xml | 30 +++++++++---------- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- warehouse/assemble/datawave/pom.xml | 2 +- warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- warehouse/data-dictionary-core/pom.xml | 2 +- warehouse/edge-dictionary-core/pom.xml | 2 +- .../edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- warehouse/ingest-core/pom.xml | 2 +- warehouse/ingest-csv/pom.xml | 2 +- warehouse/ingest-json/pom.xml | 2 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 2 +- warehouse/query-core/pom.xml | 2 +- 
warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 2 +- web-services/atom/pom.xml | 2 +- web-services/cached-results/pom.xml | 2 +- web-services/client/pom.xml | 2 +- web-services/common-util/pom.xml | 2 +- web-services/common/pom.xml | 2 +- web-services/deploy/application/pom.xml | 2 +- web-services/deploy/configuration/pom.xml | 2 +- web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- .../spring-framework-integration/pom.xml | 2 +- web-services/dictionary/pom.xml | 2 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 2 +- web-services/metrics/pom.xml | 2 +- web-services/model/pom.xml | 2 +- web-services/modification/pom.xml | 2 +- web-services/pom.xml | 2 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 2 +- web-services/rest-api/pom.xml | 2 +- web-services/security/pom.xml | 2 +- web-services/web-root/pom.xml | 2 +- 102 files changed, 128 insertions(+), 128 deletions(-) diff --git a/common-test/pom.xml b/common-test/pom.xml index 5230d7f1272..9a48af992a6 100644 --- a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index 772c424f5e8..c79ed71c686 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 ../../../pom.xml quickstart diff --git a/core/base-rest-responses b/core/base-rest-responses index 
9f64203a210..a5fbf0ce72e 160000 --- a/core/base-rest-responses +++ b/core/base-rest-responses @@ -1 +1 @@ -Subproject commit 9f64203a2109dd43cc0b423713ce388e7fa6c46d +Subproject commit a5fbf0ce72e5a1592b702d7913edc4a64d08f89f diff --git a/core/cached-results/pom.xml b/core/cached-results/pom.xml index 591bd5d39ce..17ffa7719c5 100644 --- a/core/cached-results/pom.xml +++ b/core/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-cached-results ${project.artifactId} diff --git a/core/common-util/pom.xml b/core/common-util/pom.xml index 5846437d977..556f3089f4e 100644 --- a/core/common-util/pom.xml +++ b/core/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-common-util ${project.artifactId} diff --git a/core/common/pom.xml b/core/common/pom.xml index 01f207a1580..e1d29b24896 100644 --- a/core/common/pom.xml +++ b/core/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-common ${project.artifactId} diff --git a/core/connection-pool/pom.xml b/core/connection-pool/pom.xml index bbeab199f17..f05e0bff334 100644 --- a/core/connection-pool/pom.xml +++ b/core/connection-pool/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-connection-pool ${project.artifactId} diff --git a/core/in-memory-accumulo b/core/in-memory-accumulo index 1999eb55b63..529eafd73e9 160000 --- a/core/in-memory-accumulo +++ b/core/in-memory-accumulo @@ -1 +1 @@ -Subproject commit 1999eb55b635d97b098b00fe5b7a77a9729a4bae +Subproject commit 529eafd73e948ae82453dcd6642de318307d60b0 diff --git a/core/map-reduce/pom.xml b/core/map-reduce/pom.xml index 998c9ff23da..e50971623ff 100644 --- a/core/map-reduce/pom.xml +++ b/core/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-map-reduce 
${project.artifactId} diff --git a/core/metrics-reporter b/core/metrics-reporter index 5f3b44493cc..cd2fab0762c 160000 --- a/core/metrics-reporter +++ b/core/metrics-reporter @@ -1 +1 @@ -Subproject commit 5f3b44493ccefd2a39bf365a9324496f1b02c40c +Subproject commit cd2fab0762c3c80ab28c21e1cbc76958b6345eae diff --git a/core/modification/pom.xml b/core/modification/pom.xml index 553f3194d32..f23fc169ef3 100644 --- a/core/modification/pom.xml +++ b/core/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-modification ${project.artifactId} diff --git a/core/pom.xml b/core/pom.xml index a4980953cb3..70c7fd72bbd 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 gov.nsa.datawave.core datawave-core-parent diff --git a/core/query/pom.xml b/core/query/pom.xml index cb4e29e6556..c142147e80e 100644 --- a/core/query/pom.xml +++ b/core/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core-query ${project.artifactId} diff --git a/core/utils/accumulo-utils b/core/utils/accumulo-utils index 9d6c66e5ae9..ffd5d3e8625 160000 --- a/core/utils/accumulo-utils +++ b/core/utils/accumulo-utils @@ -1 +1 @@ -Subproject commit 9d6c66e5ae906653137b6dd7a9a4671c844c5653 +Subproject commit ffd5d3e862579d358ada5754f64b6db110f0d26c diff --git a/core/utils/common-utils b/core/utils/common-utils index a571f462c67..2810ed2bdd7 160000 --- a/core/utils/common-utils +++ b/core/utils/common-utils @@ -1 +1 @@ -Subproject commit a571f462c67f7abd9d4d7ba85b4b114c5c0b6aae +Subproject commit 2810ed2bdd7733b7ec98fb4bf470cc070443f5bc diff --git a/core/utils/metadata-utils b/core/utils/metadata-utils index 570d8a933c1..2948b5591e7 160000 --- a/core/utils/metadata-utils +++ b/core/utils/metadata-utils @@ -1 +1 @@ -Subproject commit 570d8a933c1fc39d2f7a7a9735f0dcd51e97e557 +Subproject commit 
2948b5591e7ed0077c4c490a205bf301b14511c8 diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 53a9aa8dc21..3033d4a9d4d 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0-SNAPSHOT + 7.0.0 gov.nsa.datawave.core datawave-utils-parent diff --git a/core/utils/type-utils b/core/utils/type-utils index 45badd393b6..6844e8a7ea3 160000 --- a/core/utils/type-utils +++ b/core/utils/type-utils @@ -1 +1 @@ -Subproject commit 45badd393b6db2e2c48a03943335f0527e0f3c3f +Subproject commit 6844e8a7ea3956c9d9c80551f8db1d31aa93f02a diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 51cc9e24d78..23167518282 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -9,7 +9,7 @@ services: - quickstart # To run the wildfly webservice, change `--accumulo` to `--web` command: ["datawave-bootstrap.sh", "--accumulo"] - image: datawave/quickstart-compose:7.0.0-SNAPSHOT + image: datawave/quickstart-compose:7.0.1-SNAPSHOT environment: - DW_CONTAINER_HOST=quickstart - DW_DATAWAVE_WEB_JAVA_OPTS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:8787 -Duser.timezone=GMT -Dfile.encoding=UTF-8 -Djava.net.preferIPv4Stack=true @@ -122,7 +122,7 @@ services: configuration: entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5009","-jar","app.jar" ] - image: datawave/config-service:4.0.0-SNAPSHOT + image: datawave/config-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,native,open_actuator @@ -146,7 +146,7 @@ services: condition: service_started cache: - image: datawave/hazelcast-service:4.0.0-SNAPSHOT + image: datawave/hazelcast-service:4.0.1-SNAPSHOT scale: 1 command: - --spring.profiles.active=consul,compose,remoteauth @@ -174,7 +174,7 @@ services: authorization: entrypoint: [ "java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5008","-jar","app.jar" ] - image: 
datawave/authorization-service:4.0.0-SNAPSHOT + image: datawave/authorization-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,mock,compose,federation @@ -203,7 +203,7 @@ services: profiles: - accumulo - full - image: datawave/accumulo-service:4.0.0-SNAPSHOT + image: datawave/accumulo-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -236,7 +236,7 @@ services: condition: service_healthy audit: - image: datawave/audit-service:4.0.0-SNAPSHOT + image: datawave/audit-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -270,7 +270,7 @@ services: metrics: entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007","-jar","app.jar"] - image: datawave/query-metric-service:4.0.0-SNAPSHOT + image: datawave/query-metric-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -307,7 +307,7 @@ services: profiles: - dictionary - full - image: datawave/dictionary-service:4.0.0-SNAPSHOT + image: datawave/dictionary-service:4.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth @@ -355,7 +355,7 @@ services: # If you want to test cached results, set the CACHED_RESULTS environment variable to 'true' query: entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar"] - image: datawave/query-service:1.0.0-SNAPSHOT + image: datawave/query-service:1.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth,querymessaging,metricssource,query,mrquery,cachedresults,federation @@ -399,7 +399,7 @@ services: profiles: - full entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005","-jar","app.jar"] - 
image: datawave/mapreduce-query-service:1.0.0-SNAPSHOT + image: datawave/mapreduce-query-service:1.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth,query,mrquery,federation @@ -437,7 +437,7 @@ services: executor-pool1: entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5006","-jar","app.jar"] - image: datawave/query-executor-service:1.0.0-SNAPSHOT + image: datawave/query-executor-service:1.0.1-SNAPSHOT command: - --spring.application.name=executor-pool1 - --spring.cloud.config.name=executor @@ -486,7 +486,7 @@ services: profiles: - pool2 - full - image: datawave/query-executor-service:1.0.0-SNAPSHOT + image: datawave/query-executor-service:1.0.1-SNAPSHOT command: - --spring.application.name=executor-pool2 - --spring.cloud.config.name=executor @@ -533,7 +533,7 @@ services: profiles: - modification - full - image: datawave/modification-service:1.0.0-SNAPSHOT + image: datawave/modification-service:1.0.1-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth,query diff --git a/docs/pom.xml b/docs/pom.xml index b02858f5fb6..14e1e106286 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-docs diff --git a/microservices/configcheck/pom.xml b/microservices/configcheck/pom.xml index a7f08574ace..4ce45fbc9c2 100644 --- a/microservices/configcheck/pom.xml +++ b/microservices/configcheck/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave.microservice datawave-microservice-configcheck - 7.0.0-SNAPSHOT + 7.0.0 ${project.artifactId} diff --git a/microservices/microservice-parent b/microservices/microservice-parent index 9b9cc24e9e4..94e402333c1 160000 --- a/microservices/microservice-parent +++ b/microservices/microservice-parent @@ -1 +1 @@ -Subproject commit 9b9cc24e9e49159e1d5130cc4267f7cd43d3999f +Subproject commit 94e402333c16767ae91c01b60a9ff66b5aaafda1 
diff --git a/microservices/microservice-service-parent b/microservices/microservice-service-parent index c7007341a45..65cda7b2c52 160000 --- a/microservices/microservice-service-parent +++ b/microservices/microservice-service-parent @@ -1 +1 @@ -Subproject commit c7007341a458c18aab21914093e043dc4e0ea45c +Subproject commit 65cda7b2c526af5a3a9791b85a88dbd2422bf690 diff --git a/microservices/pom.xml b/microservices/pom.xml index 037d75087fe..550212f36cf 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/accumulo b/microservices/services/accumulo index ce0008ee183..a8f79588fc7 160000 --- a/microservices/services/accumulo +++ b/microservices/services/accumulo @@ -1 +1 @@ -Subproject commit ce0008ee18387fd09f8c23daf9f85ec69485322c +Subproject commit a8f79588fc7633e604a7ad0aa74a49b51c9431a6 diff --git a/microservices/services/audit b/microservices/services/audit index f2263032929..a38318e9a2d 160000 --- a/microservices/services/audit +++ b/microservices/services/audit @@ -1 +1 @@ -Subproject commit f2263032929489bdce47fbfa150f8f9d7cff5dd4 +Subproject commit a38318e9a2d62bf7467fbc6249e74c787a36ead2 diff --git a/microservices/services/authorization b/microservices/services/authorization index 77b999a68de..8ea70e1479e 160000 --- a/microservices/services/authorization +++ b/microservices/services/authorization @@ -1 +1 @@ -Subproject commit 77b999a68de843cc514bd9e332331a4d0d9eabd1 +Subproject commit 8ea70e1479e013e9b12645614a8361bfa9f7415a diff --git a/microservices/services/config b/microservices/services/config index 9ba7c7c877e..46208a93df2 160000 --- a/microservices/services/config +++ b/microservices/services/config @@ -1 +1 @@ -Subproject commit 9ba7c7c877e4b3b03e1d5f044d3a14b6232aa007 +Subproject commit 46208a93df217435326cfff0edfe9a95e880ce67 diff --git 
a/microservices/services/dictionary b/microservices/services/dictionary index bc520ef9c11..c07c1148caa 160000 --- a/microservices/services/dictionary +++ b/microservices/services/dictionary @@ -1 +1 @@ -Subproject commit bc520ef9c11160e6aeba29c1b59b9666a011ec50 +Subproject commit c07c1148caa6d757326f8541735214fa9771952b diff --git a/microservices/services/hazelcast b/microservices/services/hazelcast index 3ddfa15b393..76edd572933 160000 --- a/microservices/services/hazelcast +++ b/microservices/services/hazelcast @@ -1 +1 @@ -Subproject commit 3ddfa15b3933fd061e72a3e19cd85168ee2bdd6e +Subproject commit 76edd5729333ecf4d8a038f7e2b4585c68336017 diff --git a/microservices/services/mapreduce-query b/microservices/services/mapreduce-query index bdbd80ffd63..814d56dde06 160000 --- a/microservices/services/mapreduce-query +++ b/microservices/services/mapreduce-query @@ -1 +1 @@ -Subproject commit bdbd80ffd6380bc63ce83a8feda7fa0cec7c4939 +Subproject commit 814d56dde0699b9482bea398a4046ca7395e627d diff --git a/microservices/services/modification b/microservices/services/modification index 140d6534ca5..9c7f233c0d7 160000 --- a/microservices/services/modification +++ b/microservices/services/modification @@ -1 +1 @@ -Subproject commit 140d6534ca5e7f7fcfe1f571953cb009a18058d2 +Subproject commit 9c7f233c0d787116e6d950bbacc647e3ff7a113b diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index 29feb7d3fcb..af5076bb24a 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-microservice-service-build-parent pom diff --git a/microservices/services/query b/microservices/services/query index 1bb3158ce57..456e37aab5e 160000 --- a/microservices/services/query +++ b/microservices/services/query @@ -1 +1 @@ -Subproject commit 1bb3158ce575dd8cd95aed984fde7d49b59afb48 +Subproject commit 456e37aab5e2316e20ed3ab6d1e16a44ce57dd18 
diff --git a/microservices/services/query-executor b/microservices/services/query-executor index 50dc2eab150..3da942f90a7 160000 --- a/microservices/services/query-executor +++ b/microservices/services/query-executor @@ -1 +1 @@ -Subproject commit 50dc2eab1505472eeea14c578503e86311f63e90 +Subproject commit 3da942f90a783e617729dbb46ea7a134537ccaa5 diff --git a/microservices/services/query-metric b/microservices/services/query-metric index 4049b6489f1..2a8e390424a 160000 --- a/microservices/services/query-metric +++ b/microservices/services/query-metric @@ -1 +1 @@ -Subproject commit 4049b6489f187743ef77d665e9c0139ad7675b3f +Subproject commit 2a8e390424a0cd262c647385581d5dc5f181bfe0 diff --git a/microservices/starters/audit b/microservices/starters/audit index 2001eaa2387..c0f8f8fde2a 160000 --- a/microservices/starters/audit +++ b/microservices/starters/audit @@ -1 +1 @@ -Subproject commit 2001eaa238773dd5e3d2a948b70ab7f761a0b698 +Subproject commit c0f8f8fde2af63b716389220499e0854b8a2043e diff --git a/microservices/starters/cache b/microservices/starters/cache index 754fa5a612d..17c220c186b 160000 --- a/microservices/starters/cache +++ b/microservices/starters/cache @@ -1 +1 @@ -Subproject commit 754fa5a612d86e72bcdeab4b478f89f1982aeabf +Subproject commit 17c220c186bcaf68e42d205b4f45bedb16961634 diff --git a/microservices/starters/cached-results b/microservices/starters/cached-results index 231741c8412..b70471d296b 160000 --- a/microservices/starters/cached-results +++ b/microservices/starters/cached-results @@ -1 +1 @@ -Subproject commit 231741c841226425467fdf992734baa20e497298 +Subproject commit b70471d296b43c06b601cbf8eac261540c78373c diff --git a/microservices/starters/datawave b/microservices/starters/datawave index 583209a53f1..288599251d0 160000 --- a/microservices/starters/datawave +++ b/microservices/starters/datawave @@ -1 +1 @@ -Subproject commit 583209a53f171b5717d8260aa5971cb0acedb6b4 +Subproject commit 288599251d0e64031a5ab216e865b479d864c844 diff 
--git a/microservices/starters/metadata b/microservices/starters/metadata index 360a8e8a73a..8cfe0a431dc 160000 --- a/microservices/starters/metadata +++ b/microservices/starters/metadata @@ -1 +1 @@ -Subproject commit 360a8e8a73ace5a428d4a217fdf71c9cf4f93df6 +Subproject commit 8cfe0a431dcdd2fa31395ec6b656cc082ecf7df0 diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index 0200a781287..adbd6d1ce8d 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-microservice-starter-build-parent pom diff --git a/microservices/starters/query b/microservices/starters/query index 103d6e259a4..eaef27ea9b2 160000 --- a/microservices/starters/query +++ b/microservices/starters/query @@ -1 +1 @@ -Subproject commit 103d6e259a45a4fc7956fd6be5126ea895099f8b +Subproject commit eaef27ea9b21cf07a6c8b513a27c63e51454ee0a diff --git a/microservices/starters/query-metric b/microservices/starters/query-metric index cc6808372a8..bf3c02dcf4d 160000 --- a/microservices/starters/query-metric +++ b/microservices/starters/query-metric @@ -1 +1 @@ -Subproject commit cc6808372a8e36f7d51b40195136ed539b671225 +Subproject commit bf3c02dcf4de49bf1060205730c508a7d3934499 diff --git a/pom.xml b/pom.xml index be9e7852f82..1b555463f24 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. 
@@ -74,7 +74,7 @@ 1.3 4.5.13 4.4.8 - 4.0.0-SNAPSHOT + 4.0.0 9.4.21.Final 2.10.0.pr1 1.9.13 @@ -99,19 +99,19 @@ 7.5.0 2.5.2 1.6.0 - 4.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 3.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 1.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 3.0.0-SNAPSHOT - 1.0.0-SNAPSHOT - 4.0.0-SNAPSHOT - 3.0.0-SNAPSHOT + 4.0.0 + 4.0.0 + 4.0.0 + 4.0.0 + 4.0.0 + 3.0.0 + 4.0.0 + 1.0.0 + 4.0.0 + 3.0.0 + 1.0.0 + 4.0.0 + 3.0.0 1.2 2.23.0 8.0.28 diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index 94bbd39ca26..9ade5414166 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index 1e05bb334e7..c09f1f593ed 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index e4142a3184c..ec61330ac90 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-age-off ${project.artifactId} diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index b122388d412..9499e5e79f6 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 7.0.0-SNAPSHOT + 7.0.0 assemble-datawave jar diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index a52be9dd2a4..7c6387e17e5 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 
assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index 634fcc0394c..325d6fb3d6d 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 7.0.0-SNAPSHOT + 7.0.0 assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml b/warehouse/common/pom.xml index d42d6252562..7398d3e842a 100644 --- a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index 4795c111cdd..7d270e1399e 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-core jar diff --git a/warehouse/data-dictionary-core/pom.xml b/warehouse/data-dictionary-core/pom.xml index c30ca957ff9..a93969c1431 100644 --- a/warehouse/data-dictionary-core/pom.xml +++ b/warehouse/data-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-data-dictionary-core jar diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index 987ef739b67..3969bc0ee3b 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-edge-dictionary-core jar diff --git a/warehouse/edge-model-configuration-core/pom.xml b/warehouse/edge-model-configuration-core/pom.xml index a200c084250..12be4d30b55 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-edge-model-configuration-core jar diff --git 
a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index 89d2237653e..6103f95daa3 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml b/warehouse/ingest-configuration/pom.xml index 2a487fe2347..a38112c92da 100644 --- a/warehouse/ingest-configuration/pom.xml +++ b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-configuration diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index dd133501dce..32233b1d3a2 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-core jar diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index 72bb5d81424..3a084ea7aa4 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-csv jar diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index 679101668ca..35d375444a2 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-json jar diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index b15a93b6bd2..116db619628 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index 5a6319a60b0..7e68eb8ab93 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ 
-4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index 3489e5d1f86..b1a3f019947 100644 --- a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-ssdeep diff --git a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index a4b63d80249..6e73574e166 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 2b21d8c948d..7fa6c9afe28 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-metrics-core jar diff --git a/warehouse/ops-tools/config-compare/pom.xml b/warehouse/ops-tools/config-compare/pom.xml index f1a7dec1a7f..a5b0fa60231 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index 7ce29016e2f..5a4f73ca377 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index 9533fce3c8e..3c09428e7f4 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave 
datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index 447d4212ed2..d1fbd7bb320 100644 --- a/warehouse/pom.xml +++ b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-warehouse-parent pom diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index 86f0f2dac38..5083544ca67 100644 --- a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-query-core jar diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index 63d7f06694b..c6705588161 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index c26ad6862b5..7b3bd49cfe5 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index 39c90d5ba5b..0d1a99aba07 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-accumulo ejb diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index 240a4eaf46f..d75271e5a8f 100644 --- a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-atom ejb diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index 2f162079a8a..9a15e1a6764 100644 --- 
a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-cached-results ejb diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index a60f8005a36..0c77d6c193c 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-client jar diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index ca2930bcaa9..4760dc28da9 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-common-util jar diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index a21a7d039ef..e023d6cd864 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-common ejb diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index 2222cf375dd..f2fdee7ccaa 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-deploy-application ear diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index dbb47b8f28d..9b976190148 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-deploy-configuration jar diff --git a/web-services/deploy/docs/pom.xml b/web-services/deploy/docs/pom.xml index d8596cc1e12..01eeb59e685 100644 --- a/web-services/deploy/docs/pom.xml +++ 
b/web-services/deploy/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-deploy-docs war diff --git a/web-services/deploy/pom.xml b/web-services/deploy/pom.xml index 1488346706c..360bb0e4027 100644 --- a/web-services/deploy/pom.xml +++ b/web-services/deploy/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 gov.nsa.datawave.webservices datawave-ws-deploy-parent diff --git a/web-services/deploy/spring-framework-integration/pom.xml b/web-services/deploy/spring-framework-integration/pom.xml index 34fed6ecbca..ee34ad3c616 100644 --- a/web-services/deploy/spring-framework-integration/pom.xml +++ b/web-services/deploy/spring-framework-integration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0-SNAPSHOT + 7.0.0 spring-framework-integration ${project.artifactId} diff --git a/web-services/dictionary/pom.xml b/web-services/dictionary/pom.xml index 210a93a4e30..be9c2b383d8 100644 --- a/web-services/dictionary/pom.xml +++ b/web-services/dictionary/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-dictionary ejb diff --git a/web-services/examples/client-login/pom.xml b/web-services/examples/client-login/pom.xml index 0a221cac691..c0930e03558 100644 --- a/web-services/examples/client-login/pom.xml +++ b/web-services/examples/client-login/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-examples-client-login ejb diff --git a/web-services/examples/http-client/pom.xml b/web-services/examples/http-client/pom.xml index 5804e3e60e6..61922d4f1ca 100644 --- a/web-services/examples/http-client/pom.xml +++ b/web-services/examples/http-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-examples-http-client jar diff --git 
a/web-services/examples/jms-client/pom.xml b/web-services/examples/jms-client/pom.xml index 54c4cdb4d91..8dc609d7954 100644 --- a/web-services/examples/jms-client/pom.xml +++ b/web-services/examples/jms-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-examples-jms-client jar diff --git a/web-services/examples/pom.xml b/web-services/examples/pom.xml index 542cd0c2cf6..16d48731c02 100644 --- a/web-services/examples/pom.xml +++ b/web-services/examples/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-examples-parent pom diff --git a/web-services/examples/query-war/pom.xml b/web-services/examples/query-war/pom.xml index 07d32e32e54..eb1aae6675f 100644 --- a/web-services/examples/query-war/pom.xml +++ b/web-services/examples/query-war/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-examples-query-war war diff --git a/web-services/map-reduce-embedded/pom.xml b/web-services/map-reduce-embedded/pom.xml index 3aa13fb87fa..0418d8af139 100644 --- a/web-services/map-reduce-embedded/pom.xml +++ b/web-services/map-reduce-embedded/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-map-reduce-embedded jar diff --git a/web-services/map-reduce-status/pom.xml b/web-services/map-reduce-status/pom.xml index acbde4cd14e..64cae517247 100644 --- a/web-services/map-reduce-status/pom.xml +++ b/web-services/map-reduce-status/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-map-reduce-status ejb diff --git a/web-services/map-reduce/pom.xml b/web-services/map-reduce/pom.xml index 5821613668b..19df5bdd010 100644 --- a/web-services/map-reduce/pom.xml +++ b/web-services/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 
datawave-ws-map-reduce ejb diff --git a/web-services/metrics/pom.xml b/web-services/metrics/pom.xml index 88627fe3c0a..2238dcc5603 100644 --- a/web-services/metrics/pom.xml +++ b/web-services/metrics/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-metrics ejb diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index 57afbcd8ed9..7121614b1ce 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-model ejb diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index edaeb5309cf..c709c254c83 100644 --- a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-modification ejb diff --git a/web-services/pom.xml b/web-services/pom.xml index 91a81aba0f9..2cf7b9755d4 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0-SNAPSHOT + 7.0.0 gov.nsa.datawave.webservices datawave-ws-parent diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index bb6c8a9a8cd..a247469cb02 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index f523cbb2e83..7baa010cefb 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-query ejb diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index 5fa492c4279..32a2cbf7435 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml 
@@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-rest-api war diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index d4cc3923f57..e133ae18d12 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-security ejb diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index d6c269e5220..ff76a98b539 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0-SNAPSHOT + 7.0.0 datawave-ws-web-root war From 783268fd3e7b9be88c6888c8cb8ea333b5b06f9e Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Mon, 20 May 2024 19:53:06 +0000 Subject: [PATCH 07/20] updated to next snapshot release --- common-test/pom.xml | 2 +- contrib/datawave-quickstart/docker/pom.xml | 2 +- core/cached-results/pom.xml | 2 +- core/common-util/pom.xml | 2 +- core/common/pom.xml | 2 +- core/connection-pool/pom.xml | 2 +- core/map-reduce/pom.xml | 2 +- core/modification/pom.xml | 2 +- core/pom.xml | 2 +- core/query/pom.xml | 2 +- core/utils/pom.xml | 2 +- docs/pom.xml | 2 +- microservices/configcheck/pom.xml | 2 +- microservices/pom.xml | 2 +- microservices/services/pom.xml | 2 +- microservices/starters/pom.xml | 2 +- pom.xml | 2 +- warehouse/accumulo-extensions/pom.xml | 2 +- warehouse/age-off-utils/pom.xml | 2 +- warehouse/age-off/pom.xml | 2 +- warehouse/assemble/datawave/pom.xml | 2 +- warehouse/assemble/pom.xml | 2 +- warehouse/assemble/webservice/pom.xml | 2 +- warehouse/common/pom.xml | 2 +- warehouse/core/pom.xml | 2 +- warehouse/data-dictionary-core/pom.xml | 2 +- warehouse/edge-dictionary-core/pom.xml | 2 +- warehouse/edge-model-configuration-core/pom.xml | 2 +- warehouse/index-stats/pom.xml | 2 +- warehouse/ingest-configuration/pom.xml | 2 +- warehouse/ingest-core/pom.xml | 2 +- 
warehouse/ingest-csv/pom.xml | 2 +- warehouse/ingest-json/pom.xml | 2 +- warehouse/ingest-nyctlc/pom.xml | 2 +- warehouse/ingest-scripts/pom.xml | 2 +- warehouse/ingest-ssdeep/pom.xml | 2 +- warehouse/ingest-wikipedia/pom.xml | 2 +- warehouse/metrics-core/pom.xml | 2 +- warehouse/ops-tools/config-compare/pom.xml | 2 +- warehouse/ops-tools/index-validation/pom.xml | 2 +- warehouse/ops-tools/pom.xml | 2 +- warehouse/pom.xml | 2 +- warehouse/query-core/pom.xml | 2 +- warehouse/regression-testing/pom.xml | 2 +- warehouse/ssdeep-common/pom.xml | 2 +- web-services/accumulo/pom.xml | 2 +- web-services/atom/pom.xml | 2 +- web-services/cached-results/pom.xml | 2 +- web-services/client/pom.xml | 2 +- web-services/common-util/pom.xml | 2 +- web-services/common/pom.xml | 2 +- web-services/deploy/application/pom.xml | 2 +- web-services/deploy/configuration/pom.xml | 2 +- web-services/deploy/docs/pom.xml | 2 +- web-services/deploy/pom.xml | 2 +- web-services/deploy/spring-framework-integration/pom.xml | 2 +- web-services/dictionary/pom.xml | 2 +- web-services/examples/client-login/pom.xml | 2 +- web-services/examples/http-client/pom.xml | 2 +- web-services/examples/jms-client/pom.xml | 2 +- web-services/examples/pom.xml | 2 +- web-services/examples/query-war/pom.xml | 2 +- web-services/map-reduce-embedded/pom.xml | 2 +- web-services/map-reduce-status/pom.xml | 2 +- web-services/map-reduce/pom.xml | 2 +- web-services/metrics/pom.xml | 2 +- web-services/model/pom.xml | 2 +- web-services/modification/pom.xml | 2 +- web-services/pom.xml | 2 +- web-services/query-websocket/pom.xml | 2 +- web-services/query/pom.xml | 2 +- web-services/rest-api/pom.xml | 2 +- web-services/security/pom.xml | 2 +- web-services/web-root/pom.xml | 2 +- 74 files changed, 74 insertions(+), 74 deletions(-) diff --git a/common-test/pom.xml b/common-test/pom.xml index 9a48af992a6..aba3e9f024b 100644 --- a/common-test/pom.xml +++ b/common-test/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 
7.0.1-SNAPSHOT datawave-common-test ${project.artifactId} diff --git a/contrib/datawave-quickstart/docker/pom.xml b/contrib/datawave-quickstart/docker/pom.xml index c79ed71c686..007aaebc197 100644 --- a/contrib/datawave-quickstart/docker/pom.xml +++ b/contrib/datawave-quickstart/docker/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT ../../../pom.xml quickstart diff --git a/core/cached-results/pom.xml b/core/cached-results/pom.xml index 17ffa7719c5..55be5a7838b 100644 --- a/core/cached-results/pom.xml +++ b/core/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-cached-results ${project.artifactId} diff --git a/core/common-util/pom.xml b/core/common-util/pom.xml index 556f3089f4e..6875f21364b 100644 --- a/core/common-util/pom.xml +++ b/core/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-common-util ${project.artifactId} diff --git a/core/common/pom.xml b/core/common/pom.xml index e1d29b24896..ea16dc9a11b 100644 --- a/core/common/pom.xml +++ b/core/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-common ${project.artifactId} diff --git a/core/connection-pool/pom.xml b/core/connection-pool/pom.xml index f05e0bff334..7362abb1741 100644 --- a/core/connection-pool/pom.xml +++ b/core/connection-pool/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-connection-pool ${project.artifactId} diff --git a/core/map-reduce/pom.xml b/core/map-reduce/pom.xml index e50971623ff..273a6022c12 100644 --- a/core/map-reduce/pom.xml +++ b/core/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-map-reduce ${project.artifactId} diff --git a/core/modification/pom.xml b/core/modification/pom.xml index f23fc169ef3..a09c7598715 100644 --- 
a/core/modification/pom.xml +++ b/core/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-modification ${project.artifactId} diff --git a/core/pom.xml b/core/pom.xml index 70c7fd72bbd..e2eb690f216 100644 --- a/core/pom.xml +++ b/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT gov.nsa.datawave.core datawave-core-parent diff --git a/core/query/pom.xml b/core/query/pom.xml index c142147e80e..1019818ea56 100644 --- a/core/query/pom.xml +++ b/core/query/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core-query ${project.artifactId} diff --git a/core/utils/pom.xml b/core/utils/pom.xml index 3033d4a9d4d..c139b61fc72 100644 --- a/core/utils/pom.xml +++ b/core/utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.core datawave-core-parent - 7.0.0 + 7.0.1-SNAPSHOT gov.nsa.datawave.core datawave-utils-parent diff --git a/docs/pom.xml b/docs/pom.xml index 14e1e106286..f3a2a0e0403 100644 --- a/docs/pom.xml +++ b/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-docs diff --git a/microservices/configcheck/pom.xml b/microservices/configcheck/pom.xml index 4ce45fbc9c2..769ba2328b5 100644 --- a/microservices/configcheck/pom.xml +++ b/microservices/configcheck/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave.microservice datawave-microservice-configcheck - 7.0.0 + 7.0.1-SNAPSHOT ${project.artifactId} diff --git a/microservices/pom.xml b/microservices/pom.xml index 550212f36cf..40214ebec9b 100644 --- a/microservices/pom.xml +++ b/microservices/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT gov.nsa.datawave.microservice datawave-microservice-build-parent diff --git a/microservices/services/pom.xml b/microservices/services/pom.xml index af5076bb24a..d0fa560dc2d 100644 --- a/microservices/services/pom.xml +++ b/microservices/services/pom.xml @@ -4,7 +4,7 @@ 
gov.nsa.datawave.microservice datawave-microservice-build-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-microservice-service-build-parent pom diff --git a/microservices/starters/pom.xml b/microservices/starters/pom.xml index adbd6d1ce8d..fc1c44da46d 100644 --- a/microservices/starters/pom.xml +++ b/microservices/starters/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.microservice datawave-microservice-build-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-microservice-starter-build-parent pom diff --git a/pom.xml b/pom.xml index 1b555463f24..9f90ebc9e44 100644 --- a/pom.xml +++ b/pom.xml @@ -3,7 +3,7 @@ 4.0.0 gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT pom DataWave DataWave is a Java-based ingest and query framework that leverages Apache Accumulo to provide fast, secure access to your data. diff --git a/warehouse/accumulo-extensions/pom.xml b/warehouse/accumulo-extensions/pom.xml index 9ade5414166..7a49adbc994 100644 --- a/warehouse/accumulo-extensions/pom.xml +++ b/warehouse/accumulo-extensions/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-accumulo-extensions ${project.artifactId} diff --git a/warehouse/age-off-utils/pom.xml b/warehouse/age-off-utils/pom.xml index c09f1f593ed..5906583b761 100644 --- a/warehouse/age-off-utils/pom.xml +++ b/warehouse/age-off-utils/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-age-off-utils ${project.artifactId} diff --git a/warehouse/age-off/pom.xml b/warehouse/age-off/pom.xml index ec61330ac90..c159e9b1322 100644 --- a/warehouse/age-off/pom.xml +++ b/warehouse/age-off/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-age-off ${project.artifactId} diff --git a/warehouse/assemble/datawave/pom.xml b/warehouse/assemble/datawave/pom.xml index 9499e5e79f6..782375eea2f 100644 --- a/warehouse/assemble/datawave/pom.xml +++ b/warehouse/assemble/datawave/pom.xml @@ -4,7 +4,7 @@ 
gov.nsa.datawave assemble-parent - 7.0.0 + 7.0.1-SNAPSHOT assemble-datawave jar diff --git a/warehouse/assemble/pom.xml b/warehouse/assemble/pom.xml index 7c6387e17e5..d9b582544b2 100644 --- a/warehouse/assemble/pom.xml +++ b/warehouse/assemble/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT assemble-parent pom diff --git a/warehouse/assemble/webservice/pom.xml b/warehouse/assemble/webservice/pom.xml index 325d6fb3d6d..231e1a94c42 100644 --- a/warehouse/assemble/webservice/pom.xml +++ b/warehouse/assemble/webservice/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave assemble-parent - 7.0.0 + 7.0.1-SNAPSHOT assemble-webservice ${project.artifactId} diff --git a/warehouse/common/pom.xml b/warehouse/common/pom.xml index 7398d3e842a..28fdaa85126 100644 --- a/warehouse/common/pom.xml +++ b/warehouse/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-common ${project.artifactId} diff --git a/warehouse/core/pom.xml b/warehouse/core/pom.xml index 7d270e1399e..c2d0b18679c 100644 --- a/warehouse/core/pom.xml +++ b/warehouse/core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-core jar diff --git a/warehouse/data-dictionary-core/pom.xml b/warehouse/data-dictionary-core/pom.xml index a93969c1431..6f5c1ad37e5 100644 --- a/warehouse/data-dictionary-core/pom.xml +++ b/warehouse/data-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-data-dictionary-core jar diff --git a/warehouse/edge-dictionary-core/pom.xml b/warehouse/edge-dictionary-core/pom.xml index 3969bc0ee3b..d59c7164975 100644 --- a/warehouse/edge-dictionary-core/pom.xml +++ b/warehouse/edge-dictionary-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-edge-dictionary-core jar diff --git a/warehouse/edge-model-configuration-core/pom.xml 
b/warehouse/edge-model-configuration-core/pom.xml index 12be4d30b55..19da9daa523 100644 --- a/warehouse/edge-model-configuration-core/pom.xml +++ b/warehouse/edge-model-configuration-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-edge-model-configuration-core jar diff --git a/warehouse/index-stats/pom.xml b/warehouse/index-stats/pom.xml index 6103f95daa3..0d5cd3b4f52 100644 --- a/warehouse/index-stats/pom.xml +++ b/warehouse/index-stats/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-index-stats jar diff --git a/warehouse/ingest-configuration/pom.xml b/warehouse/ingest-configuration/pom.xml index a38112c92da..e7d2850a20f 100644 --- a/warehouse/ingest-configuration/pom.xml +++ b/warehouse/ingest-configuration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-configuration diff --git a/warehouse/ingest-core/pom.xml b/warehouse/ingest-core/pom.xml index 32233b1d3a2..1b30d8f7e45 100644 --- a/warehouse/ingest-core/pom.xml +++ b/warehouse/ingest-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-core jar diff --git a/warehouse/ingest-csv/pom.xml b/warehouse/ingest-csv/pom.xml index 3a084ea7aa4..ceb4077f706 100644 --- a/warehouse/ingest-csv/pom.xml +++ b/warehouse/ingest-csv/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-csv jar diff --git a/warehouse/ingest-json/pom.xml b/warehouse/ingest-json/pom.xml index 35d375444a2..822fb927446 100644 --- a/warehouse/ingest-json/pom.xml +++ b/warehouse/ingest-json/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-json jar diff --git a/warehouse/ingest-nyctlc/pom.xml b/warehouse/ingest-nyctlc/pom.xml index 116db619628..669440473ee 100644 --- a/warehouse/ingest-nyctlc/pom.xml +++ 
b/warehouse/ingest-nyctlc/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-nyctlc jar diff --git a/warehouse/ingest-scripts/pom.xml b/warehouse/ingest-scripts/pom.xml index 7e68eb8ab93..b5f380e8834 100644 --- a/warehouse/ingest-scripts/pom.xml +++ b/warehouse/ingest-scripts/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-scripts ${project.artifactId} diff --git a/warehouse/ingest-ssdeep/pom.xml b/warehouse/ingest-ssdeep/pom.xml index b1a3f019947..192534bc710 100644 --- a/warehouse/ingest-ssdeep/pom.xml +++ b/warehouse/ingest-ssdeep/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-ssdeep diff --git a/warehouse/ingest-wikipedia/pom.xml b/warehouse/ingest-wikipedia/pom.xml index 6e73574e166..71ce93d75cc 100644 --- a/warehouse/ingest-wikipedia/pom.xml +++ b/warehouse/ingest-wikipedia/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ingest-wikipedia jar diff --git a/warehouse/metrics-core/pom.xml b/warehouse/metrics-core/pom.xml index 7fa6c9afe28..f3fd1ca87c5 100644 --- a/warehouse/metrics-core/pom.xml +++ b/warehouse/metrics-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-metrics-core jar diff --git a/warehouse/ops-tools/config-compare/pom.xml b/warehouse/ops-tools/config-compare/pom.xml index a5b0fa60231..c8bb8a5ef8e 100644 --- a/warehouse/ops-tools/config-compare/pom.xml +++ b/warehouse/ops-tools/config-compare/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ops-tools-config-compare diff --git a/warehouse/ops-tools/index-validation/pom.xml b/warehouse/ops-tools/index-validation/pom.xml index 5a4f73ca377..2302db58a32 100644 --- a/warehouse/ops-tools/index-validation/pom.xml +++ b/warehouse/ops-tools/index-validation/pom.xml @@ -4,7 
+4,7 @@ gov.nsa.datawave datawave-ops-tools-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ops-tools-index-validation jar diff --git a/warehouse/ops-tools/pom.xml b/warehouse/ops-tools/pom.xml index 3c09428e7f4..b3ca975ae11 100644 --- a/warehouse/ops-tools/pom.xml +++ b/warehouse/ops-tools/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ops-tools-parent pom diff --git a/warehouse/pom.xml b/warehouse/pom.xml index d1fbd7bb320..06b6f2ba1e7 100644 --- a/warehouse/pom.xml +++ b/warehouse/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-warehouse-parent pom diff --git a/warehouse/query-core/pom.xml b/warehouse/query-core/pom.xml index 5083544ca67..81a2fa44f77 100644 --- a/warehouse/query-core/pom.xml +++ b/warehouse/query-core/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-query-core jar diff --git a/warehouse/regression-testing/pom.xml b/warehouse/regression-testing/pom.xml index c6705588161..a726ba2ac65 100644 --- a/warehouse/regression-testing/pom.xml +++ b/warehouse/regression-testing/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-regression-testing ${project.artifactId} diff --git a/warehouse/ssdeep-common/pom.xml b/warehouse/ssdeep-common/pom.xml index 7b3bd49cfe5..b86feecc6c8 100644 --- a/warehouse/ssdeep-common/pom.xml +++ b/warehouse/ssdeep-common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-warehouse-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ssdeep-common diff --git a/web-services/accumulo/pom.xml b/web-services/accumulo/pom.xml index 0d1a99aba07..a42adeccdd1 100644 --- a/web-services/accumulo/pom.xml +++ b/web-services/accumulo/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-accumulo ejb diff --git a/web-services/atom/pom.xml b/web-services/atom/pom.xml index d75271e5a8f..9cdd9582e31 100644 --- 
a/web-services/atom/pom.xml +++ b/web-services/atom/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-atom ejb diff --git a/web-services/cached-results/pom.xml b/web-services/cached-results/pom.xml index 9a15e1a6764..a2b25c2990e 100644 --- a/web-services/cached-results/pom.xml +++ b/web-services/cached-results/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-cached-results ejb diff --git a/web-services/client/pom.xml b/web-services/client/pom.xml index 0c77d6c193c..8ceecdabafb 100644 --- a/web-services/client/pom.xml +++ b/web-services/client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-client jar diff --git a/web-services/common-util/pom.xml b/web-services/common-util/pom.xml index 4760dc28da9..6216c7a134b 100644 --- a/web-services/common-util/pom.xml +++ b/web-services/common-util/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-common-util jar diff --git a/web-services/common/pom.xml b/web-services/common/pom.xml index e023d6cd864..5643b879a13 100644 --- a/web-services/common/pom.xml +++ b/web-services/common/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-common ejb diff --git a/web-services/deploy/application/pom.xml b/web-services/deploy/application/pom.xml index f2fdee7ccaa..5cfe516a66c 100644 --- a/web-services/deploy/application/pom.xml +++ b/web-services/deploy/application/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-deploy-application ear diff --git a/web-services/deploy/configuration/pom.xml b/web-services/deploy/configuration/pom.xml index 9b976190148..eb44600b34a 100644 --- a/web-services/deploy/configuration/pom.xml +++ b/web-services/deploy/configuration/pom.xml @@ -4,7 +4,7 @@ 
gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-deploy-configuration jar diff --git a/web-services/deploy/docs/pom.xml b/web-services/deploy/docs/pom.xml index 01eeb59e685..4e99426e2a1 100644 --- a/web-services/deploy/docs/pom.xml +++ b/web-services/deploy/docs/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-deploy-docs war diff --git a/web-services/deploy/pom.xml b/web-services/deploy/pom.xml index 360bb0e4027..2dc52b67ba0 100644 --- a/web-services/deploy/pom.xml +++ b/web-services/deploy/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-deploy-parent diff --git a/web-services/deploy/spring-framework-integration/pom.xml b/web-services/deploy/spring-framework-integration/pom.xml index ee34ad3c616..bcf07fa2317 100644 --- a/web-services/deploy/spring-framework-integration/pom.xml +++ b/web-services/deploy/spring-framework-integration/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-deploy-parent - 7.0.0 + 7.0.1-SNAPSHOT spring-framework-integration ${project.artifactId} diff --git a/web-services/dictionary/pom.xml b/web-services/dictionary/pom.xml index be9c2b383d8..5a72d7be850 100644 --- a/web-services/dictionary/pom.xml +++ b/web-services/dictionary/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-dictionary ejb diff --git a/web-services/examples/client-login/pom.xml b/web-services/examples/client-login/pom.xml index c0930e03558..cd95fe8273b 100644 --- a/web-services/examples/client-login/pom.xml +++ b/web-services/examples/client-login/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-examples-client-login ejb diff --git a/web-services/examples/http-client/pom.xml b/web-services/examples/http-client/pom.xml index 
61922d4f1ca..a2ab7cf4aa9 100644 --- a/web-services/examples/http-client/pom.xml +++ b/web-services/examples/http-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-examples-http-client jar diff --git a/web-services/examples/jms-client/pom.xml b/web-services/examples/jms-client/pom.xml index 8dc609d7954..31487683553 100644 --- a/web-services/examples/jms-client/pom.xml +++ b/web-services/examples/jms-client/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-examples-jms-client jar diff --git a/web-services/examples/pom.xml b/web-services/examples/pom.xml index 16d48731c02..c7d41051604 100644 --- a/web-services/examples/pom.xml +++ b/web-services/examples/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-examples-parent pom diff --git a/web-services/examples/query-war/pom.xml b/web-services/examples/query-war/pom.xml index eb1aae6675f..d1af8ebfbe8 100644 --- a/web-services/examples/query-war/pom.xml +++ b/web-services/examples/query-war/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-examples-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-examples-query-war war diff --git a/web-services/map-reduce-embedded/pom.xml b/web-services/map-reduce-embedded/pom.xml index 0418d8af139..457f2e88d52 100644 --- a/web-services/map-reduce-embedded/pom.xml +++ b/web-services/map-reduce-embedded/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-map-reduce-embedded jar diff --git a/web-services/map-reduce-status/pom.xml b/web-services/map-reduce-status/pom.xml index 64cae517247..2a9693b585d 100644 --- a/web-services/map-reduce-status/pom.xml +++ b/web-services/map-reduce-status/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-map-reduce-status ejb diff --git 
a/web-services/map-reduce/pom.xml b/web-services/map-reduce/pom.xml index 19df5bdd010..12c9b93f54a 100644 --- a/web-services/map-reduce/pom.xml +++ b/web-services/map-reduce/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-map-reduce ejb diff --git a/web-services/metrics/pom.xml b/web-services/metrics/pom.xml index 2238dcc5603..5025b7e3a60 100644 --- a/web-services/metrics/pom.xml +++ b/web-services/metrics/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-metrics ejb diff --git a/web-services/model/pom.xml b/web-services/model/pom.xml index 7121614b1ce..c538f512e12 100644 --- a/web-services/model/pom.xml +++ b/web-services/model/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-model ejb diff --git a/web-services/modification/pom.xml b/web-services/modification/pom.xml index c709c254c83..4dad69af36d 100644 --- a/web-services/modification/pom.xml +++ b/web-services/modification/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-modification ejb diff --git a/web-services/pom.xml b/web-services/pom.xml index 2cf7b9755d4..7501ac0f849 100644 --- a/web-services/pom.xml +++ b/web-services/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave datawave-parent - 7.0.0 + 7.0.1-SNAPSHOT gov.nsa.datawave.webservices datawave-ws-parent diff --git a/web-services/query-websocket/pom.xml b/web-services/query-websocket/pom.xml index a247469cb02..c6e1943bf1b 100644 --- a/web-services/query-websocket/pom.xml +++ b/web-services/query-websocket/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-query-websocket war diff --git a/web-services/query/pom.xml b/web-services/query/pom.xml index 7baa010cefb..4e610cbaccc 100644 --- a/web-services/query/pom.xml +++ b/web-services/query/pom.xml @@ -4,7 +4,7 @@ 
gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-query ejb diff --git a/web-services/rest-api/pom.xml b/web-services/rest-api/pom.xml index 32a2cbf7435..8fd37657ae6 100644 --- a/web-services/rest-api/pom.xml +++ b/web-services/rest-api/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-rest-api war diff --git a/web-services/security/pom.xml b/web-services/security/pom.xml index e133ae18d12..469e2173b32 100644 --- a/web-services/security/pom.xml +++ b/web-services/security/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-security ejb diff --git a/web-services/web-root/pom.xml b/web-services/web-root/pom.xml index ff76a98b539..79ace8f97b4 100644 --- a/web-services/web-root/pom.xml +++ b/web-services/web-root/pom.xml @@ -4,7 +4,7 @@ gov.nsa.datawave.webservices datawave-ws-parent - 7.0.0 + 7.0.1-SNAPSHOT datawave-ws-web-root war From dbaf436766a5b8218789a85428bedb1199279331 Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Tue, 21 May 2024 12:25:57 +0000 Subject: [PATCH 08/20] updated submodules --- microservices/services/mapreduce-query | 2 +- microservices/services/query | 2 +- microservices/services/query-executor | 2 +- microservices/starters/query | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/microservices/services/mapreduce-query b/microservices/services/mapreduce-query index 814d56dde06..fa3ecc4dd0c 160000 --- a/microservices/services/mapreduce-query +++ b/microservices/services/mapreduce-query @@ -1 +1 @@ -Subproject commit 814d56dde0699b9482bea398a4046ca7395e627d +Subproject commit fa3ecc4dd0c19da7add0c32efbdfe42ce625b102 diff --git a/microservices/services/query b/microservices/services/query index 456e37aab5e..7c0b713a9ed 160000 --- a/microservices/services/query +++ b/microservices/services/query @@ -1 +1 @@ -Subproject commit 456e37aab5e2316e20ed3ab6d1e16a44ce57dd18 +Subproject 
commit 7c0b713a9edbe13b8f1d8ff20a7541a717dd5535 diff --git a/microservices/services/query-executor b/microservices/services/query-executor index 3da942f90a7..f9b50c58f31 160000 --- a/microservices/services/query-executor +++ b/microservices/services/query-executor @@ -1 +1 @@ -Subproject commit 3da942f90a783e617729dbb46ea7a134537ccaa5 +Subproject commit f9b50c58f31f9e3bcca675b49fff45901872ed30 diff --git a/microservices/starters/query b/microservices/starters/query index eaef27ea9b2..165530a2b38 160000 --- a/microservices/starters/query +++ b/microservices/starters/query @@ -1 +1 @@ -Subproject commit eaef27ea9b21cf07a6c8b513a27c63e51454ee0a +Subproject commit 165530a2b3802a696ec665412516d9d43b850ba7 From 2175964a6a96ee001399a7bd674cd3e358e8cfd8 Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Tue, 21 May 2024 16:01:16 +0000 Subject: [PATCH 09/20] added CODEOWNERS to protect changes made to QueryExecutorBean and LookupUUIDUtil --- .github/CODEOWNERS | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 00000000000..9f53a44a7e1 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,6 @@ + +# In order to ensure the query microservices are consistent with the wildfly webservice, we need +# to ensure that changes made to QueryExecutorBean match QueryManagementService, and changes made +# to LookupUUIDUtil match LookupService in the Query Service. 
+/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java @jwomeara +/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java @jwomeara From ea2a8d753225f1289898851e8a26c4fd6c504522 Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Tue, 21 May 2024 16:03:46 +0000 Subject: [PATCH 10/20] simplified codeowners --- .github/CODEOWNERS | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 9f53a44a7e1..8e21aeb3383 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -2,5 +2,5 @@ # In order to ensure the query microservices are consistent with the wildfly webservice, we need # to ensure that changes made to QueryExecutorBean match QueryManagementService, and changes made # to LookupUUIDUtil match LookupService in the Query Service. -/web-services/query/src/main/java/datawave/webservice/query/runner/QueryExecutorBean.java @jwomeara -/web-services/query/src/main/java/datawave/webservice/query/util/LookupUUIDUtil.java @jwomeara +QueryExecutorBean.java @jwomeara +LookupUUIDUtil.java @jwomeara From e54f368f70dcb10ca30a4e8676b8c9179c4c62bc Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Tue, 21 May 2024 16:06:48 +0000 Subject: [PATCH 11/20] added RunningQuery.java to CODEOWNERS file --- .github/CODEOWNERS | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8e21aeb3383..7cda5467009 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -4,3 +4,4 @@ # to LookupUUIDUtil match LookupService in the Query Service. 
QueryExecutorBean.java @jwomeara LookupUUIDUtil.java @jwomeara +RunningQuery.java @jwomeara From 7e8a08fdce87225eb098d573d16d55b5d39b74bc Mon Sep 17 00:00:00 2001 From: alerman Date: Wed, 22 May 2024 06:34:32 -0400 Subject: [PATCH 12/20] Update and add logging to docker entrypoint (#2397) --- .../application/src/main/docker/docker-entrypoint.sh | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/web-services/deploy/application/src/main/docker/docker-entrypoint.sh b/web-services/deploy/application/src/main/docker/docker-entrypoint.sh index 4d0b309f293..ce9c8721ad0 100644 --- a/web-services/deploy/application/src/main/docker/docker-entrypoint.sh +++ b/web-services/deploy/application/src/main/docker/docker-entrypoint.sh @@ -15,7 +15,7 @@ wait_and_shutdown(){ TIMEOUT_MINUTES=75 fi - echo "Sending wait and shutdown command to the web service. Will wait up to $TIMEOUT_MINUTES for queries to complete." + echo "Sending wait and shutdown command to the web service. Will wait up to $TIMEOUT_MINUTES minutes for queries to complete." curl --fail -s -o /tmp/curl_shutdown.log http://localhost:8080/DataWave/Common/Health/shutdown?timeoutMinutes=$TIMEOUT_MINUTES CURL_STATUS=$? 
if [ $CURL_STATUS -ne 0 ]; then @@ -29,6 +29,12 @@ wait_and_shutdown(){ fi } + +echo "Capturing ENV Properties" +printenv > env.properties +echo "Setting Runtime Config" +$WILDFLY_HOME/bin/jboss-cli.sh --file=./runtime-config.cli --properties=env.properties + if [[ "$@" != *"bin/standalone.sh"* ]]; then exec "$@" else @@ -63,9 +69,6 @@ else trap 'kill -WINCH $CMD_PID' WINCH trap 'kill -USR2 $CMD_PID' USR2 - printenv > env.properties - $WILDFLY_HOME/bin/jboss-cli.sh --file=./runtime-config.cli --properties=env.properties - eval "$@" "&" CMD_PID=${!} From 08b2d17a4cb88c8f3492b3ce59c7a684af9122a7 Mon Sep 17 00:00:00 2001 From: foster33 <84727868+foster33@users.noreply.github.com> Date: Wed, 22 May 2024 09:02:56 -0400 Subject: [PATCH 13/20] Remove reference to apidocs (#2385) --- web-services/deploy/docs/docs/index.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/web-services/deploy/docs/docs/index.html b/web-services/deploy/docs/docs/index.html index 9bf65c53928..7cedff3a0bd 100644 --- a/web-services/deploy/docs/docs/index.html +++ b/web-services/deploy/docs/docs/index.html @@ -45,11 +45,11 @@

    Welcome to DataWave

    -
    +
    From 71207e6de76b32c527fcf2d6f1e6901afee9c322 Mon Sep 17 00:00:00 2001 From: matthpeterson Date: Mon, 20 May 2024 10:55:24 -0400 Subject: [PATCH 14/20] Age Off File creation improvements (#2395) * Stop closing writer after each rule * Remove ismerge child element in generic case. Just use mode='merge' instead * Add header to xml files --------- Co-authored-by: Matthew Peterson --- .../age/off/util/AgeOffFileGenerator.java | 1 + .../age/off/util/AgeOffRuleFormatter.java | 3 +- .../age/off/util/RuleConfigDocument.java | 10 ---- .../age/off/util/AgeOffFileGeneratorTest.java | 48 +++++++++++++++---- .../age/off/util/AgeOffRuleFormatterTest.java | 2 - .../age/off/util/RuleConfigDocumentTest.java | 2 - 6 files changed, 41 insertions(+), 25 deletions(-) diff --git a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffFileGenerator.java b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffFileGenerator.java index a55e2d9c570..b10723f6a5e 100644 --- a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffFileGenerator.java +++ b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffFileGenerator.java @@ -70,6 +70,7 @@ private void writeRule(AgeOffRuleConfiguration ruleConfiguration) throws IOExcep } private void openConfigurationElement() throws IOException { + this.writer.write("\n"); this.writer.write("\n"); } } diff --git a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffRuleFormatter.java b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffRuleFormatter.java index 93e2e145ae8..152f23e7bdc 100644 --- a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffRuleFormatter.java +++ b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/AgeOffRuleFormatter.java @@ -35,7 +35,7 @@ public AgeOffRuleFormatter(AgeOffRuleConfiguration configuration) { } /** - * Outputs the configured rule to the writer + * Outputs the configured rule to the writer. 
Will not close the writer. * * @param writer * output writer @@ -48,7 +48,6 @@ void format(Writer writer) throws IOException { AgeOffRuleLoader.RuleConfig ruleConfig = createRuleConfig(this.configuration); writer.write(transformToXmlString(ruleConfig)); - writer.close(); } private AgeOffRuleLoader.RuleConfig createRuleConfig(AgeOffRuleConfiguration configuration) throws IOException { diff --git a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/RuleConfigDocument.java b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/RuleConfigDocument.java index 3e30f49a1f7..ad4eef2600c 100644 --- a/warehouse/age-off-utils/src/main/java/datawave/age/off/util/RuleConfigDocument.java +++ b/warehouse/age-off-utils/src/main/java/datawave/age/off/util/RuleConfigDocument.java @@ -15,7 +15,6 @@ public class RuleConfigDocument extends DocumentImpl { private static final String FILTER_CLASS_ELEMENT_NAME = "filterClass"; private static final String MATCH_PATTERN_ELEMENT_NAME = "matchPattern"; - private static final String IS_MERGE_ELEMENT_NAME = "ismerge"; private static final String TTL_ELEMENT_NAME = "ttl"; private static final String TTL_UNITS_ATTRIBUTE_NAME = "units"; private static final String RULE_ELEMENT_NAME = "rule"; @@ -53,7 +52,6 @@ private Element createRuleElement() { private void appendElementsToRule() { appendFilterClassElement(); - appendMergeElement(); appendTtlElement(); appendMatchPatternElement(); appendCustomElements(); @@ -95,14 +93,6 @@ private void appendTtlElement() { } } - private void appendMergeElement() { - if (this.ruleConfig.isMerge) { - Element mergeElement = super.createElement(IS_MERGE_ELEMENT_NAME); - mergeElement.setTextContent(Boolean.TRUE.toString()); - rule.appendChild(mergeElement); - } - } - private void enableCommentEscaping(Element rule) { adjustEscaping(rule, StreamResult.PI_ENABLE_OUTPUT_ESCAPING); } diff --git a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffFileGeneratorTest.java 
b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffFileGeneratorTest.java index 350cee591a3..11a1b38a7e6 100644 --- a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffFileGeneratorTest.java +++ b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffFileGeneratorTest.java @@ -5,19 +5,25 @@ import static org.junit.Assert.assertTrue; import java.io.ByteArrayInputStream; +import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; +import java.io.Writer; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; +import java.nio.file.Files; import java.util.List; +import java.util.stream.Collectors; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.IteratorEnvironment; import org.apache.xerces.dom.DocumentImpl; +import org.junit.Rule; import org.junit.Test; +import org.junit.rules.TemporaryFolder; import org.w3c.dom.Element; import org.w3c.dom.Node; @@ -33,12 +39,12 @@ public class AgeOffFileGeneratorTest { // @formatter:off private static final String EXPECTED_FILE_CONTENTS = + "\n" + "\n" + " " + PARENT_FILE_NAME + "\n" + " \n" + " \n" + " datawave.iterators.filter.ColumnVisibilityLabeledFilter\n" + - " true\n" + " \n" + " dryFood bakingPowder=365d\n" + " dryFood driedBeans=548d\n" + @@ -66,12 +72,12 @@ public class AgeOffFileGeneratorTest { "\n"; private static final String OTHER_EXPECTED_FILE_CONTENTS = + "\n" + "\n" + " test-root-field.xml\n" + " \n" + " \n" + " datawave.iterators.filter.ageoff.DataTypeAgeOffFilter\n" + - " true\n" + " true\n" + " \n" + " \n" + @@ -196,8 +202,23 @@ public class AgeOffFileGeneratorTest { "\n"; // @formatter:on + @Rule + public TemporaryFolder temporaryFolder = new TemporaryFolder(); + @Test - public void createFileWithMultipleRules() throws IOException { + public void 
generateFileContentsWithMultipleRules() throws IOException { + AgeOffFileConfiguration.Builder builder = createBuilderForMultipleRules(); + assertEquals(EXPECTED_FILE_CONTENTS, generateFileContentsInMemory(builder)); + } + + @Test + public void writeMultipleRulesToLocalFile() throws IOException { + File temporaryFile = writeToFile(createBuilderForMultipleRules()); + String actualResult = Files.readAllLines(temporaryFile.toPath()).stream().collect(Collectors.joining("\n")) + "\n"; + assertEquals(EXPECTED_FILE_CONTENTS, actualResult); + } + + private AgeOffFileConfiguration.Builder createBuilderForMultipleRules() { AgeOffRuleConfiguration.Builder colVisFilterRule = defineColVisFilterRule(); AgeOffRuleConfiguration.Builder testFilterRule = defineTestFilterRule(); AgeOffRuleConfiguration.Builder dataTypeRule = defineDataTypeRule(); @@ -208,7 +229,7 @@ public void createFileWithMultipleRules() throws IOException { builder.addNextRule(dataTypeRule); builder.addNextRule(testFilterRule); builder.withIndentation(" "); - assertEquals(EXPECTED_FILE_CONTENTS, generateFile(builder)); + return builder; } @Test @@ -222,7 +243,7 @@ public void createAnotherFileWithMultipleRules() throws IOException, URISyntaxEx builder.addNextRule(fieldRule); builder.addNextRule(defineColQualifierRule()); builder.withIndentation(" "); - assertEquals(OTHER_EXPECTED_FILE_CONTENTS, generateFile(builder)); + assertEquals(OTHER_EXPECTED_FILE_CONTENTS, generateFileContentsInMemory(builder)); } @Test @@ -351,11 +372,20 @@ private AgeOffRuleConfiguration.Builder defineFieldAgeOffRule() { return builder; } - private String generateFile(AgeOffFileConfiguration.Builder builder) throws IOException { - StringWriter out = new StringWriter(); + private String generateFileContentsInMemory(AgeOffFileConfiguration.Builder builder) throws IOException { + StringWriter writer = new StringWriter(); + AgeOffFileGenerator generator = new AgeOffFileGenerator(builder.build()); + generator.format(writer); + return 
writer.toString(); + } + + private File writeToFile(AgeOffFileConfiguration.Builder builder) throws IOException { + File temporaryFile = temporaryFolder.newFile(); + Writer writer = Files.newBufferedWriter(temporaryFile.toPath()); AgeOffFileGenerator generator = new AgeOffFileGenerator(builder.build()); - generator.format(out); - return out.toString(); + generator.format(writer); + writer.close(); + return temporaryFile; } private class TestProvider implements AgeOffRuleLoader.AgeOffFileLoaderDependencyProvider { diff --git a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffRuleFormatterTest.java b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffRuleFormatterTest.java index 04efb20e1aa..fe0fa744e67 100644 --- a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffRuleFormatterTest.java +++ b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/AgeOffRuleFormatterTest.java @@ -22,7 +22,6 @@ public void createRuleFromCsv() throws IOException { String expectedOutputText = "\n" + " datawave.ingest.util.cache.watch.TestTrieFilter\n" + - " true\n" + " \n" + " dryFood bakingPowder=365d\n" + " dryFood driedBeans=548d\n" + @@ -111,7 +110,6 @@ public void createRuleForDataTypeIndexTable() throws IOException { String expectedOutputText = "\n" + " datawave.iterators.filter.ageoff.DataTypeAgeOffFilter\n" + - " true\n" + " true\n" + "\n"; // @formatter:on diff --git a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/RuleConfigDocumentTest.java b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/RuleConfigDocumentTest.java index 5c965f1d189..9d2559ead50 100644 --- a/warehouse/age-off-utils/src/test/java/datawave/age/off/util/RuleConfigDocumentTest.java +++ b/warehouse/age-off-utils/src/test/java/datawave/age/off/util/RuleConfigDocumentTest.java @@ -75,7 +75,6 @@ public void includesMerge() throws IOException { // @formatter:off String expected = "\n" + " myclass\n" + - " true\n" + "\n"; // @formatter:on 
assertEquals(actual, expected, actual); @@ -127,7 +126,6 @@ public void includesAll() throws IOException { // @formatter:off String expected = "\n" + " myclass\n" + - " true\n" + " 2468\n" + " \n" + "1234\n" + From 130cabfd61c6c20a5d0962bb348b99dcdaa82ea3 Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Wed, 22 May 2024 18:10:45 +0000 Subject: [PATCH 15/20] updated metrics submodule --- microservices/services/query-metric | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/microservices/services/query-metric b/microservices/services/query-metric index 2a8e390424a..0b9aea21b03 160000 --- a/microservices/services/query-metric +++ b/microservices/services/query-metric @@ -1 +1 @@ -Subproject commit 2a8e390424a0cd262c647385581d5dc5f181bfe0 +Subproject commit 0b9aea21b037c11932b551ea653d7c2b47f0c5a0 From d51c4e936a2d0257db2961f895684264c2f02b10 Mon Sep 17 00:00:00 2001 From: alerman Date: Tue, 28 May 2024 14:45:34 -0400 Subject: [PATCH 16/20] Run accumulo checks at WARN, and fix some easier spots (#2389) * run accumulo import checks * Clean up some of the references to accumulo code * Adding a few more easy ones * Update missed reference to accumulo internal code * Add root location prefix to fix sub-dir builds Resolves the proper base directory for the import-control file so builds from sub-directories will still work. 
--------- Co-authored-by: Daniel Roberts --- checkstyle.xml | 13 +++++---- import-control-accumulo.xml | 28 +++++++++++++++++++ pom.xml | 4 ++- .../datawave/mr/bulk/BulkInputFormat.java | 9 +++--- .../src/main/java/datawave/util/TextUtil.java | 9 ++++++ .../job/CBMutationOutputFormatter.java | 1 - .../ingest/mapreduce/job/IngestJob.java | 10 +++---- .../ingest/mapreduce/job/ShardReindexJob.java | 1 - .../mapreduce/job/ShardedTableMapFile.java | 9 ++++-- .../analytic/FileByteSummaryLoader.java | 1 - ...DatawaveFieldIndexCachingIteratorJexl.java | 9 +++--- 11 files changed, 67 insertions(+), 27 deletions(-) create mode 100644 import-control-accumulo.xml diff --git a/checkstyle.xml b/checkstyle.xml index 8c0e64b8253..6ac2c34afa1 100644 --- a/checkstyle.xml +++ b/checkstyle.xml @@ -2,13 +2,14 @@ + + - - - - - - + + + + + diff --git a/import-control-accumulo.xml b/import-control-accumulo.xml new file mode 100644 index 00000000000..3227801483a --- /dev/null +++ b/import-control-accumulo.xml @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/pom.xml b/pom.xml index 9f90ebc9e44..5b3ee5a2ce3 100644 --- a/pom.xml +++ b/pom.xml @@ -1403,8 +1403,10 @@ 3.1.0 checkstyle.xml - false + true false + true + basedir=${datawave.root} diff --git a/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java b/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java index 6b517b8c443..1e814a23eb3 100644 --- a/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java +++ b/warehouse/core/src/main/java/datawave/mr/bulk/BulkInputFormat.java @@ -18,6 +18,8 @@ import java.util.Set; import java.util.StringTokenizer; import java.util.UUID; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; import org.apache.accumulo.core.client.Accumulo; import org.apache.accumulo.core.client.AccumuloClient; @@ -54,8 +56,6 @@ import org.apache.accumulo.core.security.TablePermission; import 
org.apache.accumulo.core.singletons.SingletonReservation; import org.apache.accumulo.core.util.Pair; -import org.apache.accumulo.core.util.TextUtil; -import org.apache.accumulo.core.util.UtilWaitThread; import org.apache.accumulo.core.util.format.DefaultFormatter; import org.apache.accumulo.core.util.threads.Threads; import org.apache.commons.codec.binary.Base64; @@ -89,6 +89,7 @@ import datawave.mr.bulk.split.LocationStrategy; import datawave.mr.bulk.split.RangeSplit; import datawave.mr.bulk.split.SplitStrategy; +import datawave.util.TextUtil; public class BulkInputFormat extends InputFormat { @@ -1115,7 +1116,7 @@ public List getSplits(JobContext job) throws IOException { binnedRanges = binOfflineTable(job, tableName, ranges); while (binnedRanges == null) { // Some tablets were still online, try again - UtilWaitThread.sleep(100L + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms + TimeUnit.MILLISECONDS.sleep(ThreadLocalRandom.current().nextInt(100, 200)); binnedRanges = binOfflineTable(job, tableName, ranges); } } else { @@ -1138,7 +1139,7 @@ public List getSplits(JobContext job) throws IOException { } binnedRanges.clear(); log.warn("Unable to locate bins for specified ranges. 
Retrying."); - UtilWaitThread.sleep(100 + (int) (Math.random() * 100)); // sleep randomly between 100 and 200 ms + TimeUnit.MILLISECONDS.sleep(ThreadLocalRandom.current().nextInt(100, 200)); tl.invalidateCache(); } diff --git a/warehouse/core/src/main/java/datawave/util/TextUtil.java b/warehouse/core/src/main/java/datawave/util/TextUtil.java index 33e03e24b8a..00d60408648 100644 --- a/warehouse/core/src/main/java/datawave/util/TextUtil.java +++ b/warehouse/core/src/main/java/datawave/util/TextUtil.java @@ -107,4 +107,13 @@ public static String fromUtf8(byte[] bytes) { throw new IllegalArgumentException(e); } } + + public static byte[] getBytes(Text text) { + byte[] bytes = text.getBytes(); + if (bytes.length != text.getLength()) { + bytes = new byte[text.getLength()]; + System.arraycopy(text.getBytes(), 0, bytes, 0, bytes.length); + } + return bytes; + } } diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/CBMutationOutputFormatter.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/CBMutationOutputFormatter.java index ff06c59a37b..368c7cbfb67 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/CBMutationOutputFormatter.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/CBMutationOutputFormatter.java @@ -8,7 +8,6 @@ import org.apache.accumulo.core.data.Mutation; import org.apache.accumulo.hadoop.mapreduce.AccumuloOutputFormat; import org.apache.accumulo.hadoop.mapreduce.OutputFormatBuilder; -import org.apache.accumulo.hadoopImpl.mapreduce.OutputFormatBuilderImpl; import org.apache.accumulo.hadoopImpl.mapreduce.lib.OutputConfigurator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java index 3945fcd6e66..84a55099243 100644 --- 
a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/IngestJob.java @@ -31,13 +31,10 @@ import java.util.Observer; import java.util.Set; -import org.apache.accumulo.core.Constants; -import org.apache.accumulo.core.client.Accumulo; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.TableExistsException; import org.apache.accumulo.core.client.TableNotFoundException; -import org.apache.accumulo.core.client.security.tokens.PasswordToken; import org.apache.accumulo.core.data.ColumnUpdate; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.KeyValue; @@ -1582,6 +1579,8 @@ public static void verboseCounters(TaskInputOutputContext context, String locati key.getKey().getColumnQualifier().getBytes(), key.getKey().getColumnVisibility(), value.get()); } + public final static int MAX_DATA_TO_PRINT = 64; + /** * Output a verbose counter * @@ -1606,9 +1605,8 @@ public static void verboseCounters(TaskInputOutputContext context, String locati public static void verboseCounter(TaskInputOutputContext context, String location, Text tableName, byte[] row, byte[] colFamily, byte[] colQualifier, Text colVis, byte[] val) { String labelString = new ColumnVisibility(colVis).toString(); - String s = Key.toPrintableString(row, 0, row.length, Constants.MAX_DATA_TO_PRINT) + " " - + Key.toPrintableString(colFamily, 0, colFamily.length, Constants.MAX_DATA_TO_PRINT) + ":" - + Key.toPrintableString(colQualifier, 0, colQualifier.length, Constants.MAX_DATA_TO_PRINT) + " " + labelString + " " + String s = Key.toPrintableString(row, 0, row.length, MAX_DATA_TO_PRINT) + " " + Key.toPrintableString(colFamily, 0, colFamily.length, MAX_DATA_TO_PRINT) + + ":" + Key.toPrintableString(colQualifier, 0, colQualifier.length, MAX_DATA_TO_PRINT) + " " + labelString + " " + (val 
== null ? "null" : String.valueOf(val.length) + " value bytes"); s = s.replace('\n', ' '); diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardReindexJob.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardReindexJob.java index 4f54617dabc..179e4871d01 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardReindexJob.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardReindexJob.java @@ -27,7 +27,6 @@ import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.hadoop.mapreduce.AccumuloInputFormat; -import org.apache.accumulo.hadoopImpl.mapreduce.lib.InputConfigurator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; diff --git a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardedTableMapFile.java b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardedTableMapFile.java index 773898d30a6..25a002b5772 100644 --- a/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardedTableMapFile.java +++ b/warehouse/ingest-core/src/main/java/datawave/ingest/mapreduce/job/ShardedTableMapFile.java @@ -11,6 +11,7 @@ import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; +import java.util.concurrent.TimeUnit; import org.apache.accumulo.core.client.AccumuloClient; import org.apache.accumulo.core.client.AccumuloException; @@ -19,7 +20,6 @@ import org.apache.accumulo.core.client.admin.TableOperations; import org.apache.accumulo.core.data.Range; import org.apache.accumulo.core.data.TabletId; -import org.apache.accumulo.core.util.UtilWaitThread; import org.apache.commons.lang.time.DateUtils; import org.apache.commons.lang3.mutable.MutableInt; import org.apache.hadoop.conf.Configuration; @@ -355,7 +355,12 @@ public static Map getLocations(Logger log, AccumuloHelper accumuloH 
keepRetrying = false; } catch (Exception e) { log.warn(e.getClass().getName() + ":" + e.getMessage() + " ... retrying ...", e); - UtilWaitThread.sleep(3000); + try { + TimeUnit.MILLISECONDS.sleep(3000); + } catch (InterruptedException ex) { + log.error(e.getMessage(), ex); + } + splitToLocation.clear(); } } diff --git a/warehouse/metrics-core/src/main/java/datawave/metrics/analytic/FileByteSummaryLoader.java b/warehouse/metrics-core/src/main/java/datawave/metrics/analytic/FileByteSummaryLoader.java index da33345fc11..3a58363d3da 100644 --- a/warehouse/metrics-core/src/main/java/datawave/metrics/analytic/FileByteSummaryLoader.java +++ b/warehouse/metrics-core/src/main/java/datawave/metrics/analytic/FileByteSummaryLoader.java @@ -7,7 +7,6 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; -import org.apache.accumulo.core.Constants; import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.ClientConfiguration; diff --git a/warehouse/query-core/src/main/java/datawave/core/iterators/DatawaveFieldIndexCachingIteratorJexl.java b/warehouse/query-core/src/main/java/datawave/core/iterators/DatawaveFieldIndexCachingIteratorJexl.java index adfd1785c5e..a6d588d2a50 100644 --- a/warehouse/query-core/src/main/java/datawave/core/iterators/DatawaveFieldIndexCachingIteratorJexl.java +++ b/warehouse/query-core/src/main/java/datawave/core/iterators/DatawaveFieldIndexCachingIteratorJexl.java @@ -24,7 +24,6 @@ import org.apache.accumulo.core.iterators.IteratorEnvironment; import org.apache.accumulo.core.iterators.SortedKeyValueIterator; import org.apache.accumulo.core.iterators.WrappingIterator; -import org.apache.accumulo.core.iteratorsImpl.system.IterationInterruptedException; import org.apache.commons.pool.impl.GenericObjectPool; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; @@ -812,7 +811,7 @@ else if 
(key.compareTo(this.lastRangeSeeked.getStartKey()) < 0) { if (this.setControl.isCancelledQuery()) { log.debug("Ivarator query was cancelled"); - throw new IterationInterruptedException("Ivarator query was cancelled"); + throw new RuntimeException("Ivarator query was cancelled"); } // if we have any persisted data or we have scanned a significant number of keys, then persist it completely @@ -831,7 +830,7 @@ else if (key.compareTo(this.lastRangeSeeked.getStartKey()) < 0) { throw new IvaratorException("Ivarator query timed out"); } else { log.debug("Ivarator query was cancelled"); - throw new IterationInterruptedException("Ivarator query was cancelled"); + throw new RuntimeException("Ivarator query was cancelled"); } } @@ -1028,7 +1027,7 @@ protected SortedKeyValueIterator takePoolSource() { try { source = ivaratorSourcePool.borrowObject(); } catch (Exception e) { - throw new IterationInterruptedException("Unable to borrow object from ivarator source pool. " + e.getMessage()); + throw new RuntimeException("Unable to borrow object from ivarator source pool. " + e.getMessage()); } return source; } @@ -1043,7 +1042,7 @@ protected void returnPoolSource(SortedKeyValueIterator source) { try { ivaratorSourcePool.returnObject(source); } catch (Exception e) { - throw new IterationInterruptedException("Unable to return object to ivarator source pool. " + e.getMessage()); + throw new RuntimeException("Unable to return object to ivarator source pool. 
" + e.getMessage()); } } From b3d1d13ed53dac2b3dd2821edf601e662e469111 Mon Sep 17 00:00:00 2001 From: matthpeterson Date: Wed, 29 May 2024 10:55:17 -0400 Subject: [PATCH 17/20] Make StatsJob's job name descriptive (#1677) * Make StatsJob's job name descriptive * formatting after commit resolution --------- Co-authored-by: matthpeterson Co-authored-by: Keith Ratcliffe Co-authored-by: Moriarty <22225248+apmoriarty@users.noreply.github.com> Co-authored-by: palindrome <31748527+hlgp@users.noreply.github.com> Co-authored-by: hlgp --- .../src/main/java/datawave/mapreduce/shardStats/StatsJob.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java b/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java index c79cf5e2160..88ad8c19a62 100644 --- a/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java +++ b/warehouse/index-stats/src/main/java/datawave/mapreduce/shardStats/StatsJob.java @@ -142,6 +142,8 @@ protected void configureInputFormat(Job job, AccumuloHelper cbHelper, Configurat protected void configureJob(Job job, Configuration conf, Path workDirPath, FileSystem outputFs) throws Exception { super.configureJob(job, conf, workDirPath, outputFs); + job.setJobName("StatsJob - " + job.getJobName()); + job.setReducerClass(StatsHyperLogReducer.class); } From 7eaeebdbf91545ef376db370dee57517cb5e4eea Mon Sep 17 00:00:00 2001 From: Whitney O'Meara Date: Wed, 29 May 2024 20:00:29 +0000 Subject: [PATCH 18/20] updated query metric version in docker compose --- docker/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 23167518282..690950b145c 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -270,7 +270,7 @@ services: metrics: entrypoint: ["java","-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5007","-jar","app.jar"] - 
image: datawave/query-metric-service:4.0.1-SNAPSHOT + image: datawave/query-metric-service:4.0.2-SNAPSHOT command: - --spring.output.ansi.enabled=ALWAYS - --spring.profiles.active=consul,compose,remoteauth From ec3be20ad6e692fe4a603e64f511f2f840a44f40 Mon Sep 17 00:00:00 2001 From: Ivan Bella Date: Thu, 16 May 2024 09:44:31 +0000 Subject: [PATCH 19/20] Fixes to the most-recent-unique functionality (#2392) --- .../query/planner/QueryOptionsSwitch.java | 3 +++ .../query/tables/ShardQueryLogic.java | 4 +-- .../sortedset/RewritableSortedSetImpl.java | 27 +++++++++++-------- 3 files changed, 21 insertions(+), 13 deletions(-) diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java index 0c1fbc151a9..5959db9038b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java @@ -66,6 +66,9 @@ public static void apply(Map optionsMap, ShardQueryConfiguration uniqueFields.setMostRecent(config.getUniqueFields().isMostRecent()); config.setUniqueFields(uniqueFields); break; + case QueryParameters.MOST_RECENT_UNIQUE: + config.getUniqueFields().setMostRecent(Boolean.parseBoolean(value)); + break; case QueryParameters.EXCERPT_FIELDS: ExcerptFields excerptFields = ExcerptFields.from(value); config.setExcerptFields(excerptFields); diff --git a/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java b/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java index 6854c34bd2c..13451949d05 100644 --- a/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java +++ b/warehouse/query-core/src/main/java/datawave/query/tables/ShardQueryLogic.java @@ -929,7 +929,8 @@ protected void loadQueryParameters(ShardQueryConfiguration config, Query setting UniqueFields uniqueFields = 
UniqueFields.from(uniqueFieldsParam); // Only set the unique fields if we were actually given some if (!uniqueFields.isEmpty()) { - this.setUniqueFields(uniqueFields); + // preserve the most recent flag + uniqueFields.setMostRecent(config.getUniqueFields().isMostRecent()); config.setUniqueFields(uniqueFields); } } @@ -937,7 +938,6 @@ protected void loadQueryParameters(ShardQueryConfiguration config, Query setting // Get the most recent flag String mostRecentUnique = settings.findParameter(QueryParameters.MOST_RECENT_UNIQUE).getParameterValue().trim(); if (StringUtils.isNotBlank(mostRecentUnique)) { - this.getUniqueFields().setMostRecent(Boolean.valueOf(mostRecentUnique)); config.getUniqueFields().setMostRecent(Boolean.valueOf(mostRecentUnique)); } diff --git a/warehouse/query-core/src/main/java/datawave/query/util/sortedset/RewritableSortedSetImpl.java b/warehouse/query-core/src/main/java/datawave/query/util/sortedset/RewritableSortedSetImpl.java index 4fc42e16eba..0b71f9d43ca 100644 --- a/warehouse/query-core/src/main/java/datawave/query/util/sortedset/RewritableSortedSetImpl.java +++ b/warehouse/query-core/src/main/java/datawave/query/util/sortedset/RewritableSortedSetImpl.java @@ -24,7 +24,7 @@ */ public class RewritableSortedSetImpl implements RewritableSortedSet, Cloneable { private static Logger log = Logger.getLogger(RewritableSortedSetImpl.class); - // using a map to enable replacement of the actual set member (see uses of collisionSelection) + // using a map to enable replacement of the actual set member (see uses of addResolvingCollisions) protected NavigableMap set = null; // When the set contains X and we are adding Y where X == Y, then use this strategy // to decide which to keep. 
@@ -128,18 +128,18 @@ public boolean contains(Object o) { @Override public Iterator iterator() { - return set.keySet().iterator(); + return set.values().iterator(); } @Override public Object[] toArray() { - return set.keySet().toArray(); + return set.values().toArray(); } @SuppressWarnings({"unchecked"}) @Override public T[] toArray(T[] a) { - return set.keySet().toArray(a); + return set.values().toArray(a); } @Override @@ -153,11 +153,16 @@ public E get(E e) { } private boolean addResolvingCollisions(E e) { - if ((rewriteStrategy != null) && set.containsKey(e) && rewriteStrategy.rewrite(set.get(e), e)) { - set.remove(e); - } // return true if this is a new element to the set - return (set.put(e, e) == null); + if (set.containsKey(e)) { + if ((rewriteStrategy != null) && rewriteStrategy.rewrite(set.get(e), e)) { + set.put(e, e); + return true; + } + return false; + } + set.put(e, e); + return true; } @Override @@ -234,7 +239,7 @@ public E first() { QueryException qe = new QueryException(DatawaveErrorCode.FETCH_FIRST_ELEMENT_ERROR); throw (NoSuchElementException) (new NoSuchElementException().initCause(qe)); } else { - return first; + return set.get(first); } } @@ -250,13 +255,13 @@ public E last() { QueryException qe = new QueryException(DatawaveErrorCode.FETCH_LAST_ELEMENT_ERROR); throw (NoSuchElementException) (new NoSuchElementException().initCause(qe)); } else { - return last; + return set.get(last); } } @Override public String toString() { - return set.toString(); + return set.values().toString(); } /** From 454bb9aa2db20234609f51f35029bd19b5bd05d0 Mon Sep 17 00:00:00 2001 From: Ivan Bella Date: Fri, 31 May 2024 20:06:11 +0000 Subject: [PATCH 20/20] Updated to ensure the most recent unique functions parse correctly. 
--- .../query/attributes/UniqueFields.java | 39 +++++++++++++------ .../query/config/ShardQueryConfiguration.java | 2 +- .../functions/QueryFunctionsDescriptor.java | 9 +++++ .../functions/jexl/MostRecentUnique.java | 2 +- .../functions/jexl/MostRecentUniqueByDay.java | 2 +- .../jexl/MostRecentUniqueByHour.java | 2 +- .../jexl/MostRecentUniqueByMinute.java | 2 +- .../jexl/MostRecentUniqueByMonth.java | 2 +- .../jexl/MostRecentUniqueBySecond.java | 2 +- .../jexl/MostRecentUniqueByTenthOfHour.java | 2 +- .../jexl/MostRecentUniqueByYear.java | 2 +- .../query/planner/DefaultQueryPlanner.java | 1 + .../query/planner/QueryOptionsSwitch.java | 1 + .../test/java/datawave/query/UniqueTest.java | 15 +++++++ 14 files changed, 63 insertions(+), 20 deletions(-) diff --git a/warehouse/query-core/src/main/java/datawave/query/attributes/UniqueFields.java b/warehouse/query-core/src/main/java/datawave/query/attributes/UniqueFields.java index 32cc9e67f57..0c861f54e6b 100644 --- a/warehouse/query-core/src/main/java/datawave/query/attributes/UniqueFields.java +++ b/warehouse/query-core/src/main/java/datawave/query/attributes/UniqueFields.java @@ -32,6 +32,7 @@ public class UniqueFields implements Serializable, Cloneable { private final TreeMultimap fieldMap = TreeMultimap.create(); private boolean mostRecent = false; + private static String MOST_RECENT_UNIQUE = "_MOST_RECENT_"; /** * Returns a new {@link UniqueFields} parsed from this string. The provided string is expected to have the format returned by @@ -76,8 +77,12 @@ public static UniqueFields from(String string) { if (nextComma == -1 && nextStartBracket == -1) { String field = string.substring(currentIndex); if (!field.isEmpty()) { - // Add the field only if its not blank. Ignore cases with consecutive trailing commas like field1[ALL],, - uniqueFields.put(field, UniqueGranularity.ALL); + if (field.equals(MOST_RECENT_UNIQUE)) { + uniqueFields.setMostRecent(true); + } else { + // Add the field only if its not blank. 
Ignore cases with consecutive trailing commas like field1[ALL],, + uniqueFields.put(field, UniqueGranularity.ALL); + } } break; // There are no more fields to be parsed. } else if (nextComma != -1 && (nextStartBracket == -1 || nextComma < nextStartBracket)) { @@ -91,8 +96,12 @@ public static UniqueFields from(String string) { // Add the field with the ALL granularity. String field = string.substring(currentIndex, nextComma); if (!field.isEmpty()) { - // Add the field only if its not blank. Ignore cases with consecutive commas like field1,,field2[DAY] - uniqueFields.put(field, UniqueGranularity.ALL); + if (field.equals(MOST_RECENT_UNIQUE)) { + uniqueFields.setMostRecent(true); + } else { + // Add the field only if its not blank. Ignore cases with consecutive commas like field1,,field2[DAY] + uniqueFields.put(field, UniqueGranularity.ALL); + } } currentIndex = nextComma + 1; // Advance to the start of the next field. } else { @@ -104,14 +113,18 @@ public static UniqueFields from(String string) { String field = string.substring(currentIndex, nextStartBracket); int nextEndBracket = string.indexOf(Constants.BRACKET_END, currentIndex); if (!field.isEmpty()) { - String granularityList = string.substring((nextStartBracket + 1), nextEndBracket); - // An empty granularity list, e.g. field[] is equivalent to field[ALL]. - if (granularityList.isEmpty()) { - uniqueFields.put(field, UniqueGranularity.ALL); + if (field.equals(MOST_RECENT_UNIQUE)) { + uniqueFields.setMostRecent(true); } else { - String[] granularities = StringUtils.split(granularityList, Constants.COMMA); - for (String granularity : granularities) { - uniqueFields.put(field, parseGranularity(granularity)); + String granularityList = string.substring((nextStartBracket + 1), nextEndBracket); + // An empty granularity list, e.g. field[] is equivalent to field[ALL]. 
+ if (granularityList.isEmpty()) { + uniqueFields.put(field, UniqueGranularity.ALL); + } else { + String[] granularities = StringUtils.split(granularityList, Constants.COMMA); + for (String granularity : granularities) { + uniqueFields.put(field, parseGranularity(granularity)); + } } } } @@ -308,6 +321,10 @@ public String transformValue(String field, String value) { @Override public String toString() { StringBuilder sb = new StringBuilder(); + if (mostRecent) { + sb.append(MOST_RECENT_UNIQUE); + sb.append(Constants.COMMA); + } Iterator fieldIterator = fieldMap.keySet().iterator(); while (fieldIterator.hasNext()) { // Write the field. diff --git a/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java b/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java index dbc717578b6..2dbfdf655b4 100644 --- a/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java +++ b/warehouse/query-core/src/main/java/datawave/query/config/ShardQueryConfiguration.java @@ -389,7 +389,6 @@ public class ShardQueryConfiguration extends GenericQueryConfiguration implement private int groupFieldsBatchSize; private boolean accrueStats = false; private UniqueFields uniqueFields = new UniqueFields(); - private boolean mostRecentUnique = false; private boolean cacheModel = false; /** * should the sizes of documents be tracked for this query @@ -685,6 +684,7 @@ public ShardQueryConfiguration(ShardQueryConfiguration other) { this.setGroupFieldsBatchSize(other.getGroupFieldsBatchSize()); this.setAccrueStats(other.getAccrueStats()); this.setUniqueFields(other.getUniqueFields()); + log.info("Checkpointing with " + getUniqueFields()); this.setUniqueCacheBufferSize(other.getUniqueCacheBufferSize()); this.setCacheModel(other.getCacheModel()); this.setTrackSizes(other.isTrackSizes()); diff --git a/warehouse/query-core/src/main/java/datawave/query/jexl/functions/QueryFunctionsDescriptor.java 
b/warehouse/query-core/src/main/java/datawave/query/jexl/functions/QueryFunctionsDescriptor.java index 2aa3849546c..0dbcc3de4f1 100644 --- a/warehouse/query-core/src/main/java/datawave/query/jexl/functions/QueryFunctionsDescriptor.java +++ b/warehouse/query-core/src/main/java/datawave/query/jexl/functions/QueryFunctionsDescriptor.java @@ -205,6 +205,15 @@ private static void verify(String name, int numArgs) { case QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_DAY_FUNCTION: case QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_MONTH_FUNCTION: case QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_YEAR_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryFunctions.UNIQUE_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_MILLISECOND_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_SECOND_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_MINUTE_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_TENTH_OF_HOUR_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_HOUR_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_DAY_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_MONTH_FUNCTION: + case QueryFunctions.MOST_RECENT_PREFIX + QueryOptionsFromQueryVisitor.UniqueFunction.UNIQUE_BY_YEAR_FUNCTION: case QueryFunctions.GROUPBY_FUNCTION: case QueryFunctions.EXCERPT_FIELDS_FUNCTION: case QueryFunctions.MATCH_REGEX: diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUnique.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUnique.java index 1e0de067959..ed94abec1ae 100644 --- 
a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUnique.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUnique.java @@ -66,7 +66,7 @@ public String toString() { @Override public QueryFunction duplicate() { - return new Unique(); + return new MostRecentUnique(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByDay.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByDay.java index 4b0449246e8..845bd863ae3 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByDay.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByDay.java @@ -17,6 +17,6 @@ public MostRecentUniqueByDay() { @Override public QueryFunction duplicate() { - return new UniqueByDay(); + return new MostRecentUniqueByDay(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByHour.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByHour.java index fcfec8ddcc4..c831dac8aec 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByHour.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByHour.java @@ -18,6 +18,6 @@ public MostRecentUniqueByHour() { @Override public QueryFunction duplicate() { - return new UniqueByHour(); + return new MostRecentUniqueByHour(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMinute.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMinute.java index fec744f7a94..f8b04bc4050 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMinute.java 
+++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMinute.java @@ -18,6 +18,6 @@ public MostRecentUniqueByMinute() { @Override public QueryFunction duplicate() { - return new UniqueByMinute(); + return new MostRecentUniqueByMinute(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMonth.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMonth.java index 79a5a494746..3c611479dd5 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMonth.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByMonth.java @@ -18,6 +18,6 @@ public MostRecentUniqueByMonth() { @Override public QueryFunction duplicate() { - return new UniqueByMonth(); + return new MostRecentUniqueByMonth(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueBySecond.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueBySecond.java index 369091107a6..8ff9eedbb45 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueBySecond.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueBySecond.java @@ -18,6 +18,6 @@ public MostRecentUniqueBySecond() { @Override public QueryFunction duplicate() { - return new UniqueBySecond(); + return new MostRecentUniqueBySecond(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByTenthOfHour.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByTenthOfHour.java index a635eec6256..81948a62cb3 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByTenthOfHour.java +++ 
b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByTenthOfHour.java @@ -18,6 +18,6 @@ public MostRecentUniqueByTenthOfHour() { @Override public QueryFunction duplicate() { - return new UniqueByTenthOfHour(); + return new MostRecentUniqueByTenthOfHour(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByYear.java b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByYear.java index 68b6397cf5d..24d8c8c4471 100644 --- a/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByYear.java +++ b/warehouse/query-core/src/main/java/datawave/query/language/functions/jexl/MostRecentUniqueByYear.java @@ -18,6 +18,6 @@ public MostRecentUniqueByYear() { @Override public QueryFunction duplicate() { - return new UniqueByYear(); + return new MostRecentUniqueByYear(); } } diff --git a/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java b/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java index 30821b87c92..3c7fd8ce575 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/DefaultQueryPlanner.java @@ -533,6 +533,7 @@ private void configureIterator(ShardQueryConfiguration config, IteratorSetting c addOption(cfg, QueryOptions.GROUP_FIELDS_BATCH_SIZE, config.getGroupFieldsBatchSizeAsString(), true); addOption(cfg, QueryOptions.UNIQUE_FIELDS, config.getUniqueFields().toString(), true); if (config.getUniqueFields().isMostRecent()) { + // this may be redundant with the uniqueFields.toString(), but other code relies on this explicitly being set addOption(cfg, QueryOptions.MOST_RECENT_UNIQUE, Boolean.toString(true), false); addOption(cfg, QueryOptions.UNIQUE_CACHE_BUFFER_SIZE, Integer.toString(config.getUniqueCacheBufferSize()), false); } diff --git 
a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java index 5959db9038b..9c6eaedf486 100644 --- a/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java +++ b/warehouse/query-core/src/main/java/datawave/query/planner/QueryOptionsSwitch.java @@ -67,6 +67,7 @@ public static void apply(Map optionsMap, ShardQueryConfiguration config.setUniqueFields(uniqueFields); break; case QueryParameters.MOST_RECENT_UNIQUE: + log.info("Setting unique fields to be most recent"); config.getUniqueFields().setMostRecent(Boolean.parseBoolean(value)); break; case QueryParameters.EXCERPT_FIELDS: diff --git a/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java b/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java index b4032285ac1..42e442f4276 100644 --- a/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java +++ b/warehouse/query-core/src/test/java/datawave/query/UniqueTest.java @@ -373,6 +373,21 @@ public void testUniquenessWithModelAliases() throws Exception { runTestQueryWithUniqueness(expected, queryString, startDate, endDate, extraParameters); } + @Test + public void testRecentUniquenessWithModelAliases() throws Exception { + Map extraParameters = new HashMap<>(); + extraParameters.put("include.grouping.context", "true"); + extraParameters.put("query.syntax", "LUCENE"); + + Set> expected = new HashSet<>(); + expected.add(Sets.newHashSet(WiseGuysIngest.sopranoUID, WiseGuysIngest.corleoneUID, WiseGuysIngest.caponeUID)); + Date startDate = format.parse("20091231"); + Date endDate = format.parse("20150101"); + + String queryString = "UUID:/^[CS].*/ AND #MOST_RECENT_UNIQUE(BOTH_NULL)"; + runTestQueryWithUniqueness(expected, queryString, startDate, endDate, extraParameters); + } + @Test public void testMostRecentUniqueness() throws Exception { Map extraParameters = new HashMap<>();

    Table NameConnection PoolAuthorizationsReload Interval (ms)Max RowsLast RefreshRefreshing Now
    ").append(cache.getTableName()).append("").append(cache.getConnectionPoolName()).append("