diff --git a/.bettercodehub.yml b/.bettercodehub.yml deleted file mode 100644 index 09dfb086a..000000000 --- a/.bettercodehub.yml +++ /dev/null @@ -1,4 +0,0 @@ -component_depth: 1 -languages: -- java - diff --git a/.github/pages/javadoc-latest.html b/.github/pages/javadoc-latest.html deleted file mode 100644 index cfb2e03bd..000000000 --- a/.github/pages/javadoc-latest.html +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - -redirecting to newest documentation... Click here if nothing happens. - - \ No newline at end of file diff --git a/.github/pages/latest.html b/.github/pages/latest.html deleted file mode 100644 index cfb2e03bd..000000000 --- a/.github/pages/latest.html +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - -redirecting to newest documentation... Click here if nothing happens. - - \ No newline at end of file diff --git a/.github/workflows/lint.yml b/.github/workflows/check_version.yml similarity index 68% rename from .github/workflows/lint.yml rename to .github/workflows/check_version.yml index b93711e40..770bf137c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/check_version.yml @@ -1,4 +1,5 @@ -name: lint +# Checks if version number has been updated +name: Version Check on: pull_request jobs: @@ -6,5 +7,5 @@ jobs: name: Release Tag existence Check runs-on: ubuntu-latest steps: - - uses: actions/checkout@main + - uses: actions/checkout@v3 - run: .github/scripts/tagcheck.sh v$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout) \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml deleted file mode 100644 index 0da84be32..000000000 --- a/.github/workflows/ci.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: ci -on: - push: - branches: - - main -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up JDK 11 - uses: actions/setup-java@v2 - with: - java-version: '11' - distribution: 'adopt' - - name: Cache Maven packages - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} - restore-keys: ${{ runner.os }}-m2 - - uses: actions/setup-python@v2 - with: - python-version: 3.x - - shell: bash - run: mvn help:evaluate -Dexpression=major.minor.version -q -DforceStdout > version.log - - shell: bash - run: mvn help:evaluate -Dexpression=project.artifactId -q -DforceStdout > artifactid.log - - name: Set env version - run: echo "MM_VERSION=$(cat version.log)" >> $GITHUB_ENV - - name: Set env version - run: echo "RELEASE_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)" >> $GITHUB_ENV - - name: Set env name - run: echo "RELEASE_ARTIFACTID=$(cat artifactid.log)" >> $GITHUB_ENV - - name: test - run: echo ${{ env.RELEASE_VERSION }} ${{ env.RELEASE_ARTIFACTID }} - - run: pip install mkdocs-material - - run: pip install mkdocs-macros-plugin - - run: sed -i "s/\$VERSION/$(cat version.log)/g" mkdocs.yml - - run: sed -i "s/\$RELEASE_VERSION/${{ env.RELEASE_VERSION }}/g" mkdocs.yml - - run: mkdocs build -d site/$(cat version.log) - - run: mvn install -Dmaven.test.skip=true - - run: mvn javadoc:javadoc - - run: sed -i "s/\$VERSION/$(cat version.log)/g" .github/pages/latest.html - - run: sed -i "s/\$VERSION/$(cat version.log)/g" .github/pages/javadoc-latest.html - - name: Deploy Site - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./site/${{ env.MM_VERSION }} - destination_dir: ./docs/${{ env.MM_VERSION }} - - name: Deploy Javadoc - uses: peaceiris/actions-gh-pages@v3 - with: - 
github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: ./javadoc/${{ env.MM_VERSION }} - destination_dir: ./javadoc/${{ env.MM_VERSION }} - - name: Deploy latest.html - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: .github/pages/ - keep_files: true - destination_dir: ./docs/ - - name: Deploy latest.html - uses: peaceiris/actions-gh-pages@v3 - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: .github/pages/ - keep_files: true - destination_dir: ./docs/ - - run: mkdir iguana - - run: cp target/start-iguana.sh iguana/ - - run: cp target/iguana-${{ env.RELEASE_VERSION }}.jar iguana/iguana-${{ env.RELEASE_VERSION }}.jar - - run: cp example-suite.yml iguana/ - - run: zip -r iguana-${{ env.RELEASE_VERSION }}.zip iguana/ - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: v${{ env.RELEASE_VERSION }} - release_name: version ${{ env.RELEASE_VERSION }} - draft: false - prerelease: false - body: "" - - uses: actions/upload-release-asset@v1.0.1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: iguana-${{ env.RELEASE_VERSION }}.zip - asset_name: iguana-${{ env.RELEASE_VERSION }}.zip - asset_content_type: application/zip - - name: Publish package - run: mvn --batch-mode deploy -Dmaven.test.skip=true - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml new file mode 100644 index 000000000..52f1e6b13 --- /dev/null +++ b/.github/workflows/deploy.yml @@ -0,0 +1,174 @@ +name: Deployment + +on: + push: + branches: + - main + +jobs: + find_version: + name: Find Release Version + runs-on: ubuntu-latest + outputs: + RELEASE_VERSION: ${{ steps.step_find.outputs.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'adopt' + cache: 'maven' + - name: 'Find release version' + run: echo "RELEASE_VERSION=$(mvn help:evaluate -Dexpression=project.version -q -DforceStdout)" >> $GITHUB_OUTPUT + id: step_find + + deploy_to_maven: + name: Deploy to Maven Repository + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'adopt' + cache: 'maven' + - name: Publish package + run: mvn --batch-mode deploy -Dmaven.test.skip=true + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: 'Upload artifact' + uses: actions/upload-artifact@v4 + with: + if-no-files-found: error + name: 'iguana-jar' + path: 'target/' + + + compile_native: + name: Compile Native Executable + runs-on: ubuntu-latest + needs: find_version + steps: + - uses: actions/checkout@v4 + - name: Set up GraalVM + uses: graalvm/setup-graalvm@v1 + with: + java-version: 22 + components: native-image + cache: maven + - name: 'Compile native-binary' + run: 'mvn -Dagent=true -Pnative package' + - name: 'Upload artifact' + uses: actions/upload-artifact@v4 + with: + name: 'iguana-native' + path: 'target/iguana' + if-no-files-found: error + + deploy_docs: + name: Deploy Documentation + runs-on: ubuntu-latest + needs: find_version + env: + RELEASE_VERSION: ${{ needs.find_version.outputs.RELEASE_VERSION }} + steps: + - uses: actions/checkout@v4 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution:
'adopt' + cache: 'maven' + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: 3.x + - run: pip install mkdocs-material + - run: pip install mkdocs-macros-plugin + - run: sed -i "s/\$RELEASE_VERSION/${{ env.RELEASE_VERSION }}/g" mkdocs.yml + + - run: mkdocs build -d site/${{ env.RELEASE_VERSION }} + - run: mvn javadoc:javadoc + + - name: Deploy Site + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./site/${{ env.RELEASE_VERSION }} + destination_dir: ./docs/${{ env.RELEASE_VERSION }} + - name: Deploy Site + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./site/${{ env.RELEASE_VERSION }} + destination_dir: ./docs/latest + + - name: Deploy Javadoc + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./javadoc/${{ env.RELEASE_VERSION }}/apidocs + destination_dir: ./javadoc/${{ env.RELEASE_VERSION }} + - name: Deploy Javadoc + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./javadoc/${{ env.RELEASE_VERSION }}/apidocs + destination_dir: ./javadoc/latest + + - name: Find Ontology Version + run: echo "ONTOLOGY_VERSION=$(grep 'versionIRI' schema/iguana.owx | grep -Po '[0-9]+.[0-9]+.[0-9]+')" >> $GITHUB_OUTPUT + id: find_ontology_version + + - name: Fetch Ontologies + run: git fetch && git checkout origin/gh-pages ontology/ + - run: mkdir -p ontology/${{ steps.find_ontology_version.outputs.ONTOLOGY_VERSION }} + - run: cp schema/iguana.owx ontology/${{ steps.find_ontology_version.outputs.ONTOLOGY_VERSION }}/iguana.owx + - run: cp schema/iguana.owx ontology/iguana.owx + + - name: Deploy Ontology + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./ontology/ + destination_dir: ./ontology/ + + + deploy_gh_release: + name: Publish GitHub Release + runs-on: ubuntu-latest + needs: [compile_native, deploy_to_maven, find_version] + env: + RELEASE_VERSION: ${{ needs.find_version.outputs.RELEASE_VERSION }} + + steps: + - uses: actions/checkout@v4 + - name: Download artifacts from previous jobs + uses: actions/download-artifact@v4 + with: + path: artifacts/ + merge-multiple: true + - name: Prepare files + run: | + mkdir iguana + cp artifacts/start-iguana.sh iguana/ + cp artifacts/iguana.jar iguana/iguana.jar + cp artifacts/iguana iguana/iguana + cp example-suite.yml iguana/ + zip -r iguana-${{ env.RELEASE_VERSION }}.zip iguana/ + - name: Create Release + uses: softprops/action-gh-release@v2 + with: + tag_name: v${{ env.RELEASE_VERSION }} + name: version ${{ env.RELEASE_VERSION }} + draft: false + prerelease: false + body: "" + fail_on_unmatched_files: true + make_latest: true + token: ${{ secrets.GITHUB_TOKEN }} + files: | + iguana-${{ env.RELEASE_VERSION }}.zip + artifacts/iguana.jar + artifacts/iguana diff --git a/.github/workflows/maven.yml b/.github/workflows/maven.yml deleted file mode 100644 index 9d5b56ab7..000000000 --- a/.github/workflows/maven.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: testing -on: - push: - branches: - - develop -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - name: Set up JDK 11 - uses: actions/setup-java@v2 - with: - java-version: '11' - distribution: 'adopt' - - name: Cache Maven packages - uses: actions/cache@v2 - with: - path: ~/.m2 - key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} - restore-keys: ${{ runner.os }}-m2 - - name: Testing the 
Java code - run: mvn install diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..3931192b6 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,55 @@ +name: Tests + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +on: + push: + branches: + - develop + pull_request: + branches: + - develop + - main + +jobs: + tests: + name: Compile and Run Tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + java-version: '17' + distribution: 'adopt' + - name: Cache Maven packages + uses: actions/cache@v2 + with: + path: ~/.m2 + key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }} + restore-keys: ${{ runner.os }}-m2 + - name: Testing the Java code + run: mvn package + + # Only run for pull request on main or if pushed to develop + compile_native: + name: Test Native Executable Compilation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up GraalVM + uses: graalvm/setup-graalvm@v1 + with: + java-version: 22 + components: native-image + cache: maven + - name: 'Compile native-binary and run tests' + run: 'mvn -Pnative -Dagent=true package' + - name: 'Upload artifact' + uses: actions/upload-artifact@v4 + with: + name: 'iguana-native' + path: 'target/iguana' + if-no-files-found: error diff --git a/.gitignore b/.gitignore index dfd0dc545..bc02395e7 100644 --- a/.gitignore +++ b/.gitignore @@ -5,8 +5,8 @@ tmp_ser **/queryInstances/* -# Created by https://www.toptal.com/developers/gitignore/api/java,maven,intellij,eclipse -# Edit at https://www.toptal.com/developers/gitignore?templates=java,maven,intellij,eclipse +# Created by https://www.toptal.com/developers/gitignore/api/java,maven,intellij+all,eclipse +# Edit at https://www.toptal.com/developers/gitignore?templates=java,maven,intellij+all,eclipse ### Eclipse ### .metadata @@ -74,7 +74,7 @@ local.properties # Spring Boot Tooling .sts4-cache/ -### Intellij ### +### Intellij+all ### # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 @@ -153,39 +153,14 @@ fabric.properties # Android studio 3.1+ serialized cache file .idea/caches/build_file_checksums.ser -### Intellij Patch ### -# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 +### Intellij+all Patch ### +# Ignore everything but code style settings and run configurations +# that are supposed to be shared within teams. 
-# *.iml -# modules.xml -# .idea/misc.xml -# *.ipr - -# Sonarlint plugin -# https://plugins.jetbrains.com/plugin/7973-sonarlint -.idea/**/sonarlint/ - -# SonarQube Plugin -# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin -.idea/**/sonarIssues.xml - -# Markdown Navigator plugin -# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced -.idea/**/markdown-navigator.xml -.idea/**/markdown-navigator-enh.xml -.idea/**/markdown-navigator/ +.idea/* -# Cache file creation bug -# See https://youtrack.jetbrains.com/issue/JBR-2257 -.idea/$CACHE_FILE$ - -# CodeStream plugin -# https://plugins.jetbrains.com/plugin/12206-codestream -.idea/codestream.xml - -# Azure Toolkit for IntelliJ plugin -# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij -.idea/**/azureSettings.xml +!.idea/codeStyles +!.idea/runConfigurations ### Java ### # Compiled class file @@ -232,6 +207,4 @@ buildNumber.properties # JDT-specific (Eclipse Java Development Tools) .classpath -# End of https://www.toptal.com/developers/gitignore/api/java,maven,intellij,eclipse - - +# End of https://www.toptal.com/developers/gitignore/api/java,maven,intellij+all,eclipse diff --git a/README.md b/README.md deleted file mode 100644 index 1b8c5f00f..000000000 --- a/README.md +++ /dev/null @@ -1,148 +0,0 @@ -[![GitLicense](https://gitlicense.com/badge/dice-group/IGUANA)](https://gitlicense.com/license/dice-group/IGUANA) -![Java CI with Maven](https://github.com/dice-group/IGUANA/workflows/Java%20CI%20with%20Maven/badge.svg)[![BCH compliance](https://bettercodehub.com/edge/badge/AKSW/IGUANA?branch=master)](https://bettercodehub.com/) -[![Codacy Badge](https://api.codacy.com/project/badge/Grade/9668460dd04c411fab8bf5ee9c161124)](https://www.codacy.com/app/TortugaAttack/IGUANA?utm_source=github.com&utm_medium=referral&utm_content=AKSW/IGUANA&utm_campaign=Badge_Grade) -[![Project Stats](https://www.openhub.net/p/iguana-benchmark/widgets/project_thin_badge.gif)](https://www.openhub.net/p/iguana-benchmark) - - -# IGUANA - -IGUANA Logo - -## ABOUT - - -Semantic Web is becoming more important and it's data is growing each day. Triple stores are the backbone here, managing these data. -Hence it is very important that the triple store must scale on the data and can handle several users. -Current Benchmark approaches could not provide a realistic scenario on realistic data and could not be adjustet for your needs very easily. -Additionally Question Answering systems and Natural Language Processing systems are becoming more and more popular and thus needs to be stresstested as well. -Further on it was impossible to compare results for different benchmarks. - -Iguana is an an Integerated suite for benchmarking read/write performance of HTTP endpoints and CLI Applications.
which solves all these issues. -It provides an enviroment which ... - - -+ ... is highly configurable -+ ... provides a realistic scneario benchmark -+ ... works on every dataset -+ ... works on SPARQL HTTP endpoints -+ ... works on HTTP Get & Post endpoints -+ ... works on CLI applications -+ and is easily extendable - - -For further Information visit - -[iguana-benchmark.eu](http://iguana-benchmark.eu) - -[Documentation](http://iguana-benchmark.eu/docs/3.3/) - - -# Getting Started - -# Prerequisites - -You need to install Java 11 or greater. -In Ubuntu you can install these using the following commands - -``` -sudo apt-get install java -``` - -# Iguana Modules - -Iguana consists of two modules - -1. **corecontroller**: This will benchmark the systems -2. **resultprocessor**: This will calculate the Metrics and save the raw benchmark results - -## **corecontroller** - -The **corecontroller** will benchmark your system. It should be started on the same machine the is started. - -## **resultprocessor** - -The **resultprocessor** will calculate the metrics. -By default it stores its result in a ntriple file. But you may configure it, to write the results directly to a Triple Store. -On the processing side, it calculates various metrics. - -Per run metrics: -* Query Mixes Per Hour (QMPH) -* Number of Queries Per Hour (NoQPH) -* Number of Queries (NoQ) -* Average Queries Per Second (AvgQPS) - -Per query metrics: -* Queries Per Second (QPS) - * Number of successful and failed queries - * result size - * queries per second - * sum of execution times - -You can change these in the Iguana Benchmark suite config. - -If you use the [basic configuration](https://github.com/dice-group/IGUANA/blob/master/example-suite.yml), it will save all mentioned metrics to a file called `results_{{DATE_RP_STARTED}}.nt` - - -# Setup Iguana - -## Download -Please download the release zip **iguana-x.y.z.zip** from the newest release available [here](https://github.com/dice-group/IGUANA/releases/latest): - -``` -mkdir iguana -wget https://github.com/dice-group/IGUANA/releases/download/v3.3.2/iguana-3.3.2.zip -unzip iguana-3.3.2.zip -``` - - -It contains the following files: - -* iguana.corecontroller-X.Y.Z.jar -* start-iguana.sh -* example-suite.yml - -# Run Your Benchmarks - -## Create a Configuration - -You can use the [basic configuration](https://github.com/dice-group/IGUANA/blob/master/example-suite.yml) we provide and modify it to your needs. -For further information please visit our [configuration](http://iguana-benchmark.eu/docs/3.2/usage/configuration/) and [Stresstest](http://iguana-benchmark.eu/docs/3.0/usage/stresstest/) wiki pages. For a detailed, step-by-step instruction please attend our [tutorial](http://iguana-benchmark.eu/docs/3.2/usage/tutorial/). 
- - - -## Execute the Benchmark - -Use the start script -``` -./start-iguana.sh example-suite.yml -``` -Now Iguana will execute the example benchmark suite configured in the example-suite.yml file - - -# How to Cite - -```bibtex -@InProceedings{10.1007/978-3-319-68204-4_5, -author="Conrads, Lixi -and Lehmann, Jens -and Saleem, Muhammad -and Morsey, Mohamed -and Ngonga Ngomo, Axel-Cyrille", -editor="d'Amato, Claudia -and Fernandez, Miriam -and Tamma, Valentina -and Lecue, Freddy -and Cudr{\'e}-Mauroux, Philippe -and Sequeda, Juan -and Lange, Christoph -and Heflin, Jeff", -title="Iguana: A Generic Framework for Benchmarking the Read-Write Performance of Triple Stores", -booktitle="The Semantic Web -- ISWC 2017", -year="2017", -publisher="Springer International Publishing", -address="Cham", -pages="48--65", -abstract="The performance of triples stores is crucial for applications driven by RDF. Several benchmarks have been proposed that assess the performance of triple stores. However, no integrated benchmark-independent execution framework for these benchmarks has yet been provided. We propose a novel SPARQL benchmark execution framework called Iguana. Our framework complements benchmarks by providing an execution environment which can measure the performance of triple stores during data loading, data updates as well as under different loads and parallel requests. Moreover, it allows a uniform comparison of results on different benchmarks. We execute the FEASIBLE and DBPSB benchmarks using the Iguana framework and measure the performance of popular triple stores under updates and parallel user requests. We compare our results (See https://doi.org/10.6084/m9.figshare.c.3767501.v1) with state-of-the-art benchmarking results and show that our benchmark execution framework can unveil new insights pertaining to the performance of triple stores.", -isbn="978-3-319-68204-4" -} -``` diff --git a/README.md b/README.md new file mode 120000 index 000000000..0e01b4308 --- /dev/null +++ b/README.md @@ -0,0 +1 @@ +docs/README.md \ No newline at end of file diff --git a/customs/images/Iguana_new_logo6.png b/customs/images/Iguana_new_logo6.png deleted file mode 100644 index 988f32f4c..000000000 Binary files a/customs/images/Iguana_new_logo6.png and /dev/null differ diff --git a/customs/images/iguana-result-schema.png b/customs/images/iguana-result-schema.png deleted file mode 100644 index 2781fcace..000000000 Binary files a/customs/images/iguana-result-schema.png and /dev/null differ diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..afc6d7e05 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,107 @@ +
+ IGUANA Logo
+
+# IGUANA
+Iguana is a benchmarking framework for testing the read performance of HTTP endpoints.
+It is mostly designed for benchmarking triplestores by using the SPARQL protocol.
+Iguana stresstests endpoints by simulating users that send a set of queries independently of each other.
+
+Benchmarks are configured using a YAML file, which allows them to be easily repeated and adjusted.
+Results are stored in RDF files and can also be exported as CSV files.
+
+## Features
+- Benchmarking of (SPARQL) HTTP endpoints
+- Reusable configuration
+- Calculation of various metrics for better comparisons
+- Processing of HTTP responses (e.g., counting results)
+
+## Setup
+
+### Prerequisites
+
+If you're using the native version of IGUANA, you need to have at least an `x86-64-v3` (Intel Haswell and AMD Excavator or newer) system that is running Linux.
+
+If you're using the Java version of IGUANA, you need to have `Java 17` or higher installed.
+On Ubuntu, it can be installed by executing the following command:
+
+```bash
+sudo apt install openjdk-17-jre
+```
+
+### Download
+The latest release can be downloaded at https://github.com/dice-group/IGUANA/releases/latest.
+The zip file contains four files:
+
+* `iguana`
+* `iguana.jar`
+* `example-suite.yml`
+* `start-iguana.sh`
+
+The `iguana` file is a native executable for IGUANA that has been compiled with GraalVM.
+The `iguana.jar` file is the standard Java executable for IGUANA.
+The `start-iguana.sh` script is a helper script to start IGUANA with the `iguana.jar` file.
+
+### Configuration
+The `example-suite.yml` file contains an extensive configuration for a benchmark suite.
+It can be used as a starting point for your own benchmark suite.
+For a detailed explanation of the configuration, see the [configuration](./configuration/overview.md) documentation.
+
+## Usage
+
+### Native Version
+
+Start Iguana with a benchmark suite (e.g., the `example-suite.yml`) by executing the binary:
+
+```bash
+./iguana example-suite.yml
+```
+
+### Java Version
+
+Start Iguana with a benchmark suite (e.g., the `example-suite.yml`) either by using the start script:
+
+```bash
+./start-iguana.sh example-suite.yml
+```
+
+or by directly executing the jar file:
+
+```bash
+java -jar iguana.jar example-suite.yml
+```
+
+If you're using the script, you can use JVM arguments by setting the environment variable `IGUANA_JVM`.
+For example, to let Iguana use 4 GB of RAM, you can set `IGUANA_JVM` as follows:
+
+```bash
+export IGUANA_JVM=-Xmx4g
+```
+
+# How to Cite
+
+```bibtex
+@InProceedings{10.1007/978-3-319-68204-4_5,
+author="Conrads, Lixi
+and Lehmann, Jens
+and Saleem, Muhammad
+and Morsey, Mohamed
+and Ngonga Ngomo, Axel-Cyrille",
+editor="d'Amato, Claudia
+and Fernandez, Miriam
+and Tamma, Valentina
+and Lecue, Freddy
+and Cudr{\'e}-Mauroux, Philippe
+and Sequeda, Juan
+and Lange, Christoph
+and Heflin, Jeff",
+title="Iguana: A Generic Framework for Benchmarking the Read-Write Performance of Triple Stores",
+booktitle="The Semantic Web -- ISWC 2017",
+year="2017",
+publisher="Springer International Publishing",
+address="Cham",
+pages="48--65",
+abstract="The performance of triples stores is crucial for applications driven by RDF. Several benchmarks have been proposed that assess the performance of triple stores. However, no integrated benchmark-independent execution framework for these benchmarks has yet been provided. We propose a novel SPARQL benchmark execution framework called Iguana.
Our framework complements benchmarks by providing an execution environment which can measure the performance of triple stores during data loading, data updates as well as under different loads and parallel requests. Moreover, it allows a uniform comparison of results on different benchmarks. We execute the FEASIBLE and DBPSB benchmarks using the Iguana framework and measure the performance of popular triple stores under updates and parallel user requests. We compare our results (See https://doi.org/10.6084/m9.figshare.c.3767501.v1) with state-of-the-art benchmarking results and show that our benchmark execution framework can unveil new insights pertaining to the performance of triple stores.", +isbn="978-3-319-68204-4" +} +``` \ No newline at end of file diff --git a/docs/about.md b/docs/about.md deleted file mode 100644 index 37e2a0ccc..000000000 --- a/docs/about.md +++ /dev/null @@ -1,35 +0,0 @@ -# Iguana -Iguana is an an Integerated suite for benchmarking read/write performance of HTTP endpoints and CLI Applications. -Semantic Web is becoming more important and it's data is growing each day. Triple stores are the backbone here, managing these data. Hence it is very important that the triple store must scale on the data and can handle several users. Current Benchmark approaches could not provide a realistic scenario on realistic data and could not be adjustet for your needs very easily. Additionally Question Answering systems and Natural Language Processing systems are becoming more and more popular and thus needs to be stresstested as well. Further on it was impossible to compare results for different benchmarks. - -Iguana tries to solve all these issues. It provides an enviroment which ... - -* is highly configurable -* provides a realistic scneario benchmark -* works on every dataset -* works on SPARQL HTTP endpoints -* works on HTTP Get & Post endpoints -* works on CLI applications -* and is easily extendable - -## What is Iguana - -Iguana is a HTTP and CLI read/write performance benchmark framework suite. -It can stresstest HTTP get and post endpoints as well as CLI applications using a bunch of simulated users which will bombard the endpoint using queries. -Queries can be anything. SPARQL, SQL, Text and anything else you can fit in one line. - -## What can be benchmarked - -Iguana is capable of benchmarking and stresstesting the following applications - -* HTTP GET and POST endpoint (e.g. Triple Stores, REST Services, Question Answering endpoints) -* CLI Applications which either - * exit after every query - * or awaiting input after each query - -## What Benchmarks are possible - -Every simulated User (named Worker in the following) gets a set of queries. -These queries have to be saved in one file, whereas each query is one line. -Hence everything you can fit in one line (e.g a SPARQL query, a text question, an RDF document) can be used as a query and a set of these queries represent the benchmark. -Iguana will then let every Worker execute these queries against the endpoint. diff --git a/docs/architecture.md b/docs/architecture.md deleted file mode 100644 index 01f87b401..000000000 --- a/docs/architecture.md +++ /dev/null @@ -1,71 +0,0 @@ -# Architecture - -Iguanas architecture is build as generic as possible to ensure that your benchmark can be executed while you only have -to create a configuration file which fits your needs. -So ideally you do not need to code anything and can use Iguana out of the box. 
- -Iguana will parse your Configuration (YAML or JSON format) and will read which Endpoints/Applications you want to benchmark. -What datasets if you have any and what your benchmark should accomplish. -Do you just want to check how good your database/triple store performs against the state of the art? -Does your new version out performs the old version? -Do you want to check read and write performance? -... - -Whatever you want to do you just need to provide Iguana your tested applications, what to benchmark and which queries to use. - -Iguana relys mainly on HTTP libraries, the JENA framework and java 11. - - -## Overview - - -Iguana will read the configuration, parse it and executes for each specified datasets, each specified connection with the benchmark tasks you specified. -After the executions the results will be written as RDF. Either to a NTriple file or directly to a triple store. -The results can be queried itself using SPARQL. - -Iguana currently consists of on implemented Task, the Stresstest. -However, this task is very configurable and most definetly will met your needs if you want performance measurement. -It starts a user defined amount of Workers, which will try to simulate real users/applications querying your Endpoint/Application. - - -## Components - -Iguana consists of two components, the core controller and the result processor. - -### **core controller** - -The core which implements the Tasks and Workers to use. How HTTP responses should be handled. -How to analyze the benchmark queries to give a little bit more extra information in the results. - - -### **result processor** - -The result processor consist of the metrics to apply to the query execution results and how to save the results. -Most of the SOtA metrics are implemented in Iguana. If one's missing it is pretty easy to add a metric though. - -By default it stores its result in a ntriple file. But you may configure it, to write the results directly to a Triple Store. -On the processing side, it calculates various metrics. - -Per run metrics: -* Query Mixes Per Hour (QMPH) -* Number of Queries Per Hour (NoQPH) -* Number of Queries (NoQ) -* Average Queries Per Second (AvgQPS) - -Per query metrics: -* Queries Per Second (QPS) - * Number of successful and failed queries - * result size - * queries per second - * sum of execution times - -You can change these in the Iguana Benchmark suite config. - -If you use the [basic configuration](https://github.com/dice-group/IGUANA/blob/master/example-suite.yml), it will save all mentioned metrics to a file called `results_{DD}-{MM}-{YYYY}_{HH}-{mm}.nt` - -## More Information - -* [SPARQL](https://www.w3.org/TR/sparql11-query/) -* [RDF](https://www.w3.org/RDF/) -* [Iguana @ Github](https://github.com/dice-group/Iguana) -* [Our Paper from 2017](https://svn.aksw.org/papers/2017/ISWC_Iguana/public.pdf) (outdated) diff --git a/docs/configuration/ahead-of-time-compilation.md b/docs/configuration/ahead-of-time-compilation.md new file mode 100644 index 000000000..3f54387c5 --- /dev/null +++ b/docs/configuration/ahead-of-time-compilation.md @@ -0,0 +1,37 @@ +# Ahead of Time Compilation + +Because IGUANA is written in Java, the benchmark results might become inaccurate due to the architecture of the JVM. +The benchmark results might appear to be slower at the beginning of the execution and faster at the end, even though the +benchmarked system's performance remains constant. + +To minimize this effect, IGUANA uses GraalVM's ahead-of-time compilation feature. 
+This feature compiles the Java code to a native executable, which can be run without the need for a JVM.
+
+This section explains how to compile IGUANA with GraalVM and how to use the compiled binary.
+
+## Prerequisites
+
+To compile IGUANA with GraalVM, you need to have [GraalVM](https://www.graalvm.org/) installed on your system.
+The `native-image` tool also requires some additional libraries to be installed on your system.
+Further prerequisites can be found [here](https://www.graalvm.org/latest/reference-manual/native-image/#prerequisites).
+
+The default target architecture for the native binary is `x86-64-v3` (Intel Haswell and AMD Excavator or newer).
+This and other settings can be adjusted in the `pom.xml` file.
+
+## Compilation
+
+To compile IGUANA with GraalVM, execute the following command:
+
+```bash
+mvn -Pnative -Dagent=true package
+```
+
+This command creates a native binary named `iguana` in the `target/` directory.
+
+## Usage
+
+The compiled executable can be run like any other executable and behaves the same as the Java version.
+
+```bash
+./iguana <suite-config>
+```
diff --git a/docs/configuration/language_processor.md b/docs/configuration/language_processor.md new file mode 100644 index 000000000..f3fae6302 --- /dev/null +++ b/docs/configuration/language_processor.md @@ -0,0 +1,15 @@
+# Language Processor
+
+Language processors are used to process the response bodies of the HTTP requests that are executed by the workers.
+The processing is done to extract relevant information from the responses and store them in the results.
+
+Language processors are defined by the content type of the response body they process.
+They cannot be configured directly in the configuration file, but are used by the response body processors.
+
+Currently, only the `SaxSparqlJsonResultCountingParser` language processor is supported, for the `application/sparql-results+json` content type.
+
+## SaxSparqlJsonResultCountingParser
+
+The `SaxSparqlJsonResultCountingParser` is a language processor used to extract simple information from the responses of SPARQL endpoints that are in the `application/sparql-results+json` format.
+It counts the number of results, the number of variables, and the number of bindings from the response of a `SELECT` or `ASK` query.
diff --git a/docs/configuration/metrics.md b/docs/configuration/metrics.md new file mode 100644 index 000000000..5e29522c4 --- /dev/null +++ b/docs/configuration/metrics.md @@ -0,0 +1,84 @@
+# Metrics
+
+Metrics are used to measure and compare the performance of the system during the stresstest.
+They are divided into task metrics, worker metrics, and query metrics.
+
+Task metrics are calculated for every query execution across the whole task.
+Worker metrics are calculated for every query execution of one worker.
+Query metrics are calculated for every execution of one query across one worker and across every worker.
+
+For a detailed description of how results for tasks, workers and queries are reported in the RDF result file, please refer to the section [RDF results](rdf_results.md).
+
+## Configuration
+
+The metrics are configured in the `metrics` section of the configuration file.
+To enable a metric, add an entry to the `metrics` list with the `type` of the metric.
+Some metrics (`PQPS`, `PAvgQPS`) require the configuration of a `penalty` value,
+which is the time in milliseconds that a failed query will be penalized with.
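+As a rough sketch of how the penalty enters the calculation (the notation here is ours, not from the Iguana documentation), the `PQPS` value of a single query can be written as:
+
+```math
+\mathrm{PQPS} = \frac{n_{\mathrm{succeeded}} + n_{\mathrm{failed}}}{\sum_{i \in \mathrm{succeeded}} t_i + n_{\mathrm{failed}} \cdot \mathrm{penalty} / 1000}
+```
+
+where each `t_i` is the execution time of a successful run in seconds and the `penalty` is converted from milliseconds to seconds. With `penalty: 180000`, for example, a query that fails after 5 seconds contributes 180 seconds to the sum instead of 5.
+The corresponding `metrics` configuration looks like this: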
+
+```yaml
+metrics:
+  - type: "QPS"
+  - type: "AvgQPS"
+  - type: "PQPS"
+    penalty: 180000 # in milliseconds
+```
+
+If the `metrics` section is not present in the configuration file, the following **default** configuration is used:
+```yaml
+metrics:
+  - type: "AES"
+  - type: "EachQuery"
+  - type: "QPS"
+  - type: "AvgQPS"
+  - type: "NoQ"
+  - type: "NoQPH"
+  - type: "QMPH"
+```
+
+## Available metrics
+
+| Name | Configuration type | Additional parameters | Scope | Description |
+|------|--------------------|-----------------------|-------|-------------|
+| Queries per second | `QPS` | | query | The number of successfully executed queries per second. It is calculated by dividing the number of successfully executed queries by their sum of time (in seconds) it took to execute them. |
+| Average queries per second | `AvgQPS` | | task, worker | The average number of queries successfully executed per second. It is calculated by dividing the sum of the QPS values of every query the task or worker has executed by the number of queries. |
+| Number of queries | `NoQ` | | task, worker | The number of successfully executed queries. This metric is calculated for each worker and for the whole task. |
+| Number of queries per hour | `NoQPH` | | task, worker | The number of successfully executed queries per hour. It is calculated by dividing the number of successfully executed queries by their sum of time (in hours) it took to execute them. The metric value for the task is the sum of the metric for each worker. |
+| Query mixes per hour | `QMPH` | | task, worker | The number of query mixes executed per hour. A query mix is the set of queries executed by a worker, or the whole task. This metric is calculated for each worker and for the whole task. It is calculated by dividing the number of successfully executed queries by the number of queries inside the query mix and by their sum of time (in hours) it took to execute them. |
+| Penalized queries per second | `PQPS` | `penalty` (in milliseconds) | query | The number of queries executed per second, penalized by the number of failed queries. It is calculated by dividing the number of successful and failed query executions by their sum of time (in seconds) it took to execute them. If a query fails, the time it took to execute it is set to the given `penalty` value. |
+| Penalized average queries per second | `PAvgQPS` | `penalty` (in milliseconds) | task, worker | The average number of queries executed per second, penalized by the number of failed queries. It is calculated by dividing the sum of the PQPS of each query the task or worker has executed by the number of queries. |
+| Aggregated execution statistics | `AES` | | task, worker | _see below_ |
+| Each execution statistic | `EachQuery` | | query | _see below_ |
+
+## Other metrics
+
+### Aggregated Execution Statistics (AES)
+For each query that belongs to a worker or a task, this metric collects a number of statistics that are aggregated over its executions.
+
+| Name | Description |
+|------|-------------|
+| `succeeded` | The number of successful executions. |
+| `failed` | The number of failed executions. |
+| `resultSize` | The size of the HTTP response. (only stores the last result) |
+| `timeOuts` | The number of executions that resulted with a timeout. |
+| `wrongCodes` | The number of HTTP status codes received that were not 200. |
+| `unknownExceptions` | The number of unknown exceptions during execution. |
+| `totalTime` | The total time it took to execute the queries. |
+
+The `resultSize` is the size of the HTTP response in bytes and is an exception to the aggregation.
+
+### Each Execution Statistic (EachQuery)
+This metric collects statistics for each execution of a query.
+
+| Name | Description |
+|------|-------------|
+| `run` | The number of the execution. |
+| `startTime` | The timestamp at which the execution started. |
+| `time` | The time it took to execute the query. |
+| `success` | If the execution was successful. |
+| `code` | Numerical value of the end state of the execution. (success=0, timeout=110, http_error=111, exception=1) |
+| `resultSize` | The size of the HTTP response. |
+| `exception` | The exception that occurred during execution. (if any occurred) |
+| `httpCode` | The HTTP status code received. (if any was received) |
+| `responseBody` | The hash of the HTTP response body. (only if `parseResults` inside the stresstest has been set to `true`) |
diff --git a/docs/configuration/overview.md b/docs/configuration/overview.md new file mode 100644 index 000000000..76b715332 --- /dev/null +++ b/docs/configuration/overview.md @@ -0,0 +1,298 @@
+# Configuration
+
+The configuration file for a benchmark suite can either be a `.yaml` file or a `.json` file.
+YAML is recommended and all examples will be presented as YAML.
+
+## Example
+The following example shows a basic configuration for a benchmark suite as an introduction.
+
+```yaml
+datasets:
+  - name: "sp2b" # for documentation purposes
+
+connections:
+  - name: "fuseki"
+    endpoint: "http://localhost:3030/sparql"
+    dataset: "sp2b"
+
+tasks:
+  - type: "stresstest" # stresstest the endpoint
+    workers:
+      - type: "SPARQLProtocolWorker" # this worker type sends SPARQL queries over HTTP with the SPARQL protocol
+        number: 2 # generate 2 workers with the same configuration
+        connection: "fuseki" # the endpoint to which the workers are sending the queries to
+        queries:
+          path: "./example/suite/queries.txt" # the file with the queries
+          format: "one-per-line" # the format of the queries
+        completionTarget:
+          number: 1 # each worker stops after executing all queries once
+        timeout: "3 min" # a query will time out after 3 minutes
+        acceptHeader: "application/sparql-results+json" # the expected content type of the HTTP response (HTTP Accept header)
+        parseResults: false
+
+# calculate queries per second only for successful queries and the queries per second with a penalty for failed queries
+metrics:
+  - type: "PQPS"
+    penalty: 180000 # in milliseconds (3 minutes)
+  - type: "QPS"
+
+# store the results in an n-triples file and in CSV files
+storages:
+  - type: "rdf file"
+    path: "./results/result.nt"
+  - type: "csv file"
+    directory: "./results/"
+```
+
+This configuration defines a benchmark suite that stresstests a triplestore with two workers.
+
+The triplestore is named `fuseki` and is located at `http://localhost:3030/sparql`.
+The dataset that is used for the benchmark is named `sp2b`.
+During the stresstest, the workers will send SPARQL queries that are located in the file `./example/suite/queries.txt` to the triplestore.
+They will stop after they have executed all queries once, which is defined by the `completionTarget`-property.
+
+After the queries have been executed, two metrics are calculated based on the results.
+The first metric is the `PQPS`-metric, which calculates the queries per second with a penalty for failed queries.
+The second metric is the `QPS`-metric, which calculates the queries per second only for successful queries.
+
+The results are stored in an RDF file at `./results/result.nt` and in CSV files in the directory `./results/`.
+
+## Structure
+
+The configuration file consists of the following six sections:
+- [Datasets](#Datasets)
+- [Connections](#Connections)
+- [Tasks](tasks.md)
+- [Response-Body-Processors](#Response-Body-Processor)
+- [Metrics](metrics.md)
+- [Storages](storages.md)
+
+Each section holds an array of its respective items.
+Each item type will be defined further in this documentation.
+The order of the sections is not important.
+The general structure of a suite configuration may look like this:
+
+```yaml
+tasks:
+  - # item 1
+  - # item 2
+  - # ...
+
+storages:
+  - # item 1
+  - # item 2
+  - # ...
+
+datasets:
+  - # item 1
+  - # item 2
+  - # ...
+
+connections:
+  - # item 1
+  - # item 2
+  - # ...
+
+
+responseBodyProcessors:
+  - # item 1
+  - # item 2
+  - # ...
+
+metrics:
+  - # item 1
+  - # item 2
+  - # ...
+```
+
+## Durations
+
+Durations are used to define time spans in the configuration.
+They can be used for the `timeout`-property of the workers or the response body processors or for the `completionTarget`-property of the tasks.
+Duration values can be defined as an XSD duration string or as a string with a number and a unit.
+The following units are supported:
+- `s` or `sec` or `secs` for seconds
+- `m` or `min` or `mins` for minutes
+- `h` or `hr` or `hrs` for hours
+- `d` or `day` or `days` for days
+
+Some examples of duration values:
+```yaml
+timeout: "2S"    # 2 seconds
+timeout: "10s"   # 10 seconds
+timeout: "PT10S" # 10 seconds
+```
+
+## Tasks
+The tasks are the core of the benchmark suite.
+They define the actual process of the benchmarking suite and are executed from top to bottom in the order they are defined in the configuration.
+At the moment, the `stresstest` is the only implemented task.
+The `stresstest`-task queries specified endpoints with the given queries and evaluates the performance of the endpoint by measuring the time each query execution takes.
+After the execution of the queries, the task calculates the required metrics based on the measurements.
+
+The tasks are explained in more detail in the [Tasks](tasks.md) documentation.
+
+## Storages
+The storages define where and how the results of the benchmarking suite are stored.
+There are three types of storages that are supported at the moment:
+- `rdf file`
+- `csv file`
+- `triplestore`
+
+Each storage type will be explained in more detail in the [Storages](storages.md) documentation.
+
+## Datasets
+The datasets that have been used for the benchmark can be defined here.
+Right now, this is only used for documentation purposes.
+For example, you might want to know which dataset was loaded into a triplestore at the time a stresstest was executed.
+
+The datasets are later referenced in the `connections`-property to document which dataset has been loaded into which endpoint.
+
+### Properties
+Each dataset entry has the following properties:
+
+| property | required | description | example |
+|----------|----------|-------------|---------|
+| name | yes | This is a descriptive name for the dataset. | `"sp2b"` |
+| file | no | File path of the dataset. (not used for anything at the moment) | `"./datasets/sp2b.nt"` |
+
+### Example
+```yaml
+datasets:
+  - name: "sp2b"
+    file: "./datasets/sp2b.nt"
+
+connections:
+  - name: "fuseki"
+    endpoint: "https://localhost:3030/query"
+    dataset: "sp2b"
+```
+
+As already mentioned, the `datasets`-property is only used for documentation.
+The information about the datasets will be stored in the results.
+For the csv storage, the above configuration might result in the following `task-configuration.csv`-file:
+
+| taskID | connection | version | dataset |
+|--------|------------|---------|---------|
+| http://iguana-benchmark.eu/resource/1699354119-3273189568/0 | fuseki | v2 | sp2b |
+
+The resulting triples for the rdf file storage might look like this:
+
+```turtle
+ires:fuseki a iont:Connection ;
+    rdfs:label "fuseki" ;
+    iprop:dataset ires:sp2b .
+
+ires:sp2b a iont:Dataset ;
+    rdfs:label "sp2b" .
+```
+
+## Connections
+The connections are used to define the endpoints for the triplestores.
+The defined connections can later be used in the `tasks`-configuration to specify the endpoints for the benchmarking process.
+
+### Properties
+| property | required | description | example |
+|----------|----------|-------------|---------|
+| name | yes | This is a descriptive name for the connection. **(needs to be unique)** | `"fuseki"` |
+| version | no | This serves to document the version of the connection. It has no functional property. | `"v1.0.1"` |
+| dataset | no | This serves to document the dataset that has been loaded into the specified connection. It has no functional property. **(needs to reference an already defined dataset in `datasets`)** | `"sp2b"` |
+| endpoint | yes | A URI at which the endpoint is located. | `"http://localhost:3030/query"` |
+| authentication | no | Basic authentication data for the connection. | _see below_ |
+| updateEndpoint | no | A URI at which an additional update endpoint might be located. This is useful for triplestores that have separate endpoints for update queries. | `"http://localhost:3030/update"` |
+| updateAuthentication | no | Basic authentication data for the updateEndpoint. | _see below_ |
+
+Iguana only supports HTTP basic authentication for now.
+The authentication properties are objects that are defined as follows:
+
+| property | required | description | example |
+|----------|----------|-------------|---------|
+| user | yes | The user name. | `"admin"` |
+| password | yes | The password of the user. | `"password"` |
+
+### Example
+
+```yaml
+datasets:
+  - name: "wikidata"
+
+connections:
+  - name: "fuseki"
+    endpoint: "https://localhost:3030/query"
+  - name: "tentris"
+    version: "v0.4.0"
+    dataset: "wikidata" # needs to reference an existing definition in datasets
+    endpoint: "https://localhost:9080/query"
+    authentication:
+      user: "admin"
+      password: "password"
+    updateEndpoint: "https://localhost:8080/update"
+    updateAuthentication:
+      user: "updateUser"
+      password: "123"
+```
+
+## Response-Body-Processor
+The response body processors are used to process the response bodies that are received for each query from the benchmarked endpoints.
+The processors extract relevant information from the response bodies and store them in the results.
+Processors are defined by the content type of the response body they process.
+At the moment, only the `application/sparql-results+json` content type is supported.
+
+The response body processors are explained in more detail in the [Response-Body-Processor](response_body_processor.md) documentation.
+
+## Metrics
+Metrics are used to compare the performance of the benchmarked endpoints.
+The metrics are calculated from the results of the benchmarking tasks.
+Depending on the type of the metric, they are calculated for each query, for each worker, or for the whole task.
+
+Each metric will be explained in more detail in the [Metrics](metrics.md) documentation.
+
+## Basic Example
+
+```yaml
+datasets:
+  - name: "sp2b"
+
+connections:
+  - name: "fuseki"
+    dataset: "sp2b"
+    endpoint: "http://localhost:3030/sp2b"
+
+tasks:
+  - type: "stresstest"
+    workers:
+      - number: 2
+        type: "SPARQLProtocolWorker"
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+        queries:
+          path: "./example/suite/queries/"
+          format: "folder"
+        completionTarget:
+          number: 1
+        connection: "fuseki"
+        timeout: "2S"
+
+responseBodyProcessors:
+  - contentType: "application/sparql-results+json"
+    threads: 1
+
+metrics:
+  - type: "PQPS"
+    penalty: 100
+  - type: "QPS"
+
+storages:
+  - type: "rdf file"
+    path: "./results/result.nt"
+  - type: "csv file"
+    directory: "./results/"
+```
diff --git a/docs/configuration/queries.md b/docs/configuration/queries.md new file mode 100644 index 000000000..262f1b98c --- /dev/null +++ b/docs/configuration/queries.md @@ -0,0 +1,95 @@
+# Queries
+
+Benchmarks often involve running a series of queries against a database and measuring their performance.
+The query handler in Iguana is responsible for loading and selecting queries for the benchmarking process.
+
+Inside the stresstest task, the query handler is configured with the `queries` property.
+Every worker instance of the same worker configuration will use the same query handler.
+The `queries` property is an object that contains the following properties:
+
+| property | required | default | description | example |
+|-----------|----------|----------------|-------------|---------|
+| path | yes | | The path to the queries. It can be a file or a folder. | `./example/suite/queries/` |
+| format | no | `one-per-line` | The format of the queries. | `folder` or `separator` or `one-per-line` |
+| separator | no | `""` | The separator that should be used if the format is set to `separator`. | `\n###\n` |
+| caching | no | `true` | If set to `true`, the queries will be cached in memory. If set to `false`, the queries will be read from the file system every time they are needed. | `false` |
+| order | no | `linear` | The order in which the queries are executed. If set to `linear`, the queries will be executed in their order inside the file. If `format` is set to `folder`, queries will be sorted by their file name first. | `random` or `linear` |
+| seed | no | `0` | The seed for the random number generator that selects the queries. If multiple workers use the same query handler, their seed will be the sum of the given seed and their worker id. | `12345` |
+| lang | no | `SPARQL` | Not used for anything at the moment. | |
+
+## Format
+
+### One-per-line
+The `one-per-line` format is the default format.
+In this format, every query is written on a single line inside one file.
+
+In this example, the queries are written in a single file, each query on a single line:
+```
+SELECT DISTINCT * WHERE { ?s ?p ?o }
+SELECT DISTINCT ?s ?p ?o WHERE { ?s ?p ?o }
+```
+
+### Folder
+It is possible to write every query in a separate file and put them all in a folder.
+Queries will be sorted by their file name before they are read.
+
+In this example, the queries are written in separate files inside the folder `./example/suite/queries/`:
+```
+./example/suite/queries/
+├── query1.txt
+└── query2.txt
+```
+
+The file `query1.txt` contains the following query:
+```
+SELECT DISTINCT *
+WHERE {
+    ?s ?p ?o
+}
+```
+
+The file `query2.txt` contains the following query:
+```
+SELECT DISTINCT ?s ?p ?o
+WHERE {
+    ?s ?p ?o
+}
+```
+
+### Separator
+It is possible to write every query in a single file and separate them with a separator.
+The separator can be set with the `separator` property.
+Iguana will then split the file into queries based on the separator.
+If the `separator` property is set to an empty string `""` (default), the queries will be separated by an empty line.
+The separator string can also contain escape sequences like `\n` or `\t`.
+
+In this example, the queries inside this file are separated by a line consisting of the string `###`:
+```
+SELECT DISTINCT *
+WHERE {
+    ?s ?p ?o
+}
+###
+SELECT DISTINCT ?s ?p ?o
+WHERE {
+    ?s ?p ?o
+}
+```
+The `separator` property should be set to `"\n###\n"`. (be aware of different line endings on different operating systems)
+
+## Example
+```yaml
+tasks:
+  - type: "stresstest"
+    workers:
+      - type: "SPARQLProtocolWorker"
+        queries:
+          path: "./example/suite/queries.txt"
+          format: "separator"
+          separator: "\n###\n"
+          caching: false
+          order: "random"
+          seed: 12345
+          lang: "SPARQL"
+        # ...
additional worker properties +``` diff --git a/docs/configuration/rdf_results.md b/docs/configuration/rdf_results.md new file mode 100644 index 000000000..db74c7687 --- /dev/null +++ b/docs/configuration/rdf_results.md @@ -0,0 +1,146 @@ +# RDF Results +The differences between task, worker, and query metrics will be explained in more detail with the following examples. +The results shown have been generated with the `rdf file` storage type. + +## Task and Worker Metrics +The first excerpt shows the results for the task `ires:1710247002-3043500295/0` and its worker +`ires:1710247002-3043500295/0/0`: + +```turtle + + a iont:Stresstest , iont:Task ; + iprop:AvgQPS 84.121083502 ; + iprop:NoQ 16 ; + iprop:NoQPH 21894.0313677612 ; + iprop:QMPH 1287.8841981036 ; + iprop:endDate "2024-03-12T12:36:48.323Z"^^ ; + iprop:metric ires:QMPH , ires:NoQPH , ires:AvgQPS , ires:NoQ ; + iprop:noOfWorkers "1"^^ ; + iprop:query (iri of every query that has been executed inside the task) ; + iprop:startDate "2024-03-12T12:36:42.636Z"^^ ; + iprop:workerResult . + + + a iont:Worker ; + iprop:AvgQPS 84.121083502 ; + iprop:NoQ 16 ; + iprop:NoQPH 21894.0313677612 ; + iprop:QMPH 1287.8841981036 ; + iprop:connection ires:fuseki ; + iprop:endDate "2024-03-12T12:36:48.322204Z"^^ ; + iprop:metric ires:NoQ , ires:NoQPH , ires:QMPH , ires:AvgQPS ; + iprop:noOfQueries "17"^^ ; + iprop:noOfQueryMixes "1"^^ ; + iprop:query (iri of every query the worker has executed) ; + iprop:startDate "2024-03-12T12:36:42.6457629Z"^^ ; + iprop:timeOut "PT10S"^^ ; + iprop:workerID "0"^^ ; + iprop:workerType "SPARQLProtocolWorker" . +``` + +- The IRI `ires:1710247002-3043500295/0` represents the task `0` of the benchmark suite `1710247002-3043500295`. +- The IRI `ires:1710247002-3043500295/0/0` represents the worker `0` of the task described above. + +Both task and worker contain results of the `AvgQPS`, `NoQ`, `NoQPH`, and `QMPH` metrics. +These metrics are calculated for the whole task and for each worker, which can be seen in the example. +Because the task of this example only had one worker, the results are the same. + +Additional information about the task and worker, besides the metric results, are stored as well. +The following properties are stored for the task: +- `noOfWorkers`: The number of workers that executed the task. +- `query`: The IRI of every query that was executed by the task. +- `startDate`: The time when the task started. +- `endDate`: The time when the task ended. +- `workerResult`: The IRIs of the workers that executed the task. +- `metric`: The IRIs of the metrics that were calculated for the task. + +The following properties are stored for the worker: +- `connection`: The IRI of the connection that the worker used. +- `noOfQueries`: The number of queries. +- `noOfQueryMixes`: The number of queries mixes that the worker executed (mutually exclusive to `timeLimit`). +- `timeLimit`: The time duration for which the worker has executed queries (mutually exclusive to `noOfQueryMixes`). +- `query`: The IRI of every query that the worker executed. +- `startDate`: The time when the worker started. +- `endDate`: The time when the worker ended. +- `timeOut`: The maximum time a query execution should take. +- `workerID`: The id of the worker. +- `workerType`: The type of the worker. + +## Query Metrics +Every query of each query handler has its own id. +It consists of a hash value of the query handler and the query id in this format: +`ires::`. 
+In this example, results for the query `ires:1181728761:0` are shown:
+
+```turtle
+ires:1710247002-3043500295/0/1181728761:0
+        a                      iont:ExecutedQuery ;
+        iprop:QPS              18.975908187 ;
+        iprop:failed           0 ;
+        iprop:queryID          ires:1181728761:0 ;
+        iprop:resultSize       212 ;
+        iprop:succeeded        1 ;
+        iprop:timeOuts         0 ;
+        iprop:totalTime        "PT0.0526984S"^^xsd:duration ;
+        iprop:unknownException 0 ;
+        iprop:wrongCodes       0 .
+
+ires:1710247002-3043500295/0/0/1181728761:0
+        a                      iont:ExecutedQuery ;
+        iprop:QPS              18.975908187 ;
+        iprop:failed           0 ;
+        iprop:queryExecution   (iri of every execution of this query by this worker) ;
+        iprop:queryID          ires:1181728761:0 ;
+        iprop:resultSize       212 ;
+        iprop:succeeded        1 ;
+        iprop:timeOuts         0 ;
+        iprop:totalTime        "PT0.0526984S"^^xsd:duration ;
+        iprop:unknownException 0 ;
+        iprop:wrongCodes       0 .
+```
+
+The IRI `ires:1710247002-3043500295/0/0/1181728761:0` consists of the following
+segments:
+- `ires:1710247002-3043500295` is the IRI of the benchmark suite.
+- `ires:1710247002-3043500295/0` is the IRI of the first task.
+- `ires:1710247002-3043500295/0/0` is the IRI of the first task's worker.
+- `1181728761:0` is the query id.
+
+The suite id is made up of the timestamp and the hash value of the suite configuration, in this pattern:
+`ires:<timestamp>-<config-hash>`.
+
+The subject `ires:1710247002-3043500295/0/0/1181728761:0` represents the results of the query
+`ires:1181728761:0` from the first worker of the task `1710247002-3043500295/0`.
+
+The subject `ires:1710247002-3043500295/0/1181728761:0` represents the results of the query
+`ires:1181728761:0` from every worker across the whole task `1710247002-3043500295/0`.
+
+Results of query metrics, such as the `QPS` metric (and also the `AES` metric),
+are therefore calculated for each query of each worker and for each query of the whole task.
+
+The `iprop:queryExecution` property of `ires:1710247002-3043500295/0/0/1181728761:0`
+contains the IRIs of the executions of that query from that worker.
+These will be explained in the next section.
+
+## Each Execution Statistic
+
+With the `EachQuery` metric, Iguana stores the statistics of each execution of a query.
+The following excerpt shows the execution statistics of the query `ires:1181728761:0`:
+
+```turtle
+ires:1710247002-3043500295/0/0/1181728761:0/1
+        iprop:code         "0"^^xsd:long ;
+        iprop:httpCode     "200" ;
+        iprop:queryID      ires:1181728761:0 ;
+        iprop:responseBody (iri of the response body) ;
+        iprop:resultSize   "212"^^xsd:long ;
+        iprop:run          1 ;
+        iprop:startTime    "2024-03-12T12:36:42.647764Z"^^xsd:dateTime ;
+        iprop:success      true ;
+        iprop:time         "PT0.0526984S"^^xsd:duration .
+```
+
+The IRI `ires:1710247002-3043500295/0/0/1181728761:0/1` consists of the worker
+query IRI as described above and the run number of the query execution.
+
+The properties of the `EachQuery` metric are described in the [metrics](./metrics.md) section.
diff --git a/docs/configuration/response_body_processor.md b/docs/configuration/response_body_processor.md
new file mode 100644
index 000000000..5b4cc7259
--- /dev/null
+++ b/docs/configuration/response_body_processor.md
@@ -0,0 +1,25 @@
+# Response-Body-Processor
+
+The response body processor is used to process the response bodies of the HTTP requests that are executed by the workers.
+The processing extracts relevant information from the responses and stores it in the results.
+
+Iguana supports multiple response body processors, which are distinguished by the content type of the response body they process.
+
+Currently, only the `application/sparql-results+json` content type is supported,
+and it only uses the `SaxSparqlJsonResultCountingParser` language processor
+to extract simple information, such as the number of results, from the responses.
+
+Workers send the response bodies to the response body processors
+after receiving the full response from the HTTP request.
+Response bodies are processed in parallel, using the number of threads that is defined in the configuration.
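+
+As a rough sketch (using the only currently supported content type and the example values from the properties table below), the `responseBodyProcessors` list might look like this:
+
+```yaml
+responseBodyProcessors:
+  - contentType: "application/sparql-results+json"
+    threads: 2
+    timeout: "10m"
+```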
+
+To use a response body processor, it needs to be defined in the `responseBodyProcessors` list
+of the configuration file with its `contentType` property, as sketched above.
+
+## Properties
+| property    | required | default | description | example |
+|-------------|----------|---------|-------------|---------|
+| contentType | yes      |         | The content type of the response body. | `"application/sparql-results+json"` |
+| threads     | no       | `1`     | The number of threads that are used to process the response bodies. | `2` |
+| timeout     | no       | `10m`   | The maximum duration that the response body processor can take to process a response body. | `10m` |
\ No newline at end of file
diff --git a/docs/configuration/storages.md b/docs/configuration/storages.md
new file mode 100644
index 000000000..775ccdac1
--- /dev/null
+++ b/docs/configuration/storages.md
@@ -0,0 +1,89 @@
+# Storages
+
+Storages are used to store the results of the benchmark suite.
+It is possible to use multiple storages at the same time.
+They can be configured with the `storages` property in the configuration file
+by providing a list of storage configurations.
+
+## Example
+
+```yaml
+storages:
+  - type: "csv file"
+    directory: "./results"
+  - type: "rdf file"
+    path: "./results"
+  - type: "triplestore"
+    endpoint: "http://localhost:3030/ds"
+    username: "admin"
+    password: "password"
+```
+
+The following values for the `type` property are supported:
+
+- [csv file](#csv-file-storage)
+- [rdf file](#rdf-file-storage)
+- [triplestore](#triplestore-storage)
+
+## CSV File Storage
+
+The csv file storage writes the results of the benchmark suite to multiple csv files.
+It only has a single property, `directory`,
+which defines the path of the directory the csv files are written to.
+
+Inside that directory, a new directory for the execution of the benchmark suite will be created.
+Its name is `suite-<timestamp>-<config-hash>`, where
+`timestamp` is the benchmark's time of execution and `config-hash` is the hash value of the benchmark configuration.
+
+The following shows an example of the directory structure and created files of the csv storage:
+
+```text
+suite-1710241608-1701417056/
+├── suite-summary.csv
+├── task-0
+│   ├── application-sparql+json
+│   │   └── sax-sparql-result-data.csv
+│   ├── each-execution-worker-0.csv
+│   ├── query-summary-task.csv
+│   ├── query-summary-worker-0.csv
+│   └── worker-summary.csv
+└── task-configuration.csv
+```
+
+- The `suite-summary.csv` file contains the summary of each task.
+- The `task-configuration.csv` file contains information about the configuration of each task.
+- Inside the `task-0` directory, the results of the task with the id `0` are stored.
+  - The `each-execution-worker-0.csv` file contains the metric results of each query execution for `worker 0`.
+  - The `query-summary-task.csv` file contains the summary of the metric results for every query inside the task.
+  - The `query-summary-worker-0.csv` file contains the summary of the metric results for every query of `worker 0`.
+  - The `worker-summary.csv` file contains the summary of metrics for each worker of the task.
+
+The `application-sparql+json` directory contains results from Language Processors
+that process results with the `application/sparql-results+json` content type.
+Each Language Processor creates its own files in its respective directory.
+
+## RDF File Storage
+
+The rdf file storage writes the results of the benchmark suite to a single rdf file.
+
+It only has a single property, `path`,
+which defines the path of the rdf file the results are written to.
+The path can be either a file or a directory.
+The file extension determines the format in which the rdf data is stored
+(e.g., `.nt` for n-triples, `.ttl` for turtle).
+
+If the path points to a directory, or to a file that already exists,
+a turtle file with a timestamp as its name will be created instead.
+
+## Triplestore Storage
+
+The triplestore storage writes the results of the benchmark suite directly to a triplestore as triples,
+similar to the rdf file storage.
+
+It has the following properties:
+
+- `endpoint`: The update endpoint of the triplestore.
+- `username`: The username for the authentication of the triplestore.
+- `password`: The password for the authentication of the triplestore.
+
+The `username` and `password` properties are optional.
diff --git a/docs/configuration/tasks.md b/docs/configuration/tasks.md
new file mode 100644
index 000000000..95162ed33
--- /dev/null
+++ b/docs/configuration/tasks.md
@@ -0,0 +1,50 @@
+# Tasks
+Tasks are the core of the benchmark suite.
+They define the actual benchmarking process
+and are executed from top to bottom, in the order they are defined in the configuration.
+At the moment, the `stresstest` is the only implemented task.
+
+Tasks are defined in the `tasks` section of the configuration and are distinguished by the `type` property.
+
+## Example
+```yaml
+tasks:
+  - type: "stresstest"
+    # properties of the task
+    # ...
+```
+
+## Stresstest
+The `stresstest` task queries the specified endpoints in rapid succession with the given queries.
+It measures the time it takes to execute each query and calculates the required metrics based
+on the measurements.
+The task is used to measure the performance of the endpoint for each query.
+It is configured with the following properties:
+
+| property      | required | description                                                   |
+|---------------|----------|---------------------------------------------------------------|
+| workers       | yes      | An array that contains worker configurations.                 |
+| warmupworkers | no       | An array that contains worker configurations for the warmup.  |
+
+The stresstest uses workers, which are supposed to simulate users, to execute the queries.
+Each worker has its own set of queries and executes them in parallel with the other workers.
+
+Warmup workers have the same functionality as normal workers,
+but their results won't be processed or stored.
+The stresstest runs the warmup workers before the actual workers.
+They're used to warm up the system before the actual benchmarking starts.
+
+For more information about the worker configuration, see [here](./workers.md).
+
+### Example
+```yaml
+tasks:
+  - type: "stresstest"
+    workers:
+      - type: "SPARQLProtocolWorker"
+        # ... worker properties
+    warmupworkers:
+      - type: "SPARQLProtocolWorker"
+        # ...
+```
+
diff --git a/docs/configuration/workers.md b/docs/configuration/workers.md
new file mode 100644
index 000000000..91ae5e852
--- /dev/null
+++ b/docs/configuration/workers.md
@@ -0,0 +1,137 @@
+# Workers
+The stresstest uses workers, which are supposed to simulate users, to execute the queries.
+Each worker has its own set of queries and executes them in parallel with the other workers.
+
+Iguana supports multiple worker types, but currently only the `SPARQLProtocolWorker` is implemented.
+All workers share the common `type` property, which defines the type of the worker.
+
+```yaml
+tasks:
+  - type: "stresstest"
+    workers:
+      - type: "SPARQLProtocolWorker"
+        # properties of the worker
+        # ...
+      - type: "SPARQLProtocolWorker"
+        # properties of the worker
+        # ...
+```
+
+## SPARQLProtocolWorker
+
+The `SPARQLProtocolWorker` is a worker that sends SPARQL queries to an endpoint using the SPARQL protocol.
+The worker can be configured with the following properties:
+
+| property         | required | default     | description |
+|------------------|----------|-------------|-------------|
+| number           | no       | `1`         | The number of workers that should be initiated with the same configuration. |
+| queries          | yes      |             | The configuration of the query handler these workers should use (see [here](./queries.md)). |
+| completionTarget | yes      |             | Either defines how many queries the worker should send, or for how long it should send them. |
+| connection       | yes      |             | The name of the connection that the worker should use (needs to reference an already defined connection). |
+| timeout          | yes      |             | The duration for the query timeout. |
+| acceptHeader     | no       |             | The accept header that the worker should use for the HTTP requests. |
+| requestType      | no       | `get query` | The request type that the worker should use. |
+| parseResults     | no       | `true`      | Whether the worker should parse the results. |
+
+Each property is explained in more detail below.
+
+### Example
+```yaml
+connections:
+  - name: "fuseki"
+    dataset: "sp2b"
+    endpoint: "http://localhost:3030/sp2b"
+
+tasks:
+  - type: "stresstest"
+    workers:
+      - type: "SPARQLProtocolWorker"
+        number: 2 # two workers with the same configuration will be initiated
+        queries: # the query handler configuration, both workers will use the same query handler
+          path: "./example/suite/queries/"
+          format: "folder"
+        completionTarget:
+          number: 1 # each query will be executed once
+        connection: "fuseki" # the worker will use the connection with the name "fuseki", which is defined above
+        timeout: "2S"
+        acceptHeader: "application/sparql-results+json"
+        requestType: "get query"
+        parseResults: true
+```
+
+### Number
+
+The `number` property defines the number of workers that should be initiated with the same configuration.
+Workers with the same configuration will use the same query handler instance.
+
+### Queries
+
+The `queries` property is the configuration of the query handler that the worker should use.
+The query handler is responsible for loading and selecting the queries that the worker should execute.
+The query handler configuration is explained in more detail [here](./queries.md).
+
+### Completion Target
+The `completionTarget` property defines when the worker should stop executing queries.
+The property takes an object as its value that contains exactly one of the following properties:
+- `number`: The number of times the worker should execute each query.
+- `duration`: The duration during which the worker should iterate and execute every query.
+
+Example:
+```yaml
+tasks:
+  - type: "stresstest"
+    workers:
+      - type: "SPARQLProtocolWorker"
+        number: 1
+        completionTarget:
+          number: 100 # execute each query 100 times
+        # ...
+      - type: "SPARQLProtocolWorker"
+        number: 1
+        completionTarget:
+          duration: "10s" # execute queries for 10 seconds
+        # ...
+```
+
+### Timeout
+The `timeout` property defines the maximum time a query execution may take;
+this includes the time it takes to send the request and to receive the response.
+If the timeout is reached, the worker will mark the execution as failed,
+cancel the HTTP request, and continue with the execution of the next query.
+
+The system being tested should be able
+to abort the execution of a query once the timeout has been reached
+(e.g., by setting a timeout parameter of the system, if available).
+Otherwise, problems like high resource usage or other issues might occur.
+
+### Request Type
+The `requestType` property defines the type of the HTTP request that the worker should use.
+It consists of a string that can be one of the following values:
+
+| request type            | HTTP method | Content-Type header value           | description |
+|-------------------------|-------------|-------------------------------------|-------------|
+| `"get query"`           | `GET`       |                                     | The worker will send a `GET` request with a `query` parameter that contains the query. |
+| `"post query"`          | `POST`      | `application/sparql-query`          | The body will contain the query. |
+| `"post update"`         | `POST`      | `application/sparql-update`         | The body will contain the update query. |
+| `"post url-enc query"`  | `POST`      | `application/x-www-form-urlencoded` | The body will contain a url-encoded key-value pair with the key `query` and the query as the value. |
+| `"post url-enc update"` | `POST`      | `application/x-www-form-urlencoded` | The body will contain a url-encoded key-value pair with the key `update` and the update query as the value. |
+
+### Accept Header
+
+The `acceptHeader` property defines the value for the `Accept` header of the HTTP requests that a worker sends to the defined endpoint.
+This property also affects the [Response-Body-Processors](./overview#Response-Body-Processor)
+that are used to process the response bodies.
+
+### Parse Results
+
+The `parseResults` property defines whether the worker should parse the results of the queries.
+If the property is set to `true`,
+the worker will send the response body to the [Response-Body-Processors](./overview#Response-Body-Processor) for processing
+and calculate hash values for the response bodies.
+If the property is set to `false`,
+the worker will neither parse the response bodies nor calculate hash values for them.
+
+Setting the property to `false` can improve the performance of the worker,
+which allows it to measure the performance of the system more accurately.
+Note that if the property is set to `true`, the worker will temporarily store the whole response bodies in memory for processing,
+while with `false` it will discard any received bytes from the response.
diff --git a/docs/develop/architecture.md b/docs/develop/architecture.md
deleted file mode 100644
index b051238e4..000000000
--- a/docs/develop/architecture.md
+++ /dev/null
@@ -1,3 +0,0 @@
-## Test1
-
-## Test2
diff --git a/docs/develop/extend-lang.md b/docs/develop/extend-lang.md
deleted file mode 100644
index 4285fdd8a..000000000
--- a/docs/develop/extend-lang.md
+++ /dev/null
@@ -1,110 +0,0 @@
-# Extend Languages
-
-If you want to add query specific statistics and/or using the correct result size for an HTTP Worker (Post or Get) you can do so.
-(This may be interesting if you're not using SPARQL)
-
-Let's start by implementing the `LanguageProcessor`
-
-```java
-@Shorthand("lang.MyLanguage")
-public class MyLanguageProcessor implements LanguageProcessor {
-
-    @Override
-    public String getQueryPrefix() {
-    }
-
-
-    @Override
-    public Model generateTripleStats(List<QueryWrapper> queries, String resourcePrefix, String taskID) {
-    }
-
-    @Override
-    public Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException {
-    }
-
-    @Override
-    Long getResultSize(Header contentTypeHeader, BigByteArrayOutputStream content) throws ParserConfigurationException, SAXException, ParseException, IOException{
-    }
-
-    @Override
-    long readResponse(InputStream inputStream, BigByteArrayOutputStream responseBody) throws IOException{
-    }
-
-
-}
-```
-
-## Query prefix
-
-Set a query prefix which will be used in the result set, f.e.
"sql" - -```java - @Override - public String getQueryPrefix() { - return "sql"; - } -``` - -## Generate Query Statistics - -Generating query specific statistics (which will be added in the result file) - -You will get the queries (containg of an ID and the query itself) a resourcePrefix you may use to create the URIs and the current taskID. - -A basic pretty standard exmaple is - -```java - @Override - public Model generateTripleStats(List queries, String resourcePrefix, String taskID) { - Model model = ModelFactory.createDefaultModel(); - for(QueryWrapper wrappedQuery : queries) { - Resource subject = ResourceFactory.createResource(COMMON.RES_BASE_URI + resourcePrefix + "/" + wrappedQuery.getId()); - model.add(subject, RDF.type, Vocab.queryClass); - model.add(subject, Vocab.rdfsID, wrappedQuery.getId().replace(queryPrefix, "").replace("sql", "")); - model.add(subject, RDFS.label, wrappedQuery.getQuery().toString()); - - //ADD YOUR TRIPLES HERE which contains query specific statistics - } - return model; - - } -``` - -## Get the result size - -To generate the correct result size in the result file do the following - -```java - @Override - public Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException { - - - InputStream inStream = response.getEntity().getContent(); - Long size = -1L; - //READ INSTREAM ACCORDINGLY - - - return size; - } - - - @Override - public Long getResultSize(Header contentTypeHeader, BigByteArrayOutputStream content) throws ParserConfigurationException, SAXException, ParseException, IOException { - //Read content from Byte Array instead of InputStream - InputStream is = new BigByteArrayInputStream(content); - Long size=-1L; - ... - - return size; - } - - @Override - public long readResponse(InputStream inputStream, BigByteArrayOutputStream responseBody) throws IOException { - //simply moves content from inputStream to the byte array responseBody and returns the size; - //will be used for parsing the anwser in another thread. - return Streams.inputStream2ByteArrayOutputStream(inputStream, responseBody); - } - - -``` - diff --git a/docs/develop/extend-metrics.md b/docs/develop/extend-metrics.md deleted file mode 100644 index cd5667d36..000000000 --- a/docs/develop/extend-metrics.md +++ /dev/null @@ -1,107 +0,0 @@ -# Extend Metrics - -Developed a new metric or simply want to use one that isn't implemented? - -Start by extending the `AbstractMetric` - -```java -package org.benchmark.metric - -@Shorthand("MyMetric") -public class MyMetric extends AbstractMetric{ - - @Override - public void receiveData(Properties p) { - } - - @Override - public void close() { - callbackClose(); - super.close(); - - } - - protected void callbackClose() { - //ADD YOUR CLOSING HERE - } -} -``` - -## Receive Data - -This method will receive all the results during the benchmark. - -You'll receive a few values regarding that one query execution, the time it took, if it succeeded, if not if it was a timeout, a wrong HTTP Code or unkown. -Further on the result size of the query. - -If your metric is a single value metric you can use the `processData` method, which will automatically add each value together. -However if your metric is query specific you can use the `addDataToContainter` method. (Look at the [QPSMetric](https://github.com/dice-group/IGUANA/blob/master/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QPSMetric.java). - -Be aware that both mehtods will save the results for each worker used. 
This allows to calcualte the overall metric as well the metric for each worker itself. - -We will go with the single-value metric for now. - - -An example on how to retrieve every possible value and saving the time and success. - -```java - @Override - public void receiveData(Properties p) { - - double time = Double.parseDouble(p.get(COMMON.RECEIVE_DATA_TIME).toString()); - long tmpSuccess = Long.parseLong(p.get(COMMON.RECEIVE_DATA_SUCCESS).toString()); - long success = tmpSuccess>0?1:0; - long failure = success==1?0:1; - long timeout = tmpSuccess==COMMON.QUERY_SOCKET_TIMEOUT?1:0; - long unknown = tmpSuccess==COMMON.QUERY_UNKNOWN_EXCEPTION?1:0; - long wrongCode = tmpSuccess==COMMON.QUERY_HTTP_FAILURE?1:0; - if(p.containsKey(COMMON.RECEIVE_DATA_SIZE)) { - size = Long.parseLong(p.get(COMMON.RECEIVE_DATA_SIZE).toString()); - } - - Properties results = new Properties(); - results.put(TOTAL_TIME, time); - results.put(TOTAL_SUCCESS, success); - - Properties extra = getExtraMeta(p); - processData(extra, results); - } -``` - - -## Close - -In this method you should finally calculate your metric and send the results. - -```java - protected void callbackClose() { - //create model to contain results - Model m = ModelFactory.createDefaultModel(); - - Property property = getMetricProperty(); - Double sum = 0.0; - - // Go over each worker and add metric results to model. - for(Properties key : dataContainer.keySet()){ - Double totalTime = (Double) dataContainer.get(key).get(TOTAL_TIME); - Integer success = (Integer) dataContainer.get(key).get(TOTAL_SUCCESS); - Double noOfQueriesPerHour = hourInMS*success*1.0/totalTime; - sum+=noOfQueriesPerHour; - Resource subject = getSubject(key); - m.add(getConnectingStatement(subject)); - m.add(subject, property, ResourceFactory.createTypedLiteral(noOfQueriesPerHour)); - } - - // Add overall metric to model - m.add(getTaskResource(), property, ResourceFactory.createTypedLiteral(sum)); - - //Send data to storage - sendData(m); - } - - -``` - -## Constructor - -The constructor parameters will be provided the same way the Task get's the parameters, thus simply look at [Extend Task](../extend-task). diff --git a/docs/develop/extend-queryhandling.md b/docs/develop/extend-queryhandling.md deleted file mode 100644 index 6b1702bc1..000000000 --- a/docs/develop/extend-queryhandling.md +++ /dev/null @@ -1,57 +0,0 @@ -# Extend Query Handling - -If you want to use another query generating method as the implemented ones you can do so. - -Start by extend the `AbstractWorkerQueryHandler`. It will split up the generation for UPDATE queries and Request queries. - -```java -package org.benchmark.query - - -public class MyQueryHandler extends AbstractWorkerQueryHandler{ - - protected abstract QuerySet[] generateQueries(String queryFileName) { - - } - - protected abstract QuerySet[] generateUPDATE(String updatePath) { - - } - -} - -``` - -for simplicity we will only show the `generateQueries` as it is pretty much the same. -However be aware that the `generateUPDATE` will use a directory or file instead of just a query file. - -## Generate Queries - -The class will get a query file containing all the queries. -How you read them and what to do with them is up to you. -You just need to return an array of `QuerySet`s - -A query set is simply a container which contains the name/id of the query as well as the query or several queries (f.e. if they are of the same structure but different values). -For simplicity we assume that we deal with only one query per query set. 
- -Parse your file and for each query create a QuerySet - - -```java - protected QuerySet[] generateQueries(String queryFileName) { - File queryFile = new File(queryFileName); - List ret = new LinkedList(); - - int id=0; - //TODO parse your queries - ... - - ret.add(new InMemQuerySet(idPrefix+id++, queryString)); - ... - - - return ret.toArray(new QuerySet[]{}); - } -``` - -This function will parse your query accodringly and add an In Memory QuerySet (another option is a File Based Query Set, where each QuerySet will be stored in a file and IO happens during the benchmark itself. diff --git a/docs/develop/extend-result-storages.md b/docs/develop/extend-result-storages.md deleted file mode 100644 index 95ab89779..000000000 --- a/docs/develop/extend-result-storages.md +++ /dev/null @@ -1,51 +0,0 @@ -#Extend Result Storages - -If you want to use a different storage than RDF you can extend the storages - -However it is highly optimized for RDF so we suggest to work on top of the `TripleBasedStorage` - -```java -package org.benchmark.storage - -@Shorthand("MyStorage") -public class MyStorage extends TripleBasedStorage { - - @Override - public void commit() { - - } - - - @Override - public String toString(){ - return this.getClass().getSimpleName(); - } - -} - -``` - -## Commit - -This should take all the current results, store them and remove them from memory. - -You can access the results at the Jena Model `this.metricResults`. - -For example: - -```java - - @Override - public void commit() { - try (OutputStream os = new FileOutputStream(file.toString(), true)) { - RDFDataMgr.write(os, metricResults, RDFFormat.NTRIPLES); - metricResults.removeAll(); - } catch (IOException e) { - LOGGER.error("Could not commit to NTFileStorage.", e); - } - } -``` - -## Constructor - -The constructor parameters will be provided the same way the Task get's the parameters, thus simply look at [Extend Task](../extend-task). diff --git a/docs/develop/extend-task.md b/docs/develop/extend-task.md deleted file mode 100644 index df83e57f4..000000000 --- a/docs/develop/extend-task.md +++ /dev/null @@ -1,225 +0,0 @@ -# Extend Tasks - -You can extend Iguana with your benchmark task, if the Stresstest doesn't fit your needs. -F.e. you may want to check systems if they answer correctly rather than stresstest them. - -You will need to create your own task either in the Iguana code itself or by using Iguana as a library. -Either way start by extending the AbstractTask. - -```java -package org.benchmark - -@Shorthand("MyBenchmarkTask") -public class MyBenchmarkTask extend AbstractTask { - -} - -``` - -You will need to override some functions. For now include them and go through them step by step - -```java -package org.benchmark - -@Shorthand("MyBenchmarkTask") -public class MyBenchmarkTask extend AbstractTask { - - //Your constructor(s) - public MyBenchmarkTask(Integer timeLimit, ArrayList workers, LinkedHashMap queryHandler) throws FileNotFoundException { - } - - - //Meta Data (which will be added in the resultsfile) - @Override - public void addMetaData() { - super.addMetaData(); - } - - //Initializing - @Override - public void init(String[] ids, String dataset, Connection connection) { - super.init(ids, dataset, connection); - } - - //Your actual Task - @Override - public void execute() { - } - - - //Closing the benchmark, freeing some stuff etc. - @Override - public void close() { - super.close(); - } -} - -``` - - -## Constructor and Configuration - -Let's start with the Constructor. 
-The YAML benchmark configuration will provide you the constructor parameters. - -Imagine you want to have three different parameters. -The first one should provide an integer (e.g. the time limit of the task) -The second one should provide a list of objects (e.g. a list of integers to use) -The third parameter should provide a map of specific key-value pairs. - -You can set this up by using the following parameters: - -```java -public MyBenchmarkTask(Integer param1, ArrayList param2, LinkedHashMap param3) throws FileNotFoundException { - //TODO whatever you need to do with the parameters -} -``` - -Then Your configuration may look like the following - -```yaml -... - className: "MyBenchmarkTask" - configuration: - param1: 123 - param2: - - "1" - - "2" - param3: - val1: "abc" - val2: 123 - -``` - -The parameters will then be matched by their names to the names of the parameters of your constructor, allowing multiple constructors - -These are the three types you can represent in a Yaml configuration. -* Single Values -* Lists of Objects -* Key-Value Pairs - - -## Add Meta Data - -If you want to add Meta Data to be written in the results file do the following, - -Let noOfWorkers a value you already set. - -```java - /** - * Add extra Meta Data - */ - @Override - public void addMetaData() { - super.addMetaData(); - - Properties extraMeta = new Properties(); - extraMeta.put("noOfWorkers", noOfWorkers); - - //Adding them to the actual meta data - this.metaData.put(COMMON.EXTRA_META_KEY, extraMeta); - } - -``` - -Then the resultsfile will contain all the mappings you put in extraMeta. - -## Initialize the Task - -You may want to initialize your task, set some more values, start something in the background etc. etc. - -You will be provided the `suiteID`, `experimentID` and the `taskID` in the `ids` array, as well as the name of the dataset -and the connection currently beeing benchmarked. - - -```java - @Override - public void init(String[] ids, String dataset, Connection connection) { - super.init(ids, dataset, connection); - //ADD YOUR CODE HERE - } -``` - -The ids, the dataset and the connection will be set in the `AbstractTask` which you can simply access by using `this.connection` for example. - -## Execute - -Now you can create the actual benchmark task you want to use. - - -```java - @Override - public void execute() { - //ADD YOUR CODE HERE - } -``` - -Be aware that if you are using the `workers` implemented in Iguana, you need to stop them after your benchmark using the `worker.stopSending()` method. - -## Close - -If you need to close some streams at the end of your benchmark task, you can do that in the `close` function. - -Simply override the existing one and call the super method and implement what you need. 
- -```java - @Override - public void close() { - super.close(); - } -``` - -## Full overview - -```java -package org.benchmark - -@Shorthand("MyBenchmarkTask") -public class MyBenchmarkTask extend AbstractTask { - - private Integer param1; - private ArrayList param2; - private LinkedHashMap param3; - - //Your constructor(s) - public MyBenchmarkTask(Integer param1, ArrayList param2, LinkedHashMap param3) throws FileNotFoundException { - - this.param1=param1; - this.param2=param2; - this.param3=param3; - - } - - - //Meta Data (which will be added in the resultsfile) - @Override - public void addMetaData() { - super.addMetaData(); - - Properties extraMeta = new Properties(); - extraMeta.put("noOfWorkers", noOfWorkers); - - //Adding them to the actual meta data - this.metaData.put(COMMON.EXTRA_META_KEY, extraMeta); - } - - @Override - public void init(String[] ids, String dataset, Connection connection) { - super.init(ids, dataset, connection); - //ADD YOUR CODE HERE - } - - @Override - public void execute() { - //ADD YOUR CODE HERE - } - - - //Closing the benchmark, freeing some stuff etc. - @Override - public void close() { - super.close(); - } -} - -``` diff --git a/docs/develop/extend-workers.md b/docs/develop/extend-workers.md deleted file mode 100644 index c85f838aa..000000000 --- a/docs/develop/extend-workers.md +++ /dev/null @@ -1,129 +0,0 @@ -# Extend Workers - -If the implemented workers aren't sufficient you can create your own one. - -Start by extending the `AbstractWorker` - -```java -package org.benchmark.workers - -@Shorthand("MyWorker") -public class MyWorker extends AbstractWorker{ - - - //Setting the next query to be benchmarked in queryStr and queryID - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException{ - - } - - - //Executing the current benchmark query - public void executeQuery(String query, String queryID){ - - } - -} -``` - -These are the only two functions you need to implement, the rest is done by the `AbstractWorker`. - -You can override more functions, please consider looking into the javadoc for that. - -## Constructor - -The constructor parameters will be provided the same way the Task get's the parameters, thus simply look at [Extend Task](../extend-task). - -## Get the next query - -The benchmark task should create and initialize the benchmark queries and will set them accordingly to the worker. - -You can access these queries using the `queryFileList` array. -Each element consists of one query set, containing the queryID/name and a list of one to several queries. - -In the following we will choose the next query set, counted by `currentQueryID` and use a random query of this. - -```java - - @Override - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException { - // get next Query File and next random Query out of it. - QuerySet currentQuery = this.queryFileList[this.currentQueryID++]; - queryID.append(currentQuery.getName()); - - int queriesInSet = currentQuery.size(); - int queryLine = queryChooser.nextInt(queriesInSet); - queryStr.append(currentQuery.getQueryAtPos(queryLine)); - - // If there is no more query(Pattern) start from beginning. - if (this.currentQueryID >= this.queryFileList.length) { - this.currentQueryID = 0; - } - - } -``` - -Thats it. - -This exact method is implemented in the `AbstractRandomQueryChooserWorker` class and instead of extend the `AbstractWorker` class, you can also extend this and spare your time. 
-However if you need another way like only executing one query and if there are no mery queries to test end the worker you can do so: - -```java - - @Override - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException { - // If there is no more query(Pattern) start from beginning. - if (this.currentQueryID >= this.queryFileList.length) { - this.stopSending(); - } - - - // get next Query File and the first Query out of it. - QuerySet currentQuery = this.queryFileList[this.currentQueryID++]; - queryID.append(currentQuery.getName()); - - int queriesInSet = currentQuery.size(); - queryStr.append(currentQuery.getQueryAtPos(0)); - - } -``` - -## Execute the current query - -Now you can execute the query against the current connection (`this.con`). - -As this is up to you how to do that, here is an example implementation for using HTTP Get. - -```java -@Override - public void executeQuery(String query, String queryID) { - Instant start = Instant.now(); - - try { - String qEncoded = URLEncoder.encode(query, "UTF-8"); - String addChar = "?"; - if (con.getEndpoint().contains("?")) { - addChar = "&"; - } - String url = con.getEndpoint() + addChar + parameter+"=" + qEncoded; - HttpGet request = new HttpGet(url); - RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(timeOut.intValue()) - .setConnectTimeout(timeOut.intValue()).build(); - - if(this.responseType != null) - request.setHeader(HttpHeaders.ACCEPT, this.responseType); - - request.setConfig(requestConfig); - CloseableHttpClient client = HttpClients.createDefault(); - CloseableHttpResponse response = client.execute(request, getAuthContext(con.getEndpoint())); - - // method to process the result in background - super.processHttpResponse(queryID, start, client, response); - - } catch (Exception e) { - LOGGER.warn("Worker[{{ '{{}}' }} : {{ '{{}}' }}]: Could not execute the following query\n{{ '{{}}' }}\n due to", this.workerType, - this.workerID, query, e); - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()))); - } - } -``` - diff --git a/docs/develop/how-to-start.md b/docs/develop/how-to-start.md deleted file mode 100644 index e69de29bb..000000000 diff --git a/docs/develop/maven.md b/docs/develop/maven.md deleted file mode 100644 index c030b301f..000000000 --- a/docs/develop/maven.md +++ /dev/null @@ -1,51 +0,0 @@ -# Use Iguana as a Maven dependency - -Iguana provides 3 packages - -**iguana.commons** which consists of some helper classes. - -**iguana.resultprocessor** which consists of metrics and the result storage workflow - -and **iguana.corecontroller** which contains the tasks, the workers, the query handlers, and the overall Iguana workflow - -to use one of these packages in your maven project add the following repository to your pom: - -```xml - - iguana-github - Iguana Dice Group repository - https://maven.pkg.github.com/dice-group/Iguana - -``` - -Afterwards add the package you want to add using the following, - -for the core controller, which will also include the result processor as well as the commons. - -```xml - - org.aksw - iguana.corecontroller - ${iguana-version} - -``` - -for the result processor which will also include the commons. - -```xml - - org.aksw - iguana.resultprocessor - ${iguana-version} - -``` - -or for the commons. 
- -```xml - - org.aksw - iguana.commons - ${iguana-version} - -``` diff --git a/docs/develop/overview.md b/docs/develop/overview.md deleted file mode 100644 index 3dcb93fd6..000000000 --- a/docs/develop/overview.md +++ /dev/null @@ -1,26 +0,0 @@ -# Development Overview - -Iguana is open source and available at Github [here](https://github.com/dice-group/Iguana). -There are two main options to work on Iguana. - -* Fork the git repository and work directly on Iguana -* or use the [Iguana Maven Packages](https://github.com/orgs/dice-group/packages?repo_name=IGUANA) as a library - -Iguana is a benchmark framework which can be extended to fit your needs. - -## Extend - -There are several things you can extend in Iguana. - -* Tasks - Add your benchmark task -* Workers - Your system won't work with HTTP GET or POST, or work completely different? Add your specific worker. -* Query Handling - You do not use Plain Text queries or SPARQL? Add your query handler. -* Language - Want more statistics about your specific queries? The result size isn't accurate? add your language support -* Result Storage - Don't want to use RDF? Add your own solution to store the benchmark results. -* Metrics - The metrics won't fit your needs? Add your own. - -## Bugs - -For bugs please open an issue at our [Github Issue Tracker](https://github.com/dice-group/Iguana/issues) - - diff --git a/docs/download.md b/docs/download.md deleted file mode 100644 index 08d8a3390..000000000 --- a/docs/download.md +++ /dev/null @@ -1,23 +0,0 @@ -# Download - -## Prerequisites - -You need to have Java 11 or higher installed. - - -In Ubuntu you can do this by -```bash -sudo apt-get install java -``` - -## Download - -Please download the latest release at [https://github.com/dice-group/IGUANA/releases/latest](https://github.com/dice-group/IGUANA/releases/latest) - -The zip file contains 3 files. - -* iguana-{{ release_version }}.jar -* example-suite.yml -* start-iguana.sh - -The example-suite.yml is a valid benchmark configuration which you can adjust to your needs using the [Configuration](../usage/configuration) wiki. diff --git a/docs/index.md b/docs/index.md deleted file mode 100644 index 06543a855..000000000 --- a/docs/index.md +++ /dev/null @@ -1,25 +0,0 @@ -## Welcome to the Iguana documentation! - -IGUANA Logo - -This documentation will help you benchmark your HTTP endpoints (such as your Triple store) using Iguana and help you extend Iguana to your needs. -It is split into three parts - -* General -* Quick Start Guide -* Usage -* Development - -In **General** you will find a bit of information of what Iguana is and what it's capable of. - -In the **Quick Start Guide** you will find how to download and start Iguana as well how to quickly configure your first simple benchmark using Iguana. - -In **Usage** you will find everything on how to execute a benchmark with Iguana and how to configure the benchmark to your needs. -It further provides details on what tests Iguana is capable of. -A Tutorial will finally guide you through all steps broadly which you can use as a quick start. - -In **Development** you will find everything you need to know in case that Iguana isn't sufficient for your needs. It shows how to extend Iguana to use your metrics or your specific benchmark test - - - -Have exciting Evaluations! 
diff --git a/docs/quick-config.md b/docs/quick-config.md deleted file mode 100644 index b498b74df..000000000 --- a/docs/quick-config.md +++ /dev/null @@ -1,60 +0,0 @@ -# Quickly Configure Iguana - -Here we will setup a quick configuration which will benchmark one triple store (e.g. apache jena fuseki) using one simulated user. -We assume that your triple store (or whatever HTTP GET endpoint you want to use) is running and loaded with data. -For now we assume that the endpoint is at `http://localhost:3030/ds/sparql` and uses GET with the parameter `query` - -Further on the benchmark should take 10 minutes (or 60.000 ms) and uses plain text queries located in `queries.txt`. - -If you do not have created some queries yet, use these for example - -```sparql -SELECT * {?s ?p ?o} -SELECT * {?s ?p ?o} LIMIT 10 -SELECT * {?s ?o} -``` - -and save them to `queries.txt`. - - -Your results will be written as an N-Triple file to `first-benchmark-results.nt` - -The following configuration works with these demands. - -```yaml -# you can ignore this for now -datasets: - - name: "Dataset" - -#Your connection -connections: - - name: "Fuseki" - # Change this to your actual endpoint you want to use - endpoint: "http://localhost:3030/ds/sparql" - -# The benchmark task -tasks: - - className: "Stresstest" - configuration: - # 10 minutes (time Limit is in ms) - timeLimit: 600000 - # we are using plain text queries - queryHandler: - className: "InstancesQueryHandler" - - # create one SPARQL Worker (it's basically a HTTP get worker using the 'query' parameter - # it uses the queries.txt file as benchmark queries - workers: - - threads: 1 - className: "SPARQLWorker" - queriesFile: "queries.txt" - -# tell Iguana where to save your results to -storages: - - className: "NTFileStorage" - configuration: - fileName: "first-benchmark-results.nt" -``` - - -For more information on the confguration have a look at [Configuration](../usage/configuration/) diff --git a/docs/run-iguana.md b/docs/run-iguana.md deleted file mode 100644 index 809f5f533..000000000 --- a/docs/run-iguana.md +++ /dev/null @@ -1,26 +0,0 @@ -# Start a Benchmark - -Start Iguana with a benchmark suite (e.g the example-suite.yml) either using the start script - -```bash -./start-iguana.sh example-suite.yml -``` - -To set JVM options, you can use `$IGUANA_JVM` - -For example to let Iguana use 4GB of RAM you can set the `IGUANA_JVM` as follows -```bash -export IGUANA_JVM=-Xmx4g -``` - -and start as above. 
- - - -or using the jar with java 11 as follows - - -```bash -java -jar iguana-corecontroller-{{ release_version }}.jar example-suite.yml -``` - diff --git a/docs/shorthand-mapping.md b/docs/shorthand-mapping.md deleted file mode 100644 index e54e3132b..000000000 --- a/docs/shorthand-mapping.md +++ /dev/null @@ -1,32 +0,0 @@ -| Shorthand | Class Name | -|----------|-------| -| Stresstest | `org.aksw.iguana.cc.tasks.impl.Stresstest` | -|----------|-------| -| InstancesQueryHandler | `org.aksw.iguana.cc.query.impl.InstancesQueryHandler` | -| DelimInstancesQueryHandler | `org.aksw.iguana.cc.query.impl.DelimInstancesQueryHandler` | -| PatternQueryHandler | `org.aksw.iguana.cc.query.impl.PatternQueryHandler` | -|----------|-------| -| lang.RDF | `org.aksw.iguana.cc.lang.impl.RDFLanguageProcessor` | -| lang.SPARQL | `org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor` | -| lang.SIMPLE | `org.aksw.iguana.cc.lang.impl.ThrowawayLanguageProcessor` | -|----------|-------| -| SPARQLWorker | `org.aksw.iguana.cc.worker.impl.SPARQLWorker` | -| UPDATEWorker | `org.aksw.iguana.cc.worker.impl.UPDATEWorker` | -| HttpPostWorker | `org.aksw.iguana.cc.worker.impl.HttpPostWorker` | -| HttpGetWorker | `org.aksw.iguana.cc.worker.impl.HttpGetWorker` | -| CLIWorker | `org.aksw.iguana.cc.worker.impl.CLIWorker` | -| CLIInputWorker | `org.aksw.iguana.cc.worker.impl.CLIInputWorker` | -| CLIInputFileWorker | `org.aksw.iguana.cc.worker.impl.CLIInputFileWorker` | -| CLIInputPrefixWorker | `org.aksw.iguana.cc.worker.impl.CLIInputPrefixWorker` | -| MultipleCLIInputWorker | `org.aksw.iguana.cc.worker.impl.MultipleCLIInputWorker` | -|----------|-------| -| NTFileStorage | `org.aksw.iguana.rp.storages.impl.NTFileStorage` | -| RDFFileStorage | `org.aksw.iguana.rp.storages.impl.RDFFileStorage` | -| TriplestoreStorage | `org.aksw.iguana.rp.storages.impl.TriplestoreStorage` | -|----------|-------| -| QPS | `org.aksw.iguana.rp.metrics.impl.QPSMetric` | -| AvgQPS | `org.aksw.iguana.rp.metrics.impl.AvgQPSMetric` | -| NoQ | `org.aksw.iguana.rp.metrics.impl.NoQMetric` | -| NoQPH | `org.aksw.iguana.rp.metrics.impl.NoQPHMetric` | -| QMPH | `org.aksw.iguana.rp.metrics.impl.QMPHMetric` | -| EachQuery | `org.aksw.iguana.rp.metrics.impl.EQEMetric` | diff --git a/docs/usage/configuration.md b/docs/usage/configuration.md deleted file mode 100644 index 070cb20f2..000000000 --- a/docs/usage/configuration.md +++ /dev/null @@ -1,327 +0,0 @@ -# Configuration - -The Configuration explains Iguana how to execute your benchmark. It is divided into 5 categories - -* Connections -* Datasets -* Tasks -* Storages -* Metrics - -Additionally a pre and post task script hook can be set. - -The configuration has to be either in YAML or JSON. Each section will be detailed out and shows configuration examples. At the end the full configuration will be shown. -For this we will stick to the YAML format, however the equivalent JSON is also valid and can be parsed by Iguana. - -### Connections - -Every benchmark suite can execute several connections (e.g. an HTTP endpoint, or a CLI application). -A connection has the following items - -* name - the name you want to give the connection, which will be saved in the results. -* endpoint - the HTTP endpoint or CLI call. -* updateEndpoint - If your HTTP endpoint is an HTTP Post endpoint set this to the post endpoint. 
(optional) -* user - for authentication purposes (optional) -* password - for authentication purposes (optional) -* version - setting the version of the tested triplestore, if set resource URI will be ires:name-version (optional) - -To setup an endpoint as well as an updateEndpoint might be confusing at first, but if you to test read and write performance simultanously and how updates might have an impact on read performance, you can set up both. - -For more detail on how to setup the CLI call look at [Implemented Workers](../workers). There are all CLI Workers explained and how to set the endpoint such that the application will be run correctly. - -Let's look at an example: - -```yaml -connections: - - name: "System1" - endpoint: "http://localhost:8800/query" - version: 1.0-SNAP - - name: "System2" - endpoint: "http://localhost:8802/query" - updateEndpoint: "http://localhost:8802/update" - user: "testuser" - password: "secret" -``` - -Here we have two connections: System1 and System2. System1 is only setup to use an HTTP Get endpoint at http://localhost:8800/query. System2 however uses authentication and has an update endpoint as well, and thus will be correctly test with updates (POSTs) too. - -### Datasets - -Pretty straight forward. You might want to test your system with different datasets (e.g. databases, triplestores etc.) -If you system does not work on different datasets, just add one datasetname like - -```yaml -datasets: - - name: "DoesNotMatter" -``` - -otherwise you might want to benchmark different datasets. Hence you can setup a Dataset Name, as well as file. -The dataset name will be added to the results, whereas both can be used in the task script hooks, to automatize dataset load into your system. - -Let's look at an example: - -```yaml -datasets: - - name: "DatasetName" - file: "your-data-base.nt" - - name: "Dataset2" -``` - -### Tasks - -A Task is one benchmark Task which will be executed against all connections for all datasets. -A Task might be a stresstest which we will be using in this example. Have a look at the full configuration of the [Stresstest](../stresstest#Configuration) - -The configuration of one Task consists of the following: - -* className - The className or [Shorthand](#Shorthand) -* configuration - The parameters of the task - -```yaml -tasks: - - className: "YourTask" - configuration: - parameter1: value1 - parameter2: "value2" -``` - -Let's look at an example: - -```yaml -tasks: - - className: "Stresstest" - configuration: - #timeLimit is in ms - timeLimit: 3600000 - queryHandler: - className: "InstancesQueryHandler" - workers: - - threads: 2 - className: "SPARQLWorker" - queriesFile: "queries.txt" - timeOut: 180000 - - className: "Stresstest" - configuration: - noOfQueryMixes: 1 - queryHandler: - className: "InstancesQueryHandler" - workers: - - threads: 2 - className: "SPARQLWorker" - queriesFile: "queries.txt" - timeOut: 180000 -``` - -We configured two Tasks, both Stresstests. The first one will be executed for one hour and uses simple text queries which can be executed right away. -Further on it uses 2 simulated SPARQLWorkers with the same configuration. -At this point it's recommend to check out the [Stresstest Configuration](../stresstest#Configuration) in detail for further configuration. - - -### Storages - -Tells Iguana how to save your results. Currently Iguana supports two solutions - -* NTFileStorage - will save your results into one NTriple File. -* RDFFileStorage - will save your results into an RDF File (default TURTLE). 
-* TriplestoreStorage - Will upload the results into a specified Triplestore - -This is optional. The default storage is `NTFileStorage`. - -**NTFileStorage** can be setup by just stating to use it like - -```yaml -storages: - - className: "NTFileStorage" -``` -However it can be configured to use a different result file name. The default is `results_{DD}-{MM}-{YYYY}_{HH}-{mm}.nt`. -See example below. - -```yaml -storages: - - className: "NTFileStorage" - #optional - configuration: - fileName: "results-of-my-benchmark.nt" -``` - -The **RDFFileStorage** is similar to the NTFileStorage but will determine the RDF format from the file extension -To use RDF/XML f.e. you would end the file on .rdf, for TURTLE end it on .ttl - -```yaml -storages: - - className: "NTFileStorage" - #optional - configuration: - fileName: "results-of-my-benchmark.rdf" -``` - - - -The **TriplestoreStorage** can be configured as follows: - -```yaml -storages: - - className: TriplestoreStorage - configuration: - endpoint: "http://localhost:9999/sparql" - updateEndpoint: "http://localhost:9999/update" -``` - -if you triple store uses authentication you can set that up as follows: - -```yaml -storages: - - className: TriplestoreStorage - configuration: - endpoint: "http://localhost:9999/sparql" - updateEndpoint: "http://localhost:9999/update" - user: "UserName" - password: "secret" -``` - - -For further detail on how to read the results have a look [here](../results) - - - -### Metrics - -Let's Iguana know what Metrics you want to include in the results. - -Iguana supports the following metrics: - -* Queries Per Second (QPS) -* Average Queries Per Second (AvgQPS) -* Query Mixes Per Hour (QMPH) -* Number of Queries successfully executed (NoQ) -* Number of Queries per Hour (NoQPH) -* Each query execution (EachQuery) - experimental - -For more detail on each of the metrics have a look at [Metrics](../metrics) - -Let's look at an example: - -```yaml -metrics: - - className: "QPS" - - className: "AvgQPS" - - className: "QMPH" - - className: "NoQ" - - className: "NoQPH" -``` - -In this case we use all the default metrics which would be included if you do not specify `metrics` in the configuration at all. -However you can also just use a subset of these like the following: - -```yaml -metrics: - - className: "NoQ" - - className: "AvgQPS" -``` - -For more detail on how the results will include these metrics have a look at [Results](../results). - -### Task script hooks - -To automatize the whole benchmark workflow, you can setup a script which will be executed before each task, as well as a script which will be executed after each task. - -To make it easier, the script can get the following values - -* dataset.name - The current dataset name -* dataset.file - The current dataset file name if there is anyone -* connection - The current connection name -* connection.version - The current connection version, if no version is set -> {{ '{{connection.version}}' }} -* taskID - The current taskID - - -You can set each one of them as an argument using brackets like `{{ '{{connection}}' }}`. -Thus you can setup scripts which will start your system and load it with the correct dataset file beforehand and stop the system after every task. - -However these script hooks are completely optional. 
- -Let's look at an example: - -```yaml -preScriptHook: "/full/path/{{ '{{connection}}' }}-{{ '{{connection.version}}' }}/load-and-start.sh {{ '{{dataset.file}}' }}" -postScriptHook: "/full/path/{{ '{{connection}}' }}/stop.sh" - -``` - -### Full Example - -```yaml -connections: - - name: "System1" - endpoint: "http://localhost:8800/query" - - name: "System2" - endpoint: "http://localhost:8802/query" - updateEndpoint: "http://localhost:8802/update" - user: "testuser" - password: "secret" - -datasets: - - name: "DatasetName" - file: "your-data-base.nt" - - name: "Dataset2" - -tasks: - - className: "Stresstest" - configuration: - #timeLimit is in ms - timeLimit: 3600000 - queryHandler: - className: "InstancesQueryHandler" - workers: - - threads: 2 - className: "SPARQLWorker" - queriesFile: "queries.txt" - timeOut: 180000 - - className: "Stresstest" - configuration: - noOfQueryMixes: 1 - queryHandler: - className: "InstancesQueryHandler" - workers: - - threads: 2 - className: "SPARQLWorker" - queriesFile: "queries.txt" - timeOut: 180000 - -preScriptHook: "/full/path/{{ '{{connection}}' }}/load-and-start.sh {{ '{{dataset.file}}' }}" -postScriptHook: "/full/path/{{ '{{connection}}' }}/stop.sh" - - -metrics: - - className: "QMPH" - - className: "QPS" - - className: "NoQPH" - - className: "NoQ" - - className: "AvgQPS" - -storages: - - className: "NTFileStorage" - #optional - - configuration: - fileName: "results-of-my-benchmark.nt" -``` - - -### Shorthand - -A shorthand is a short name for a class in Iguana which can be used in the configuration instead of the complete class name: -e.g. instead of - -```yaml -storages: - - className: "org.aksw.iguana.rp.storage.impl.NTFileStorage" -``` - -you can use the shortname NTFileStorage: - -```yaml -storages: - - className: "NTFileStorage" -``` - - -For a full map of the Shorthands have a look at [Shorthand-Mapping](../../shorthand-mapping) diff --git a/docs/usage/getting-started.md b/docs/usage/getting-started.md deleted file mode 100644 index 21ce6045e..000000000 --- a/docs/usage/getting-started.md +++ /dev/null @@ -1,49 +0,0 @@ -## What is Iguana - -Iguana is a HTTP and CLI read/write performance benchmark framework suite. -It can stresstest HTTP get and post endpoints as well as CLI applications using a bunch of simulated users which will bombard the endpoint using queries. -Queries can be anything. SPARQL, SQL, Text and anything else you can fit in one line. - -### What can be benchmarked - -Iguana is capable of benchmarking and stresstesting the following applications - -* HTTP GET and POST endpoint (e.g. Triple Stores, REST Services, Question Answering endpoints) -* CLI Applications which either - * exit after every query - * or awaiting input after each query - -### What Benchmarks are possible - -Every simulated User (named Worker in the following) gets a set of queries. -These queries have to be saved in one file, whereas each query is one line. -Hence everything you can fit in one line (e.g a SPARQL query, a text question, an RDF document) can be used as a query and a set of these queries represent the benchmark. -Iguana will then let every Worker execute these queries against the endpoint. - - -## Download - -Please download the latest release at [https://github.com/dice-group/IGUANA/releases/latest](https://github.com/dice-group/IGUANA/releases/latest) - -The zip file contains 3 files. 
-
-* iguana-corecontroller-x.y.z.jar
-* example-suite.yml
-* start.sh
-
-The example-suite.yml is a valid benchmark configuration which you can adjust to your needs using the [Configuration](Configuration) wiki.
-
-## Start a Benchmark
-
-Start Iguana with a benchmark suite (e.g. the example-suite.yml), either using the start script
-
-```bash
-./start-iguana.sh example-suite.yml
-```
-
-or using Java 11 directly if you want to give Iguana more RAM or set JVM options in general:
-
-```bash
-java -jar iguana-corecontroller-3.3.2.jar example-suite.yml
-```
diff --git a/docs/usage/languages.md b/docs/usage/languages.md
deleted file mode 100644
index 18af1e166..000000000
--- a/docs/usage/languages.md
+++ /dev/null
@@ -1,16 +0,0 @@
-# Supported Languages
-
-The language tag is set to ensure that the result size returned by the benchmarked system is read correctly, and it allows the results to include a little extra query statistics.
-
-Currently, two languages are implemented; however, you can use `lang.SPARQL` or simply ignore the tag altogether.
-If the queries are not in `SPARQL`, the query statistics will just contain the query text, and the result size will be read as if each returned line were one result.
-
-Additionally, a SIMPLE language tag is provided which parses nothing and sets the result size to the content length of the response.
-
-If you work with results which have a content length >= 2 GB, please use `lang.SIMPLE`, as `lang.SPARQL` and `lang.RDF` cannot work with results >= 2 GB at the moment.
-
-The 3 languages are:
-
-* `lang.SPARQL`
-* `lang.RDF`
-* `lang.SIMPLE`
diff --git a/docs/usage/metrics.md b/docs/usage/metrics.md
deleted file mode 100644
index ff5abc4af..000000000
--- a/docs/usage/metrics.md
+++ /dev/null
@@ -1,53 +0,0 @@
-# Implemented Metrics
-
-Every metric will be calculated globally (for one Experiment Task) and locally (for each Worker).
-Hence you can analyze just the overall metrics or, if you want to look closer, the metrics of each worker.
-
-## NoQ
-
-The number of successfully executed Queries.
-
-## QMPH
-
-The number of executed Query Mixes Per Hour.
-
-## NoQPH
-
-The number of successfully executed Queries Per Hour.
-
-## QPS
-
-For each query the `queries per second`, the `total time` in ms (summed-up time of each execution), the number of `succeeded` and `failed` executions, and the `result size` will be saved.
-Additionally, Iguana will try to tell how many times a query failed and for what reason (`timeout`, `wrong return code`, e.g. 400, or `unknown`).
-
-Furthermore, the QPS metric provides a penalized QPS which penalizes failing queries.
-A system which cannot resolve a query may just return an error code immediately and thus achieve a very high score, even though it could only handle a few queries; this would be rather unfair to the compared systems. Thus we introduced the penalized QPS. It is calculated the same way as the QPS score, but for each failed query the penalty is used instead of the time the failed query actually took.
-
-The default penalty is set to the timeOut of the task.
-However, you can override it as follows:
-
-```yaml
-metrics:
-  - className: "QPS"
-    configuration:
-      #in ms
-      penalty: 10000
-```
-
-## AvgQPS
-
-The average of all queries per second.
-This metric also adds a penalizedAvgQPS. The default penalty is the timeOut; it can be overridden as follows:
-
-```yaml
-metrics:
-  - className: "AvgQPS"
-    configuration:
-      # in ms
-      penalty: 10000
-```
-
-## EachQuery
-
-Will save every query execution.
(Experimental)
-
diff --git a/docs/usage/queries.md b/docs/usage/queries.md
deleted file mode 100644
index 74b8edfef..000000000
--- a/docs/usage/queries.md
+++ /dev/null
@@ -1,115 +0,0 @@
-# Supported Queries
-
-There are currently two query types supported:
-
-* plain text queries
-* SPARQL pattern queries
-
-## Plain Text Queries
-
-These can be anything: SPARQL, SQL, a whole book if you need to.
-The only limitation is that each query has to fit in one line. If that isn't possible, use the [Multiple Line Plain Text Queries](#multiple-line-plain-text-queries).
-Every query is executed as is.
-
-This can be set using the following:
-
-```yaml
-...
-  queryHandler:
-    className: "InstancesQueryHandler"
-```
-
-## SPARQL Pattern Queries
-
-This only works for SPARQL queries at the moment.
-The idea came from the DBpedia SPARQL Benchmark papers from 2011 and 2012.
-
-Instead of using SPARQL queries as they are, you can set variables which will be exchanged with real data.
-Hence Iguana can create thousands of queries from a single SPARQL pattern query.
-
-A pattern query might look like the following:
-```sparql
-SELECT * {?s rdf:type %%var0%% ; %%var1%% %%var2%%. %%var2%% ?p ?o}
-```
-
-This query in itself cannot be sent to a triple store; however, we can exchange the variables with real data.
-For this we need a reference endpoint, ideally containing the same data as the dataset which will be tested.
-
-This query will then be transformed to
-```sparql
-SELECT ?var0 ?var1 ?var2 {?s rdf:type ?var0 ; ?var1 ?var2. ?var2 ?p ?o} LIMIT 2000
-```
-
-and be queried against the reference endpoint.
-
-For each result (limited to 2000) a query instance will be created.
-
-This will be done for every query in the benchmark queries.
-In the results, all instances of one query pattern will be subsumed as if they were one query.
-
-This can be set using the following:
-
-```yaml
-...
-  queryHandler:
-    className: "PatternQueryHandler"
-    endpoint: "http://your-reference-endpoint/sparql"
-```
-
-or
-
-```yaml
-...
-  queryHandler:
-    className: "PatternQueryHandler"
-    endpoint: "http://your-reference-endpoint/sparql"
-    limit: 4000
-```
-
-## Multiple Line Plain Text Queries
-
-Basically like plain text queries; however, this handler allows queries which need more than one line.
-Queries are separated using a delimiter line.
-
-Let's look at an example where the delimiter line is simply an empty line (this is the default):
-
-```
-QUERY 1 {
-still query 1
-}
-
-QUERY 2 {
-still Query2
-}
-```
-
-However, if you set e.g. `delim=###`, the file has to look like:
-
-```
-QUERY 1 {
-still query 1
-}
-###
-QUERY 2 {
-still Query2
-}
-```
-
-The delimiter query handler can be set as follows:
-
-```yaml
-...
-  queryHandler:
-    className: "DelimInstancesQueryHandler"
-```
-
-or, if you want to set the delimiter line:
-
-```yaml
-...
-  queryHandler:
-    className: "DelimInstancesQueryHandler"
-    delim: "###"
-```
-
diff --git a/docs/usage/results.md b/docs/usage/results.md
deleted file mode 100644
index 68faf0ae6..000000000
--- a/docs/usage/results.md
+++ /dev/null
@@ -1,151 +0,0 @@
-# Experiment Results
-
-IGUANA result schema
-
-## Fundamentals
-
-The results are saved as RDF.
-For those who don't know what RDF is: it is best described as a way to represent a directed graph.
-The corresponding query language is called SPARQL.
-The graph schema of an Iguana result is shown above, where each node represents a class object containing several annotations.
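-
-To get a feeling for the shape of this graph, here is a tiny illustrative excerpt (with hypothetical IDs and values; the namespaces are the iguana-benchmark.eu namespaces used throughout this page) in Turtle:
-
-```turtle
-@prefix iprop: <http://iguana-benchmark.eu/properties/> .
-@prefix iont:  <http://iguana-benchmark.eu/class/> .
-
-# a suite links to its experiments, an experiment to its tasks,
-# and a task carries metric values such as NoQ (hypothetical numbers)
-<http://iguana-benchmark.eu/resource/123>     a iont:Suite ;
-    iprop:experiment <http://iguana-benchmark.eu/resource/123/1> .
-<http://iguana-benchmark.eu/resource/123/1>   iprop:task <http://iguana-benchmark.eu/resource/123/1/1> .
-<http://iguana-benchmark.eu/resource/123/1/1> iprop:NoQ 42 .
-```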
-
-To retrieve all taskIDs you can do the following:
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?taskID {
-  ?suiteID rdf:type iont:Suite .
-  ?suiteID iprop:experiment ?expID .
-  ?expID iprop:task ?taskID .
-}
-```
-
-Let's look at an example to clarify how to request the global NoQ metric for a taskID you already know.
-Let's assume the taskID is `123/1/1`.
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?noq {
-  ires:123/1/1 iprop:NoQ ?noq
-}
-```
-
-If you want to get all the local worker NoQ metrics, do the following:
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?workerID ?noq {
-  ires:123/1/1 iprop:workerResult ?workerID .
-  ?workerID iprop:NoQ ?noq
-}
-```
-
-However, if you just want to see the global NoQ metric for all taskIDs in your results, do the following:
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?taskID ?noq {
-  ?suiteID rdf:type iont:Suite .
-  ?suiteID iprop:experiment ?expID .
-  ?expID iprop:task ?taskID .
-  ?taskID iprop:NoQ ?noq .
-}
-```
-
-Instead of the NoQ metric you can do this for all other metrics, except `QPS`.
-
-To retrieve `QPS`, look at the results schema above; let's go through an example. Assume the taskID is `123/1/1` again.
-You can retrieve the global QPS values (seen above in ExecutedQueries, e.g. `QPS`, `succeeded` etc.) as follows:
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?executedQuery ?qps ?failed ?resultSize {
-  ires:123/1/1 iprop:query ?executedQuery .
-  ?executedQuery iprop:QPS ?qps .
-  ?executedQuery iprop:failed ?failed .
-  ?executedQuery iprop:resultSize ?resultSize .
-}
-```
-This will get you the QPS value, the number of failed executions and the result size of each query.
-
-Furthermore, you can show the dataset and connection names:
-
-```
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-
-SELECT ?taskID ?datasetLabel ?connectionLabel ?noq {
-  ?suiteID rdf:type iont:Suite .
-  ?suiteID iprop:experiment ?expID .
-  ?expID iprop:dataset ?dataset .
-  ?dataset rdfs:label ?datasetLabel .
-  ?expID iprop:task ?taskID .
-  ?taskID iprop:connection ?connection .
-  ?connection rdfs:label ?connectionLabel .
-  ?taskID iprop:NoQ ?noq .
-}
-```
-
-This query will show a table containing, for each task, the taskID, the dataset name, the connection name and the number of queries successfully executed.
-
-## SPARQL Query statistics
-
-If you were using SPARQL queries as your benchmark queries, you can retrieve additional statistics of a query, such as whether the query has a FILTER:
-
-```sparql
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-
-SELECT ?executedQuery ?qps ?hasFilter ?queryText {
-  ires:123/1/1 iprop:query ?executedQuery .
-  ?executedQuery iprop:QPS ?qps .
-  ?executedQuery iprop:queryID ?query .
-  ?query iprop:filter ?hasFilter .
-  ?query rdfs:label ?queryText .
-}
-```
-
-This provides the QPS value, whether the SPARQL query has a filter, and the actual query string.
-
-## Ontology
-
-The results ontology (a description of what each property and class means) can be found [here](http://iguana-benchmark.eu/ontology/3.3.2/iguana.owl).
-
-## Adding LSQ analysis
-
-If you're using SPARQL and want a more in-depth analysis of the query statistics, you can use [LSQ](https://github.com/AKSW/LSQ) to do so.
-Iguana will add an `owl:sameAs` link between the SPARQL queries used in your benchmark and the equivalent LSQ query links.
-
-Hence you can run the performance measurement using Iguana and the query analysis using LSQ independently, and combine both results afterwards.
diff --git a/docs/usage/stresstest.md b/docs/usage/stresstest.md
deleted file mode 100644
index ea4c6302b..000000000
--- a/docs/usage/stresstest.md
+++ /dev/null
@@ -1,136 +0,0 @@
-# Stresstest
-
-Iguana's implemented Stresstest benchmark task tries to emulate a real-world scenario in which an endpoint or application is under high stress.
-As endpoints in real life might get multiple simultaneous requests within seconds, it is very important to verify that your application can handle this.
-
-The stresstest emulates users or applications which bombard the endpoint with a set of queries, either for a specific amount of time or until a specific number of queries has been executed.
-Each simulated user is called a Worker in the following.
-As you might want to test read and write performance, or just want to emulate different user behaviour, the stresstest allows you to configure several workers.
-Every worker configuration can additionally be started several times; if you want one configuration executed multiple times, you can simply tell Iguana to run this worker configuration the specified number of times.
-However, to ensure that the endpoint can't just cache the response to the first request of a query, every worker starts at a pre-determined random query: a single worker will always start at that query to ensure fairness in benchmark comparisons, while every worker starts at a different query.
-
-## Configuration
-
-To configure this task you first have to tell Iguana to use the implemented task, like the following:
-
-```yaml
-tasks:
-  - className: "Stresstest"
-```
-
-Furthermore, you need to configure the Stresstest using the `configuration` parameter:
-
-```yaml
-tasks:
-  - className: "Stresstest"
-    configuration:
-      timeLimit: 600000
-      ...
-```
-
-As an end restriction you can either use `timeLimit`, which will stop the stresstest after the specified amount of time in ms, or you can set `noOfQueryMixes`, which stops every worker after it has executed the queries in the provided query set that many times.
-
-In addition to either `timeLimit` or `noOfQueryMixes` you can set the following parameters:
-
-* queryHandler
-* workers
-* warmup (optional)
-
-### Query Handling
-
-The queryHandler parameter lets the stresstest know what queries will be used.
-Normally you will need the `InstancesQueryHandler`, which uses plain text queries (could be SQL, SPARQL, a whole RDF document). The only restriction is that each query has to be in one line.
-
-You can set the query handler like the following:
-```yaml
-tasks:
-  - className: "Stresstest"
-    queryHandler:
-      className: "InstancesQueryHandler"
-    ...
-```
-
-To see which query handlers are supported, see [Supported Queries](../queries/).
-
-### Workers (simulated Users)
-
-Furthermore, you have to specify which workers to use.
-As described above, you can set several different worker configurations.
-Let's look at an example:
-```yaml
-  - className: "Stresstest"
-    timeLimit: 600000
-    workers:
-      - threads: 4
-        className: "SPARQLWorker"
-        queriesFile: "/path/to/your/queries.txt"
-      - threads: 16
-        className: "SPARQLWorker"
-        queriesFile: "/other/queries.txt"
-        fixedLatency: 5000
-```
-
-In this example we have two different worker configurations we want to use. The first one will create 4 `SPARQLWorker`s using the queries at `/path/to/your/queries.txt` without any latency; thus every query will be executed immediately after the previous one.
-The second worker configuration will start 16 `SPARQLWorker`s using the queries at `/other/queries.txt` with a fixed waiting time of `5000 ms` between each query.
-Hence every worker will execute its queries independently of the others, but will wait 5 s after each query execution before executing the next one.
-This configuration may simulate a few users requesting your endpoint locally (e.g. some of your applications relying on your database) and several users querying your endpoint from outside the network, where network latency and other interferences occur, which we try to simulate with the 5 s wait.
-
-A full list of supported workers and their parameters can be found at [Supported Workers](../workers).
-
-In this example our Stresstest would create 20 workers, which will simultaneously request the endpoint for 600000 ms (10 minutes).
-
-### Warmup
-
-In addition, you can optionally set a warmup, which aims to benchmark the system under a normal situation (sometimes a database is faster once it has already been running for a bit).
-The configuration is similar to the stresstest itself: you can set a `timeLimit` (however, not a certain number of query executions), different `workers`, and a `queryHandler` to use.
-If you don't set the `queryHandler` parameter, the warmup will simply use the `queryHandler` specified in the Stresstest itself.
-
-You can set the warmup as follows:
-```yaml
-tasks:
-  - className: "Stresstest"
-    warmup:
-      timeLimit: 600000
-      workers:
-        ...
-      queryHandler:
-        ...
-```
-
-That's it.
-A full example might look like this:
-
-```yaml
-tasks:
-  - className: "Stresstest"
-    configuration:
-      # 1 hour (time limit is in ms)
-      timeLimit: 3600000
-      # warmup is optional
-      warmup:
-        # 10 minutes (is in ms)
-        timeLimit: 600000
-        # queryHandler could be set too, same as in the stresstest configuration; otherwise the same queryHandler will be used.
-        # workers are set the same way as in the configuration part
-        workers:
-          - threads: 1
-            className: "SPARQLWorker"
-            queriesFile: "queries_warmup.txt"
-            timeOut: 180000
-      queryHandler:
-        className: "InstancesQueryHandler"
-      workers:
-        - threads: 16
-          className: "SPARQLWorker"
-          queriesFile: "queries_easy.txt"
-          timeOut: 180000
-        - threads: 4
-          className: "SPARQLWorker"
-          queriesFile: "queries_complex.txt"
-          fixedLatency: 100
-```
-
-## References
-
-* [Supported Queries](../queries/)
-* [Supported Workers](../workers)
diff --git a/docs/usage/tutorial.md b/docs/usage/tutorial.md
deleted file mode 100644
index bd9fc0313..000000000
--- a/docs/usage/tutorial.md
+++ /dev/null
@@ -1,342 +0,0 @@
-# Tutorial
-
-In this tutorial we will go through one benchmark using two systems, two datasets and one Stresstest.
-
-We are using the following:
-
-* Iguana v3.0.2
-* Apache Jena Fuseki 3
-* Blazegraph
-
-## Download
-
-First, let's create a working directory:
-
-```bash
-mkdir myBenchmark
-cd myBenchmark
-```
-
-Now let's download all required systems and Iguana.
-
-Starting with Iguana:
-```bash
-wget https://github.com/dice-group/IGUANA/releases/download/v3.0.2/iguana-3.0.2.zip
-unzip iguana-3.0.2.zip
-```
-
-Now we will download Blazegraph:
-
-```bash
-mkdir blazegraph && cd blazegraph
-wget https://downloads.sourceforge.net/project/bigdata/bigdata/2.1.5/blazegraph.jar?r=https%3A%2F%2Fsourceforge.net%2Fprojects%2Fbigdata%2Ffiles%2Fbigdata%2F2.1.5%2Fblazegraph.jar%2Fdownload%3Fuse_mirror%3Dmaster%26r%3Dhttps%253A%252F%252Fwww.blazegraph.com%252Fdownload%252F%26use_mirror%3Dnetix&ts=1602007009
-cd ../
-```
-
-Lastly, we need to download Apache Jena Fuseki and Apache Jena:
-
-```bash
-mkdir fuseki && cd fuseki
-wget https://downloads.apache.org/jena/binaries/apache-jena-3.16.0.zip
-unzip apache-jena-3.16.0.zip
-
-wget https://downloads.apache.org/jena/binaries/apache-jena-fuseki-3.16.0.zip
-unzip apache-jena-fuseki-3.16.0.zip
-```
-
-Finally, we have to download our datasets.
-We use two small datasets from ScholarlyData:
-the ISWC 2010 and the EKAW 2012 complete-alignments datasets.
-
-```
-mkdir datasets/
-cd datasets
-wget http://www.scholarlydata.org/dumps/conferences/alignments/iswc-2010-complete-alignments.rdf
-wget http://www.scholarlydata.org/dumps/conferences/alignments/ekaw-2012-complete-alignments.rdf
-cd ..
-```
-
-That's it.
-Let's set up Blazegraph and Fuseki.
-
-## Setting Up Systems
-
-To simplify the benchmark workflow we will use the pre- and post-script hooks, in which we load the current system before the benchmark and stop it afterwards.
-
-### Blazegraph
-
-First, let's create the script files:
-
-```bash
-cd blazegraph
-touch load-and-start.sh
-touch stop.sh
-```
-
-The `load-and-start.sh` script will start Blazegraph and use curl to POST our dataset.
-In our case the datasets are pretty small, hence the loading time is minimal.
-Otherwise it would be wise to load the dataset beforehand, back up the `blazegraph.jnl` file and simply exchange the file in the pre-script hook.
-
-For now, put this into the script `load-and-start.sh`:
-
-```bash
-#starting blazegraph with 4 GB ram
-cd ../blazegraph && java -Xmx4g -server -jar blazegraph.jar &
-
-#load the dataset file, which will be set as the first script argument
-curl -X POST -H 'Content-Type: application/rdf+xml' --data-binary "@$1" http://localhost:9999/blazegraph/sparql
-```
-
-Now edit `stop.sh`, adding the following:
-
-```
-pkill -f blazegraph
-```
-
-Be aware that this kills all Blazegraph instances, so make sure that no other process whose command line includes the word blazegraph is running.
-
-Finally, get back into the correct working directory:
-```bash
-cd ..
-```
-
-### Fuseki
-
-Now the same for Fuseki:
-
-```bash
-cd fuseki
-touch load-and-start.sh
-touch stop.sh
-```
-
-The `load-and-start.sh` script will load the dataset into a TDB directory and start Fuseki using that directory.
-
-Edit the script `load-and-start.sh` as follows:
-
-```bash
-cd ../fuseki
-# load the dataset as a tdb directory
-apache-jena-3.16.0/bin/tdbloader2 --loc DB $1
-
-# start fuseki
-apache-jena-fuseki-3.16.0/fuseki-server --loc DB /ds &
-```
-
-To ensure fairness and provide Fuseki with 4 GB as well, edit `apache-jena-fuseki-3.16.0/fuseki-server`, go to the last bit and exchange
-
-```
-JVM_ARGS=${JVM_ARGS:--Xmx1200M}
-```
-
-with
-
-```
-JVM_ARGS=${JVM_ARGS:--Xmx4G}
-```
-
-Now edit `stop.sh`, adding the following:
-
-```
-pkill -f fuseki
-```
-
-Be aware that this kills all Fuseki instances, so make sure that no other process whose command line includes the word fuseki is running.
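-
-If that is a concern, a slightly safer (hypothetical) variant is to record the PID of the server you started and kill only that process:
-
-```bash
-# in load-and-start.sh: remember the PID of the background server process
-apache-jena-fuseki-3.16.0/fuseki-server --loc DB /ds &
-echo $! > fuseki.pid
-
-# in stop.sh: kill only the recorded process
-kill "$(cat fuseki.pid)" && rm fuseki.pid
-```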
-
-Finally, get back into the correct working directory:
-```bash
-cd ..
-```
-
-## Benchmark queries
-
-We need some queries to benchmark.
-
-For now we will just use 3 simple queries:
-```
-SELECT * {?s ?p ?o}
-SELECT * {?s ?p ?o} LIMIT 10
-SELECT * {?s ?o}
-```
-
-Save this to `queries.txt`.
-
-## Creating the Benchmark Configuration
-
-Now let's create the Iguana benchmark configuration.
-Create a file called `benchmark-suite.yml`:
-
-```bash
-touch benchmark-suite.yml
-```
-
-Add the following subsections to this file, or simply go to the [Full Configuration](#full-configuration) and add the whole piece at once.
-
-Be aware that Iguana will be started one directory level below our working directory, and thus paths will use `../` to get the correct path.
-
-### Datasets
-
-We have two datasets, the EKAW 2012 and the ISWC 2010 dataset.
-Let's name them as such and set the file paths so that the script hooks can use them.
-
-```yaml
-datasets:
-  - name: "ekaw-2012"
-    file: "../datasets/ekaw-2012-complete-alignments.rdf"
-  - name: "iswc-2010"
-    file: "../datasets/iswc-2010-complete-alignments.rdf"
-```
-
-### Connections
-
-We have two connections, Blazegraph and Fuseki, with their respective endpoints, as follows:
-
-```yaml
-connections:
-  - name: "blazegraph"
-    endpoint: "http://localhost:9999/blazegraph/sparql"
-  - name: "fuseki"
-    endpoint: "http://localhost:3030/ds/sparql"
-```
-
-### Task script hooks
-
-To ensure that the correct triple store is loaded with the correct dataset, add the following pre-script hook: `../{{ '{{connection}}' }}/load-and-start.sh {{ '{{dataset.file}}' }}`.
-`{{ '{{connection}}' }}` will be set to the currently benchmarked connection name (e.g. `fuseki`) and `{{ '{{dataset.file}}' }}` will be set to the current dataset file path.
-
-For example, the start script of Fuseki is located at `fuseki/load-and-start.sh`.
-
-Furthermore, add the `stop.sh` script as the post-script hook, ensuring that the store is stopped after each task.
-
-This will look like this:
-
-```yaml
-pre-script-hook: "../{{ '{{connection}}' }}/load-and-start.sh {{ '{{dataset.file}}' }}"
-post-script-hook: "../{{ '{{connection}}' }}/stop.sh"
-```
-
-### Task configuration
-
-We want to stresstest our stores for 10 minutes (600,000 ms) per dataset-connection pair.
-We are using plain text queries (`InstancesQueryHandler`) and want to have two simulated users issuing SPARQL queries.
-The queries file is located in our working directory at `queries.txt`. Be aware that we start Iguana one level below, which makes the correct path `../queries.txt`.
-
-To achieve these restrictions, add the following to your file:
-
-```yaml
-tasks:
-  - className: "Stresstest"
-    configuration:
-      timeLimit: 600000
-      queryHandler:
-        className: "InstancesQueryHandler"
-      workers:
-        - threads: 2
-          className: "SPARQLWorker"
-          queriesFile: "../queries.txt"
-```
-
-### Result Storage
-
-Let's store the results as an N-Triples file and, for the smoothness of this tutorial, put them into the file `my-first-iguana-results.nt`.
-
-Add the following to do this.
-
-```yaml
-storages:
-  - className: "NTFileStorage"
-    configuration:
-      fileName: "my-first-iguana-results.nt"
-```
-
-### Full configuration
-
-```yaml
-datasets:
-  - name: "ekaw-2012"
-    file: "../datasets/ekaw-2012-complete-alignments.rdf"
-  - name: "iswc-2010"
-    file: "../datasets/iswc-2010-complete-alignments.rdf"
-
-connections:
-  - name: "blazegraph"
-    endpoint: "http://localhost:9999/blazegraph/sparql"
-  - name: "fuseki"
-    endpoint: "http://localhost:3030/ds/sparql"
-
-pre-script-hook: "../{{ '{{connection}}' }}/load-and-start.sh {{ '{{dataset.file}}' }}"
-post-script-hook: "../{{ '{{connection}}' }}/stop.sh"
-
-tasks:
-  - className: "Stresstest"
-    configuration:
-      timeLimit: 600000
-      queryHandler:
-        className: "InstancesQueryHandler"
-      workers:
-        - threads: 2
-          className: "SPARQLWorker"
-          queriesFile: "../queries.txt"
-
-storages:
-  - className: "NTFileStorage"
-    configuration:
-      fileName: "my-first-iguana-results.nt"
-```
-
-## Starting the Benchmark
-
-Simply use the previously created `benchmark-suite.yml` and start with:
-
-```bash
-cd iguana/
-./start-iguana.sh ../benchmark-suite.yml
-```
-
-Now we wait for 40 minutes until the benchmark is finished.
-
-## Results
-
-As previously stated, our results will be stored in `my-first-iguana-results.nt`.
-
-Load this into a triple store of your choice and query for the results you want to use.
-
-Using Blazegraph, for example:
-
-```bash
-cd blazegraph
-../load-and-start.sh ../my-first-iguana-results.nt
-```
-
-To query the results, go to `http://localhost:9999/blazegraph/`.
-
-An example:
-
-```
-PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
-PREFIX iprop: <http://iguana-benchmark.eu/properties/>
-PREFIX iont: <http://iguana-benchmark.eu/class/>
-PREFIX ires: <http://iguana-benchmark.eu/resource/>
-PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
-
-SELECT ?taskID ?datasetLabel ?connectionLabel ?noq {
-  ?suiteID rdf:type iont:Suite .
-  ?suiteID iprop:experiment ?expID .
-  ?expID iprop:dataset ?dataset .
-  ?dataset rdfs:label ?datasetLabel .
-  ?expID iprop:task ?taskID .
-  ?taskID iprop:connection ?connection .
-  ?connection rdfs:label ?connectionLabel .
-  ?taskID iprop:NoQ ?noq .
-}
-```
-
-This will provide a list of all tasks, naming the dataset, the connection and the number of queries which were successfully executed.
-
-We will, however, not go into detail on how to read the results.
-This can be read at [Benchmark Results](../results/).
diff --git a/docs/usage/workers.md b/docs/usage/workers.md
deleted file mode 100644
index a54794906..000000000
--- a/docs/usage/workers.md
+++ /dev/null
@@ -1,333 +0,0 @@
-# Supported Workers
-
-A Worker is basically just a thread querying the endpoint/application. It tries to emulate a single user/application requesting your system until it is told to stop.
-In a task (e.g. the [stresstest](../stresstest/)) you can configure several worker configurations which will then be used inside the task.
-
-Every worker configuration can additionally be started several times; if you want one configuration executed multiple times, you can simply tell Iguana to run this worker configuration the specified number of times.
-However, to ensure that the endpoint can't just cache the response to the first request of a query, every worker starts at a pre-determined random query: a single worker will always start at that query to ensure fairness in benchmark comparisons, while every worker starts at a different query.
-
-There are a few workers implemented, which can be separated into two main categories:
-
-* Http Workers
-* CLI Workers
-
-## Http Workers
-
-These workers can be used to benchmark HTTP applications (such as a SPARQL endpoint).
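-
-Conceptually, each HTTP worker issues requests like the following curl calls (an illustrative sketch; the endpoint URLs, parameter name and queries are placeholders, not part of Iguana itself):
-
-```bash
-# what a GET worker does: the query is sent in the configured GET parameter
-curl -G 'http://localhost:3030/ds/sparql' --data-urlencode 'query=SELECT * {?s ?p ?o}'
-
-# what a POST worker does: the query is sent in the request body
-curl -X POST 'http://localhost:3030/ds/update' \
-     -H 'Content-Type: text/plain' --data-binary 'INSERT DATA { <s> <p> <o> }'
-```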
-
-### Http Get Worker
-
-An HTTP worker using GET requests.
-This worker will use the `endpoint` of the connection.
-
-This worker has several configuration parameters, listed in the following table:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| parameterName | yes | query | The GET parameter in which the query is sent. (see also [Supported Queries](../queries)) |
-| responseType | yes | | The content type the endpoint should return; sets the `Accept: ` header. |
-| language | yes | lang.SPARQL (plain text) | The language the queries and responses are in (e.g. SPARQL). Basically just creates some more statistics (see [Supported Languages](languages)). |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-Let's look at an example:
-
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "HttpGetWorker"
-      timeOut: 180000
-      parameterName: "text"
-```
-
-This will use one HttpGetWorker with a timeout of 3 minutes, sending the query through the GET parameter `text`.
-
-### Http Post Worker
-
-An HTTP worker using POST requests.
-This worker will use the `updateEndpoint` of the connection.
-
-This worker has several configuration parameters, listed in the following table:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| parameterName | yes | query | The parameter in which the query is sent. (see also [Supported Queries](../queries)) |
-| contentType | yes | `text/plain` | The content type of the update queries; sets the `Content-Type: ` header. |
-| responseType | yes | | The content type the endpoint should return; sets the `Accept: ` header. |
-| language | yes | lang.SPARQL (plain text) | The language the queries and responses are in (e.g. SPARQL). Basically just creates some more statistics (see [Supported Languages](languages)). |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-Let's look at an example:
-
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "HttpPostWorker"
-      timeOut: 180000
-```
-
-This will use one HttpPostWorker with a timeout of 3 minutes.
-
-### SPARQL Worker
-
-Simply a GET worker whose language parameter is set to `lang.SPARQL`.
-Otherwise, see the [Http Get Worker](#http-get-worker) for configuration.
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "SPARQLWorker"
-      timeOut: 180000
-```
-
-### SPARQL UPDATE Worker
-
-Simply a POST worker, but specialized for SPARQL updates.
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| timerStrategy | yes | `NONE` | `NONE`, `FIXED` or `DISTRIBUTED`; see below for explanation. |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-The **timerStrategy** parameter lets the worker know how to distribute the updates.
-The fixedLatency and gaussianLatency parameters are not affected; the worker will wait these additionally.
-
-* NONE: the worker just executes each update query one after another
-* FIXED: the distribution is calculated as `timeLimit / #updates` at the start, and the worker waits this amount between each update. The time limit of the task the worker is executed in will be used.
-* DISTRIBUTED: the time to wait between two updates is recalculated after each update as `timeRemaining / #updatesRemaining`.
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "UPDATEWorker"
-      timeOut: 180000
-      timerStrategy: "FIXED"
-```
-
-## CLI Workers
-
-These workers can be used to benchmark a CLI application.
-
-### CLI Worker
-
-This worker should be used if the CLI application runs a query once and exits afterwards.
-Something like:
-```bash
-$ cli-script.sh query
-HEADER
-QUERY RESULT 1
-QUERY RESULT 2
-...
-$
-```
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "CLIWorker"
-```
-
-### CLI Input Worker
-
-This worker should be used if the CLI application keeps running and the query is sent via its standard input.
-
-Something like:
-```bash
-$ cli-script.sh start
-Your Input: QUERY
-HEADER
-QUERY RESULT 1
-QUERY RESULT 2
-...
-
-Your Input:
-```
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| initFinished | no | | String which occurs when the application is ready to be queried (e.g. after loading). |
-| queryFinished | no | | String which occurs when the query response has finished. |
-| queryError | no | | String which occurs when an error happened during query execution. |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "CLIInputWorker"
-      initFinished: "loading finished"
-      queryFinished: "query execution took:"
-      queryError: "Error happened during request"
-```
-
-### Multiple CLI Input Worker
-
-This worker should be used if the CLI application keeps running, the query is sent via its standard input, and the application quits on errors.
-
-Something like:
-```bash
-$ cli-script.sh start
-Your Input: QUERY
-HEADER
-QUERY RESULT 1
-QUERY RESULT 2
-...
-
-Your Input: ERROR
-ERROR happened, exiting
-$
-```
-
-To ensure a smooth benchmark, the CLI application will be run multiple times instead of once; if the application quits, the next running process will be used, while the old process is restarted in the background.
-Thus, as soon as an error happens, the benchmark can continue without a problem.
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| initFinished | no | | String which occurs when the application is ready to be queried (e.g. after loading). |
-| queryFinished | no | | String which occurs when the query response has finished. |
-| queryError | no | | String which occurs when an error happened during query execution. |
-| numberOfProcesses | yes | 5 | The number of times the application should be started to ensure a smooth benchmark; see above. |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "MultipleCLIInputWorker"
-      initFinished: "loading finished"
-      queryFinished: "query execution took:"
-      queryError: "Error happened during request"
-```
-
-### CLI Input File Worker
-
-Same as the [Multiple CLI Input Worker](#multiple-cli-input-worker); however, the query won't be sent to the input directly. Instead, it is written to a file, and the file name is sent to the input.
-
-Something like:
-```bash
-$ cli-script.sh start
-Your Input: file-containing-the-query.txt
-HEADER
-QUERY RESULT 1
-QUERY RESULT 2
-...
-
-```
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| initFinished | no | | String which occurs when the application is ready to be queried (e.g. after loading). |
-| queryFinished | no | | String which occurs when the query response has finished. |
-| queryError | no | | String which occurs when an error happened during query execution. |
-| directory | no | | Directory in which the file containing the query should be saved. |
-| numberOfProcesses | yes | 5 | The number of times the application should be started to ensure a smooth benchmark; see [Multiple CLI Input Worker](#multiple-cli-input-worker). |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "CLIInputFileWorker"
-      initFinished: "loading finished"
-      queryFinished: "query execution took:"
-      queryError: "Error happened during request"
-      directory: "/tmp/"
-```
-
-### CLI Input Prefix Worker
-
-Same as the [Multiple CLI Input Worker](#multiple-cli-input-worker).
However, the CLI application might need a prefix and a suffix.
-
-Something like:
-```bash
-$ cli-script.sh start
-Your Input: PREFIX QUERY SUFFIX
-HEADER
-QUERY RESULT 1
-QUERY RESULT 2
-...
-
-```
-
-Parameters are:
-
-| parameter | optional | default | description |
-| ----- | --- | ----- | --------- |
-| queriesFile | no | | File containing the queries this worker should use. |
-| initFinished | no | | String which occurs when the application is ready to be queried (e.g. after loading). |
-| queryFinished | no | | String which occurs when the query response has finished. |
-| queryError | no | | String which occurs when an error happened during query execution. |
-| queryPrefix | no | | String to use as a prefix before the query. |
-| querySuffix | no | | String to use as a suffix after the query. |
-| numberOfProcesses | yes | 5 | The number of times the application should be started to ensure a smooth benchmark; see [Multiple CLI Input Worker](#multiple-cli-input-worker). |
-| timeOut | yes | 180000 (3 minutes) | The timeout in ms after which a query will be aborted. |
-| fixedLatency | yes | 0 | A fixed time (in ms) to wait between each query, simulating network latency or user behaviour. |
-| gaussianLatency | yes | 0 | A random value in `[0, 2*value]` (in ms) will be waited between each query, simulating network latency or user behaviour. |
-
-An example:
-```yaml
-  ...
-  workers:
-    - threads: 1
-      className: "CLIInputPrefixWorker"
-      initFinished: "loading finished"
-      queryFinished: "query execution took:"
-      queryError: "Error happened during request"
-      queryPrefix: "SPARQL"
-      querySuffix: ";"
-```
-This will send the following as input: `SPARQL QUERY ;`
diff --git a/docs/usage/workflow.md b/docs/usage/workflow.md
deleted file mode 100644
index 6ec5d42fc..000000000
--- a/docs/usage/workflow.md
+++ /dev/null
@@ -1,15 +0,0 @@
-# Workflow
-
-Iguana will first parse the configuration and afterwards execute each task for each connection for each dataset.
-
-Imagine it like the following:
-
-* for each dataset D
-  * for each connection C
-    * for each task T
-      1. execute pre script hook
-      2. execute task T(D, C)
-      3. collect and calculate results
-      4. write results
-      5. execute post script hook
-
diff --git a/example-suite.yml b/example-suite.yml
index c4290113e..00c50eb5e 100644
--- a/example-suite.yml
+++ b/example-suite.yml
@@ -1,68 +1,108 @@
+# This file showcases the configuration of most IGUANA features.
+
+# Datasets are optional and have no functionality.
 datasets:
   - name: "DatasetName"
-    #optional, will just be set in the pre & post script hooks by using {{dataset.file}}
     file: "src/test/resources/dataset.txt"
+
+# Connections that will be used by the workers for the benchmark.
 connections:
   - name: "Virtuoso7"
-    user: "dba"
-    password: "dba"
+    authentication:
+      user: "dba"
+      password: "dba"
     endpoint: "http://localhost:8890/sparql"
+    dataset: "DatasetName" # optional
   - name: "Virtuoso6"
-    user: "dba"
-    password: "dba"
+    authentication:
+      user: "dba"
+      password: "dba"
     endpoint: "http://localhost:8891/sparql"
   - name: "Blazegraph"
     endpoint: "http://localhost:9999/blazegraph/sparql"
-  - name: "Fuseki"
-    user: "test"
-    endpoint: "http://127.0.0.1:3030/ds/sparql"
+    authentication:
+      user: "user"
+      password: "test"
     updateEndpoint: "http://localhost:3030/ds/update"
+    updateAuthentication:
+      user: "updateUser"
+      password: "password"
+# The tasks that will be executed by the benchmark. They will be executed in the order they are defined.
tasks: - - className: "org.aksw.iguana.cc.tasks.impl.Stresstest" - configuration: - # 1 hour (time Limit is in ms) - timeLimit: 360000 - # warmup is optional - warmup: - # 1 minutes (is in ms) - timeLimit: 600000 - # queryHandler could be set too, same as in the stresstest configuration, otherwise the same queryHandler will be use. - # workers are set the same way as in the configuration part - workers: - - threads: 1 - className: "SPARQLWorker" - queriesFile: "queries_warmup.txt" - timeOut: 180000 - queryHandler: - className: "InstancesQueryHandler" - workers: - - threads: 16 - className: "SPARQLWorker" - queriesFile: "queries_easy.txt" - timeOut: 180000 - - threads: 4 - className: "SPARQLWorker" - queriesFile: "queries_complex.txt" - fixedLatency: 100 - gaussianLatency: 50 - parameterName: "query" - responseType: "application/sparql-results+json" + - type: stresstest # Stresstests are used to test the performance of the system by sending a large number of requests. + warmupWorkers: # Warmup workers are used to warm up the system before the actual stresstest. + - type: SPARQLProtocolWorker + requestType: post update # Send POST requests with application/sparql-update content type to the endpoint. + number: 16 # Initialize 16 workers with the same configuration. + queries: + path: "./example/queries" + format: "folder" # Queries are stored in a folder. + order: "linear" + timeout: 0.02s + connection: Virtuoso7 + parseResults: false + completionTarget: + number: 50 # Execute each query 50 times. + workers: + - type: "SPARQLProtocolWorker" + number: 16 + queries: + path: "./example/queries.txt" + order: "random" + seed: 42 + timeout: 3m + connection: Virtuoso7 + parseResults: false + completionTarget: + duration: 1000s # Execute the queries for 1000 seconds. + requestType: post url-enc query # Send url-encoded POST request to endpoint. + - number: 4 + type: "SPARQLProtocolWorker" + connection: Virtuoso7 + requestType: post url-enc update + completionTarget: + duration: 1000s + queries: + path: "./example/queries.txt" + timeout: 100s + acceptHeader: "application/sparql-results+json" # Accept header for the request. + - type: stresstest + workers: + - type: "SPARQLProtocolWorker" + connection: Virtuoso7 + number: 16 + requestType: post query + queries: + path: "./example/queries.txt" + timeout: 180s + completionTarget: + duration: 1000s + parseResults: false + - number: 4 + requestType: get query # Send GET request with the query as the parameter to the endpoint. + connection: Virtuoso7 + completionTarget: + duration: 1000s + type: "SPARQLProtocolWorker" + queries: + path: "./example/queries.txt" + timeout: 100s + parseResults: true + acceptHeader: "application/sparql-results+json" -# both are optional and can be used to load and start as well as stop the connection before and after every task -preScriptHook: "./triplestores/{{connection}}/start.sh {{dataset.file}} {{dataset.name}} {{taskID}}" -postScriptHook: "./triplestores/{{connection}}/stop.sh" - -#optional otherwise the same metrics will be used as default -metrics: - - className: "QMPH" - - className: "QPS" - - className: "NoQPH" - - className: "AvgQPS" - - className: "NoQ" - -#optional otherwise an nt file will be used +# Define how the results will be stored. 
storages: - - className: "NTFileStorage" - #configuration: - #fileName: YOUR_RESULT_FILE_NAME.nt \ No newline at end of file + - type: "rdf file" + path: "some.ttl" + - type: "csv file" + directory: "results/" + - type: "triplestore" + user: "dba" + password: "dba" + endpoint: "http://localhost:8890/update" + +responseBodyProcessors: + - contentType: "application/sparql-results+json" + threads: 1 + timeout: 1 min diff --git a/graalvm/generate-config.sh b/graalvm/generate-config.sh new file mode 100755 index 000000000..fdd4625f2 --- /dev/null +++ b/graalvm/generate-config.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash + +if [ -z "$GRAALVM_HOME" ]; then + echo "The variable GRAALVM_HOME needs to be set to the GraalVM installation directory." + exit 1 +fi + +SUITE=./graalvm/suite.yml +TARGET_DIR=./target +while getopts ":hs:t:" opt; do + case ${opt} in + h) + echo "Usage: $0 [-h] [-s ]" + echo " -h: Display this help message." + echo " -s : The path to the suite.yml file. Default: ./graalvm/suite.yml" + echo " -t : The location of the maven target directory. Default: ./target/" + exit 0 + ;; + t) + TARGET_DIR=$OPTARG + ;; + s) + SUITE=$OPTARG + ;; + ?) + echo "Invalid option: ${opt}" 1>&2 + exit 1 + ;; + esac +done + +if [ ! -f "$TARGET_DIR"/iguana.jar ]; then + mvn -DskipTests package +fi + +if [ ! -d src/main/resources/META-INF/native-image/ ]; then + mkdir -p src/main/resources/META-INF/native-image/ +fi + +# Move generated configuration files from tests to the resources +if [ -f "$TARGET_DIR"/native/agent-output/test/resource-config.json ]; then + mv "$TARGET_DIR"/native/agent-output/test/* src/main/resources/META-INF/native-image/ +fi + +# Run through multiple different execution paths, so that the tracing agent can generate complete configuration files. +"$GRAALVM_HOME"/bin/java -agentlib:native-image-agent=config-merge-dir=src/main/resources/META-INF/native-image/ -jar "$TARGET_DIR"/iguana.jar --help > /dev/null +"$GRAALVM_HOME"/bin/java -agentlib:native-image-agent=config-merge-dir=src/main/resources/META-INF/native-image/ -jar "$TARGET_DIR"/iguana.jar --dry-run -is "$SUITE" > /dev/null +"$GRAALVM_HOME"/bin/java -agentlib:native-image-agent=config-merge-dir=src/main/resources/META-INF/native-image/ -jar "$TARGET_DIR"/iguana.jar --dry-run "$SUITE" > /dev/null + +# there is a bug in the tracing agent that outputs wrong formatted lines in the resource-config.json file (https://github.com/oracle/graal/issues/7985) +sed 's/\\\\E//g' src/main/resources/META-INF/native-image/resource-config.json | sed 's/\\\\Q//g' > src/main/resources/META-INF/native-image/resource-config.json.tmp +mv src/main/resources/META-INF/native-image/resource-config.json.tmp src/main/resources/META-INF/native-image/resource-config.json + +rm -r ./graalvm/results/ diff --git a/graalvm/generate-profile.sh b/graalvm/generate-profile.sh new file mode 100755 index 000000000..5960767ed --- /dev/null +++ b/graalvm/generate-profile.sh @@ -0,0 +1,61 @@ +#!/usr/bin/env bash + +# Check if the GRAALVM_HOME variable is set +if [ -z "$GRAALVM_HOME" ]; then + echo "The variable GRAALVM_HOME needs to be set to the GraalVM installation directory." + exit 1 +fi + +# Default value for ARGUMENTS +ARGUMENTS="--gc=G1 -march=x86-64-v3" + +# Parse the command line arguments +while getopts ":hs:a:" opt; do + case ${opt} in + h) + echo "Usage: $0 [-h] [-s ]" + echo " -h: Display this help message." + echo " -s : The path to the suite.yml file" + echo " -a : The arguments to pass to the native-image command. 
Default: --gc=G1 -march=x86-64-v3" + exit 0 + ;; + s) + SUITE=$OPTARG + ;; + a) + ARGUMENTS="$OPTARG" + ;; + ?) + echo "Invalid option: $OPTARG" 1>&2 + exit 1 + ;; + esac +done + +# Check if suite argument was given +printf "" +if [ -z "$SUITE" ]; then + echo "Argument -s is required." + exit 1 +fi + +# Instrument the application +"$GRAALVM_HOME"/bin/native-image --pgo-instrument "$ARGUMENTS" -jar ./target/iguana.jar -o "./target/iguana-4.0.0-instrumented" +if [ $? -ne 0 ]; then + echo "Error while instrumenting the application." + exit 1 +fi + +# Generate the profile +./target/iguana-4.0.0-instrumented -XX:ProfilesDumpFile=custom.iprof "$SUITE" +if [ $? -ne 0 ]; then + echo "Error while generating the profile." + exit 1 +fi + +# Compile the application with the profile +"$GRAALVM_HOME"/bin/native-image --pgo=custom.iprof "$ARGUMENTS" -jar ./target/iguana.jar -o "./target/iguana-4.0.0-pgo" +if [ $? -ne 0 ]; then + echo "Error while compiling the application." + exit 1 +fi diff --git a/graalvm/queries.txt b/graalvm/queries.txt new file mode 100644 index 000000000..b3a425249 --- /dev/null +++ b/graalvm/queries.txt @@ -0,0 +1 @@ +placeholder \ No newline at end of file diff --git a/graalvm/suite.yml b/graalvm/suite.yml new file mode 100644 index 000000000..243127d1f --- /dev/null +++ b/graalvm/suite.yml @@ -0,0 +1,88 @@ +datasets: + - name: "DatasetName" + file: "src/test/resources/dataset.txt" + +connections: + - name: "Blazegraph" + version: "1.1.1" + dataset: "DatasetName" + endpoint: "http://localhost:9999/blazegraph/sparql" + authentication: + user: "user" + password: "test" + updateEndpoint: "http://localhost:3030/ds/update" + updateAuthentication: + user: "updateUser" + password: "password" + +storages: + - type: "rdf file" + path: "graalvm/results/some.ttl" + - type: "csv file" + directory: "graalvm/results/" + - type: "triplestore" + endpoint: "http://localhost:9999/blazegraph/sparql" + user: "user" + password: "test" + baseUri: "http://example.org" + +responseBodyProcessors: + - contentType: "application/sparql-results+json" + threads: 1 + +metrics: + - type: "AES" + - type: "EachQuery" + - type: "QPS" + - type: "AvgQPS" + - type: "NoQ" + - type: "NoQPH" + - type: "QMPH" + - type: "PAvgQPS" + penalty: 100 + - type: "PQPS" + penalty: 100 + + +tasks: + # 1 hour (time Limit is in ms) + - type: stresstest + warmupWorkers: + # 1 minutes (is in ms) + - type: SPARQLProtocolWorker + number: 1 + queries: + path: "./graalvm/queries.txt" + format: "separator" + separator: ";" + caching: true + order: "random" + seed: 123 + lang: "SPARQL" + timeout: 2s + connection: Blazegraph + completionTarget: + duration: 1s + acceptHeader: "application/sparql-results+json" + requestType: get query + parseResults: true + workers: + - type: "SPARQLProtocolWorker" + number: 1 + queries: + path: "./graalvm/queries.txt" + timeout: 3m + connection: Blazegraph + completionTarget: + duration: 1s + requestType: get query + acceptHeader: "application/sparql-results+json" + - number: 1 + type: "SPARQLProtocolWorker" + connection: Blazegraph + completionTarget: + number: 1 + queries: + path: "./graalvm/queries.txt" + timeout: 100s + acceptHeader: "application/sparql-results+json" diff --git a/iguana.commons/README b/iguana.commons/README deleted file mode 100644 index 5ca5f7d2a..000000000 --- a/iguana.commons/README +++ /dev/null @@ -1,46 +0,0 @@ -CONTENTS OF THIS FILE ---------------------- - -* Introduction -* Requirements -* Installation -* Configuration -* Links - - -INTRODUCTION ------------- - -The Commons module 
of Iguana will be used -to serve as methods and utils which all or several modules of Iguana needs - -Bugs can be submitted at https://github.com/AKSW/IGUANA/issues -Please refer to the Module as following "[Commons] your message" - - -REQUIREMENTS ------------- - -The Commons is not a standalone module. - - -INSTALLATION ------------- - -The Commons is not a standalone module. - - -CONFIGURATION -------------- - -The Commons is not a standalone module. - - -LINKS ------ - -* Project Site: http://iguana-benchmark.eu - -* Github Site: http://github.com/AKSW/IGUANA - -* Bug Tracker: http://github.com/AKSW/IGUANA/issues diff --git a/iguana.commons/pom.xml b/iguana.commons/pom.xml deleted file mode 100644 index 07ae01253..000000000 --- a/iguana.commons/pom.xml +++ /dev/null @@ -1,162 +0,0 @@ - - 4.0.0 - - org.aksw - iguana-parent - ${revision} - - iguana.commons - Iguana Commons - Iguana Common Classes and Methods - - - AGPLv3 or later - https://www.gnu.org/licenses/agpl-3.0.html - - - - - Lixi Conrads - lixiconrads@gmail.com - - Former Developer - - Dice Research Group - https://dice-research.org - - - - Dice Research Group - https://dice-research.org - - - GitHub Issue Management - https://github.com/dice-group/iguana/issues - - https://dice-research.org/IGUANA - - - 11 - 2.17.1 - 3.16.0 - UTF-8 - 11 - 11 - - - - - junit - junit - 4.13.1 - test - - - commons-configuration - commons-configuration - 1.10 - - - org.apache.commons - commons-exec - 1.3 - - - org.apache.logging.log4j - log4j-slf4j-impl - ${log4j.version} - - - org.apache.logging.log4j - log4j-api - ${log4j.version} - - - org.apache.logging.log4j - log4j-core - ${log4j.version} - - - org.apache.logging.log4j - log4j-1.2-api - ${log4j.version} - - - org.simpleframework - simple - 5.1.6 - - - org.reflections - reflections - 0.9.9 - - - - - - github - GitHub dice-group Apache Maven Packages - https://maven.pkg.github.com/dice-group/IGUANA - - - - - - - - - org.jacoco - jacoco-maven-plugin - 0.8.6 - - - prepare-agent - - prepare-agent - - - - report - prepare-package - - report - - - - post-unit-test - test - - report - - - - - target/jacoco.exec - - target/jacoco-ut - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - 11 - 11 - UTF-8 - - -parameters - - - **/log4j2.yml - - - - - - - diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Nullable.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Nullable.java deleted file mode 100644 index 3d11e9dd7..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Nullable.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.aksw.iguana.commons.annotation; - -import java.lang.annotation.*; - -/** - * Lets the TypeFactory know that the Parameter can be null and thus be ignored. 
- */ -@Documented -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.PARAMETER) -public @interface Nullable { -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/ParameterNames.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/ParameterNames.java deleted file mode 100644 index ceae9f810..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/ParameterNames.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.aksw.iguana.commons.annotation; - -import java.lang.annotation.*; - -/** - * Uses provided names in the order of the constructor parameters, instead of the constructor parameter names for the TypeFactory - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.CONSTRUCTOR) -@Inherited -public @interface ParameterNames { - - String[] names() default ""; -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Shorthand.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Shorthand.java deleted file mode 100644 index ee19817ca..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/annotation/Shorthand.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.aksw.iguana.commons.annotation; - -import java.lang.annotation.*; - -/** - * Sets a short name to be used in the TypedFactory instead of the whole class name - */ -@Documented -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface Shorthand { - - String value(); -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/constants/COMMON.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/constants/COMMON.java deleted file mode 100644 index 3851193a1..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/constants/COMMON.java +++ /dev/null @@ -1,113 +0,0 @@ -package org.aksw.iguana.commons.constants; - -/** - * Constants several modules need - * - * @author f.conrads - * - */ -public class COMMON { - - /* - * COMMON CONSTANTS - */ - - - /** - * The key for the experiment task ID in the properties received from the core - */ - public static final String EXPERIMENT_TASK_ID_KEY = "taskID"; - - /** - * The key for the experiment ID in the properties received from the core - */ - public static final String EXPERIMENT_ID_KEY = "expID"; - - /** - * The key for suite ID in the properties received from the core - */ - public static final String SUITE_ID_KEY = "suiteID"; - - - /** - * The key for starting an experiment task. Must be in the receiving properties - */ - public static final String RECEIVE_DATA_START_KEY = "startExperimentTask"; - - /** - * The key for ending an experiment task. Must be in the receiving properties - */ - public static final String RECEIVE_DATA_END_KEY = "endExperimentTask"; - - - /** - * Key in the properties receiving from the core to start an experiment - * as well as internal rp metrics key - */ - public static final String METRICS_PROPERTIES_KEY = "metrics"; - - - - /** - * TP2RP query time key - */ - public static final String RECEIVE_DATA_TIME = "resultTime"; - - /** - * TP2RP (Controller2RP) query success key - */ - public static final String RECEIVE_DATA_SUCCESS = "resultSuccess"; - - /** - * The number of Queries in the particular experiment - * will be used in the meta data. 
- */ - public static final String NO_OF_QUERIES = "noOfQueries"; - - - - public static final String QUERY_ID_KEY = "queryID"; - - public static final String CONNECTION_ID_KEY = "connID"; - - public static final String DATASET_ID_KEY = "datasetID"; - - public static final String EXTRA_META_KEY = "extraMeta"; - - public static final String EXTRA_IS_RESOURCE_KEY = "setIsResource"; - - public static final String QUERY_STRING = "queryString"; - - public static final String DOUBLE_RAW_RESULTS = "doubleRawResults"; - - public static final String SIMPLE_TRIPLE_KEY = "cleanTripleText"; - - public static final String QUERY_STATS = "queryStats"; - - public static final Object RECEIVE_DATA_SIZE = "resultSize"; - - public static final String QUERY_HASH = "queryHash"; - - public static final String WORKER_ID = "workerID"; - - /* Various status codes to denote the status of query execution and to prepare QueryExecutionStats object */ - public static final Long QUERY_UNKNOWN_EXCEPTION = 0L; - - public static final Long QUERY_SUCCESS = 1L; - - public static final Long QUERY_SOCKET_TIMEOUT = -1L; - - public static final Long QUERY_HTTP_FAILURE = -2L; - - public static final String EXPERIMENT_TASK_CLASS_ID_KEY = "actualTaskClass" ; - - public static final String BASE_URI = "http://iguana-benchmark.eu"; - - - public static final String RES_BASE_URI = BASE_URI+"/resource/"; - public static final String PROP_BASE_URI = BASE_URI+"/properties/"; - public static final String CLASS_BASE_URI = BASE_URI+"/class/"; - public static final String PENALTY = "penalty"; - public static final String CONNECTION_VERSION_KEY = "connectionVersion"; - public static final String EXPERIMENT_TASK_NAME_KEY = "taskName"; -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/factory/TypedFactory.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/factory/TypedFactory.java deleted file mode 100644 index 8d62570f0..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/factory/TypedFactory.java +++ /dev/null @@ -1,296 +0,0 @@ -/** - * - */ -package org.aksw.iguana.commons.factory; - -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.ParameterNames; -import org.aksw.iguana.commons.reflect.ShorthandMapper; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Parameter; -import java.util.*; - - -/** - * Factory for a Type. - * Creates an Object from Constructor and Constructor Arguments - * - * @author f.conrads - * @param The Type which should be created - * - */ -public class TypedFactory { - - private static final Logger LOGGER = LoggerFactory - .getLogger(TypedFactory.class); - - private String getClassName(String className){ - Map map = ShorthandMapper.getInstance().getShortMap(); - if(map.containsKey(className)){ - return map.get(className); - } - return className; - } - - - /** - * Will create a T Object from a Constructor Object created by the - * class name and the constructor arguments, be aware that all arguments - * must be Strings in the constructor. 
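- *
- * Mirroring the unit tests at the end of this patch, a call looks like:
- *
- *   TypedFactory<FactorizedObject> factory = new TypedFactory<>();
- *   FactorizedObject o = factory.create(
- *           "org.aksw.iguana.commons.factory.FactorizedObject",
- *           new Object[] { "a", "b", "c" });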
- * - * - * @param className - * The Class Name of the Implemented T Object - * @param constructorArgs - * constructor arguments (must be Strings), can be safely null - * @return The T Object created by the Constructor using the - * constructor args - */ - @SuppressWarnings("unchecked") - public T create(String className, Object[] constructorArgs){ - Object[] constructorArgs2 = constructorArgs; - if (constructorArgs2 == null) { - constructorArgs2 = new Object[0]; - } - Class[] stringClass = new Class[constructorArgs2.length]; - for(int i=0;i[] constructorClasses) { - - Object[] constructorArgs2 = constructorArgs; - - if (className == null) { - return null; - } - Class clazz; - try { - clazz = (Class) ClassLoader - .getSystemClassLoader().loadClass(className); - } catch (ClassNotFoundException e1) { - LOGGER.error("Could not load Object (name: " + className - + ")", e1); - return null; - } - - - if (constructorArgs2 == null) { - constructorArgs2 = new Object[0]; - } - if(constructorClasses==null){ - constructorClasses = new Class[constructorArgs2.length]; - for (int i = 0; i < constructorClasses.length; i++) { - constructorClasses[i] = String.class; - } - } - - try { - Constructor constructor = clazz - .getConstructor(constructorClasses); - return constructor.newInstance(constructorArgs2); - } catch (InstantiationException | IllegalAccessException - | IllegalArgumentException | InvocationTargetException - | NoSuchMethodException | SecurityException e) { - LOGGER.error("Could not initialize class " + clazz.getName() - + " with constructor.", e); - return null; - } - } - - - /** - * Uses the parameter Names and types of a constructor to find the best fitting constructor - * - * Only works with jvm -paramaters, otherwise use createAnnotated and annotate the constructors with ParameterNames and set names to the paramater names - * like - * . 
@ParameterNames(names={"a", "b"}) - * public Constructor(String a, Object b){...} - * - * @param className The Class Name of the Implemented T Object - * @param map key-value pair, whereas key represents the parameter name, where as value will be the value of the instantiation - * @return The instantiated object or null no constructor was found - */ - public T create(String className, Map map){ - Class clazz; - if(className==null){ - return null; - } - try { - clazz = (Class) ClassLoader - .getSystemClassLoader().loadClass(getClassName(className)); - } catch (ClassNotFoundException e1) { - return null; - } - Constructor[] constructors = clazz.getConstructors(); - find: for(Constructor constructor : constructors){ - //ParameterNames would be a backup - //ParameterNames paramNames = (ParameterNames) constructor.getAnnotation(ParameterNames.class); - //if(paramNames==null){ - // continue ; - //} - Parameter[] params = constructor.getParameters(); - - List names = new ArrayList(); - List types = new ArrayList(); - Set canBeNull = new HashSet(); - for(Parameter p : params){ - names.add(p.getName()); - types.add(p.getType()); - if(p.isAnnotationPresent(Nullable.class)){ - canBeNull.add(p.getName()); - } - } - List instanceNames = new ArrayList(map.keySet()); - Object[] constructorArgs = new Object[names.size()]; - if(!checkIfFits(map, names, canBeNull)){continue;} - for(Object key : instanceNames){ - Object value = map.get(key); - //Check if constructor can map keys to param Names - int indexKey = names.indexOf(key.toString()); - Class clazz2 = types.get(indexKey); - if(!clazz2.isInstance(value)){ - continue find; - } - constructorArgs[indexKey] = value; - } - try { - return (T) constructor.newInstance(constructorArgs); - } catch (InstantiationException | IllegalAccessException - | IllegalArgumentException | InvocationTargetException - | SecurityException e) { - //As we check that the COnstructor fits this shouldn't be thrown at all. Something very bad happend - LOGGER.error("Could not initialize class " + clazz.getName() - + " with constructor.", e); - return null; - } - } - LOGGER.error("Could not initialize class " + clazz.getName() - + " with constructor. Maybe Config file has wrong names?."); - return null; - } - - /** - * Checks if the giving parameter key-value mapping fits the constructor parameter names (key vs names) and takes into account that the parameter is allowed to be null and thus - * can be disregarded - * @param map paramater - Object Map - * @param names parameter names of the actual constructor - * @param canBeNull all paramaters who can be null - * @return true if constructor fits, otherwise false - */ - private boolean checkIfFits(Map map, List names, Set canBeNull) { - //check if all provided parameter names are in the constructor - for(Object key : map.keySet()){ - Object value = map.get(key); - if(!names.contains(key.toString())){ - return false; - } - } - //check if all notNull objects are provided - Set keySet = map.keySet(); - for(String name : names){ - //we can safely assume that Object is string - if(!keySet.contains(name)){ - //check if parameter is Nullable - if(!canBeNull.contains(name)){ - return false; - } - } - } - return true; - } - - /** - * Uses the parameter Names and types of a constructor to find the best fitting constructor - * - * Uses the ParameterNames annotation of a constructor to get the parameter names - * - * like - * . 
@ParameterNames(names={"a", "b"}) - * public Constructor(String a, Object b){...} - * - * @param className The Class Name of the Implemented T Object - * @param map Parameter name - value mapping - * @return The instantiated object or null no constructor was found - */ - public T createAnnotated(String className, Map map){ - Class clazz; - try { - clazz = (Class) ClassLoader - .getSystemClassLoader().loadClass(getClassName(className)); - } catch (ClassNotFoundException e1) { - return null; - } - Constructor[] constructors = clazz.getConstructors(); - find: for(Constructor constructor : constructors){ - ParameterNames paramNames = (ParameterNames) constructor.getAnnotation(ParameterNames.class); - if(paramNames==null){ - continue ; - } - Parameter[] params = constructor.getParameters(); - - List names = new ArrayList(); - List types = new ArrayList(); - Set canBeNull = new HashSet(); - for(int i=0;i instanceNames = new ArrayList(map.keySet()); - Object[] constructorArgs = new Object[names.size()]; - if(!checkIfFits(map, names, canBeNull)){continue;} - for(Object key : instanceNames){ - Object value = map.get(key); - //Check if constructor can map keys to param Names - int indexKey = names.indexOf(key.toString()); - Class clazz2 = types.get(indexKey); - if(!clazz2.isInstance(value)){ - continue find; - } - constructorArgs[indexKey] = value; - } - try { - return (T) constructor.newInstance(constructorArgs); - } catch (InstantiationException | IllegalAccessException - | IllegalArgumentException | InvocationTargetException - | SecurityException e) { - //As we check that the Constructor fits this shouldn't be thrown at all. Something very bad happend - LOGGER.error("Could not initialize class " + clazz.getName() - + " with constructor.", e); - return null; - } - } - LOGGER.error("Could not initialize class " + clazz.getName() - + " with constructor. 
Maybe Config file has wrong names?."); - return null; - } - - -} - diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java deleted file mode 100644 index 5c41dfedd..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.aksw.iguana.commons.io; - -import java.io.IOException; -import java.io.InputStream; - -public class BigByteArrayInputStream extends InputStream { - - private BigByteArrayOutputStream bbaos; - - private byte[] curArray; - private int curPos=0; - private int curPosInArray=0; - - public BigByteArrayInputStream(byte[] bytes) throws IOException { - bbaos = new BigByteArrayOutputStream(); - bbaos.write(bytes); - setNextArray(); - } - - public BigByteArrayInputStream(BigByteArrayOutputStream bbaos){ - this.bbaos = bbaos; - setNextArray(); - } - - private void setNextArray(){ - curArray=bbaos.getBaos().get(curPos++).toByteArray(); - } - - @Override - public int read() throws IOException { - if(eos()){ - return -1; - } - int ret; - - if(curPosInArray==2147483639){ - ret = curArray[curPosInArray]; - curPosInArray=0; - setNextArray(); - } - else{ - ret=curArray[curPosInArray++]; - } - return ret ; - } - - private boolean eos() { - //if the current Position is equal the length of the array, this is the last array in bbaos and the last element was already read - if(curArray.length==curPosInArray){ - return true; - } - return false; - } -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java deleted file mode 100644 index 605131977..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java +++ /dev/null @@ -1,108 +0,0 @@ -package org.aksw.iguana.commons.io; - -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.io.UnsupportedEncodingException; -import java.nio.charset.Charset; -import java.util.ArrayList; -import java.util.List; - -public class BigByteArrayOutputStream extends OutputStream { - - private List baos = new ArrayList(); - - public BigByteArrayOutputStream() { - baos.add(new ByteArrayOutputStream()); - } - - - public List getBaos() { - return baos; - } - - public void write(BigByteArrayOutputStream bbaos) throws IOException { - for (byte[] bao : bbaos.toByteArray()) { - for (Byte b : bao) { - write(b); - } - } - - } - - public long size() { - long ret = 0; - for (ByteArrayOutputStream ba : baos) { - ret += ba.size(); - } - return ret; - } - - public synchronized byte[][] toByteArray() { - byte[][] ret = new byte[baos.size()][]; - for (int i = 0; i < baos.size(); i++) { - ret[i] = baos.get(i).toByteArray(); - } - return ret; - } - - - public void write(byte[] i) throws IOException { - for (byte b : i) { - write(b); - } - } - - public void write(byte[][] i) throws IOException { - for (byte[] arr : i) { - for (byte b : arr) { - write(b); - } - } - } - - public void write(byte i) throws IOException { - ByteArrayOutputStream current = baos.get(baos.size() - 1); - current = ensureSpace(current); - current.write(i); - } - - @Override - public void write(int i) throws IOException { - ByteArrayOutputStream current = baos.get(baos.size() - 1); - current = ensureSpace(current); - current.write(i); - } - - private ByteArrayOutputStream 
ensureSpace(ByteArrayOutputStream current) { - if (current.size() == 2147483639) { - baos.add(new ByteArrayOutputStream()); - } - return baos.get(baos.size() - 1); - } - - public String toString(String charset) throws UnsupportedEncodingException { - StringBuilder builder = new StringBuilder(); - for(ByteArrayOutputStream baos : this.baos){ - builder.append(baos.toString(charset)); - } - return builder.toString(); - } - - public String toString(Charset charset) throws UnsupportedEncodingException { - return toString(charset.toString()); - } - - public Long countMatches(char s) { - //read - long count=0; - for(ByteArrayOutputStream baos : this.baos){ - for(byte b : baos.toByteArray()){ - if(b==s){ - count++; - } - } - } - return count; - } -} \ No newline at end of file diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/numbers/NumberUtils.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/numbers/NumberUtils.java deleted file mode 100644 index 2c8629039..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/numbers/NumberUtils.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.aksw.iguana.commons.numbers; - -/** - * Utils class for everything with numbers - * - * @author f.conrads - * - */ -public class NumberUtils { - - /** - * Returns either a long represantation of the String nm or null. - * - * @param nm String which should be parsed - * @return String as a long representation if String is a Long, otherwise null - */ - public static Long getLong(String nm) { - try { - Long ret = Long.parseLong(nm); - return ret; - }catch(NumberFormatException e) {} - return null; - } - - /** - * Returns either a double representation of the String nm or null. - * - * @param nm String which should be parsed - * @return String as a double representation if String is a double, otherwise null - */ - public static Double getDouble(String nm) { - try { - return Double.parseDouble(nm); - } catch (NumberFormatException | NullPointerException ignored) { - } - return null; - } - -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/reflect/ShorthandMapper.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/reflect/ShorthandMapper.java deleted file mode 100644 index 46d84fe5d..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/reflect/ShorthandMapper.java +++ /dev/null @@ -1,71 +0,0 @@ -package org.aksw.iguana.commons.reflect; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.reflections.Configuration; -import org.reflections.Reflections; -import org.reflections.scanners.*; -import org.reflections.util.ConfigurationBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.Map; -import java.util.Set; - -/** - * Maps the shorthand to the class names at the beginning of it's initialization. - * Thus it has to be done once. 
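- *
- * Illustrative lookup (the "facto" shorthand is declared on a test class
- * later in this patch):
- *
- *   String fqcn = ShorthandMapper.getInstance().getShortMap().get("facto");
- *   // fqcn is "org.aksw.iguana.commons.factory.AnnotatedFactorizedObject"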
- * - */ -public class ShorthandMapper { - - public Logger LOGGER = LoggerFactory.getLogger(getClass()); - - private Map shortMap = new HashMap(); - - private static ShorthandMapper instance; - - public static ShorthandMapper getInstance(){ - if(instance==null){ - instance = new ShorthandMapper(); - } - return instance; - } - - - public ShorthandMapper(){ - this("org"); - } - - /** - * create mapping, but only searches in packages with the prefix - * @param prefix package prefix to check - */ - public ShorthandMapper(String prefix){ - try { - Configuration config = ConfigurationBuilder.build(prefix).addScanners(new TypeAnnotationsScanner()).addScanners(new SubTypesScanner()); - Reflections reflections = new Reflections(new String[]{"", prefix}); - - Set> annotatedClasses = reflections.getTypesAnnotatedWith(Shorthand.class); - LOGGER.info("Found {} annotated classes", annotatedClasses.size()); - LOGGER.info("Annotated Classes : {}", annotatedClasses.toString()); - ClassLoader cloader = ClassLoader.getSystemClassLoader(); - for (Class annotatedClass : annotatedClasses) { - Shorthand annotation = (Shorthand) annotatedClass.getAnnotation(Shorthand.class); - if (annotation == null) { - continue; - } - if (shortMap.containsKey(annotation.value())) { - LOGGER.warn("Shorthand Key {} for Class {} already exists, pointing to Class {}. ", annotation.value(), shortMap.get(annotation.value()), annotatedClass.getCanonicalName()); - } - shortMap.put(annotation.value(), annotatedClass.getCanonicalName()); - } - }catch(Exception e){ - LOGGER.error("Could not create shorthand mapping", e); - } - } - - public Map getShortMap() { - return shortMap; - } -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/script/ScriptExecutor.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/script/ScriptExecutor.java deleted file mode 100644 index 6f7aac7be..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/script/ScriptExecutor.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * - */ -package org.aksw.iguana.commons.script; - -import org.apache.commons.exec.ExecuteException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; - -/** - * Class to execute Shell Scripts - * - * @author f.conrads - * - */ -public class ScriptExecutor { - - private static final Logger LOGGER = LoggerFactory.getLogger(ScriptExecutor.class); - - /** - * Will execute the given file with the provided arguments - * via Shell. - * - * @param file file to execute - * @param args arguments to execute file with - * @throws ExecuteException if script can't be executed - * @throws IOException if file IO errors - * @return Process return, 0 means everything worked fine - */ - public static int exec(String file, String[] args) throws ExecuteException, IOException{ - String fileName = new File(file).getAbsolutePath(); - - String[] shellCommand = new String[1 + (args == null ? 
0 : args.length)]; - shellCommand[0] = fileName; - - if(args != null && args.length!=0) - { - System.arraycopy(args, 0, shellCommand, 1, args.length); - } - - return execute(shellCommand); - } - - /**Checks if file contains arguments itself - * - * @param file file to execute - * @param args arguments to execute file with - * @return Process return, 0 means everything worked fine - * @throws ExecuteException if script can't be executed - * @throws IOException if file IO errors - */ - public static int execSafe(String file, String[] args) throws ExecuteException, IOException{ - String actualScript = file; - String[] args2 = args; - if(file.contains(" ")){ - - String[] providedArguments = file.split("\\s+"); - args2 = new String[providedArguments.length-1+args.length]; - actualScript=providedArguments[0]; - int i=1; - for(i=1;i threadBuffer = ThreadLocal.withInitial(() -> new byte[bufferSize]); - - protected static final ThreadLocal threadByteArrayOutputStream = ThreadLocal.withInitial(() -> new ByteArrayOutputStream(bufferSize)); - - /** - * Fastest way to serialize a stream to UTF-8 according to this stackoverflow question. - * - * @param inputStream the stream to read from - * @return the content of inputStream as a string. - * @throws IOException from {@link InputStream#read()} - */ - static public ByteArrayOutputStream inputStream2String(InputStream inputStream) throws IOException { - ByteArrayOutputStream result = threadByteArrayOutputStream.get(); - result.reset(); - try { - inputStream2ByteArrayOutputStream(inputStream, null, -1.0, result); - } catch (TimeoutException e) { - // never happens - System.exit(-1); - } - return result; - } - - /** - * Fastest way to serialize a stream to UTF-8 according to this stackoverflow question. - * - * @param inputStream the stream to read from - * @param startTime a time when the computation started - * @param timeout delta from startTime when the computation must be completed. Otherwise, a TimeoutException may be thrown. Timeout check is deactivated if timeout is < 0. - * @return the content of inputStream as a string. - * @throws IOException from {@link InputStream#read()} - * @throws TimeoutException Maybe thrown any time after if startTime + timeout is exceeded - */ - static public ByteArrayOutputStream inputStream2String(InputStream inputStream, Instant startTime, double timeout) throws IOException, TimeoutException { - ByteArrayOutputStream result = new ByteArrayOutputStream(); - inputStream2ByteArrayOutputStream(inputStream, startTime, timeout, result); - return result; - } - - /** - * Fastest way to serialize a stream to UTF-8 according to this stackoverflow question. - * - * @param inputStream the stream to read from - * @param startTime a time when the computation started - * @param timeout delta from startTime when the computation must be completed. Otherwise, a TimeoutException may be thrown. Timeout check is deactivated if timeout is < 0. - * @param result the stream where the result is written to. 
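- *
- * A hedged usage sketch with a five-second budget (the stream names are
- * illustrative):
- *
- *   ByteArrayOutputStream out = new ByteArrayOutputStream();
- *   long size = Streams.inputStream2ByteArrayOutputStream(in, Instant.now(), 5000.0, out);
- *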
- * @return size of the output stream - * @throws IOException from {@link InputStream#read()} - * @throws TimeoutException Maybe thrown any time after if startTime + timeout is exceeded - */ - public static long inputStream2ByteArrayOutputStream(InputStream inputStream, Instant startTime, double timeout, ByteArrayOutputStream result) throws IOException, TimeoutException { - assert (result != null); - boolean enable_timeout = timeout > 0; - byte[] buffer = threadBuffer.get(); - int length; - while ((length = inputStream.read(buffer)) != -1) { - if (enable_timeout && durationInMilliseconds(startTime, Instant.now()) > timeout) - throw new TimeoutException("reading the answer timed out"); - result.write(buffer, 0, length); - } - return result.size(); - } - - /** - * Fastest way to serialize a stream to UTF-8 according to this stackoverflow question. - * - * @param inputStream the stream to read from - * @param result the stream where the result is written to. - * @return size of the output stream - * @throws IOException from {@link InputStream#read()} - */ - public static long inputStream2ByteArrayOutputStream(InputStream inputStream, ByteArrayOutputStream result) throws IOException { - try { - return inputStream2ByteArrayOutputStream(inputStream, Instant.now(), -1, result); - } catch (TimeoutException e) { - //will never happen - return 0; - } - } - - /** - * reads a stream and throws away the result. - * - * @param inputStream the stream to read from - * @param timeout delta from startTime when the computation must be completed. Otherwise, a TimeoutException may be thrown. Timeout check is deactivated if timeout is < 0. - * @return size of the output stream - * @throws IOException from {@link InputStream#read()} - * @throws TimeoutException Maybe thrown any time after if startTime + timeout is exceeded - */ - static public long inputStream2Length(InputStream inputStream, Instant startTime, double timeout) throws IOException, TimeoutException { - byte[] buffer = threadBuffer.get(); - long length; - long ret = 0; - while ((length = inputStream.read(buffer)) != -1) { - if (durationInMilliseconds(startTime, Instant.now()) > timeout && timeout > 0) - throw new TimeoutException("reading the answer timed out"); - ret += length; - } - return ret; - } -} diff --git a/iguana.commons/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java b/iguana.commons/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java deleted file mode 100644 index 051ddb76b..000000000 --- a/iguana.commons/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java +++ /dev/null @@ -1,38 +0,0 @@ -package org.aksw.iguana.commons.time; - -import java.time.Duration; -import java.time.Instant; - -/** - * Everythin related to time stuff - */ -public class TimeUtils { - - /** - * returns the current time in Nanoseconds as a long instead of a double - * @return current time in nanoseconds as a long - */ - public static long getTimeInNanoseconds() { - Instant now = Instant.now(); - return ((long)now.getNano() + now.getEpochSecond() * 1000000000 /*ns*/); - } - - /** - * gets the current time in milliseconds - * @return the current time in ms - */ - public static double getTimeInMilliseconds() { - return getTimeInNanoseconds() / 1000000d /*ms*/; - } - - /** - * returns the duration in MS between two Time Instants - * @param start Start time - * @param end end time - * @return duration in ms between start and end - */ - public static double durationInMilliseconds(Instant start, Instant end) { - Duration duration = 
Duration.between(start, end); - return ((long)duration.getNano() + duration.getSeconds() * 1000000000 /*ns*/) / 1000000d /*ms*/; - } -} diff --git a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/AnnotatedFactorizedObject.java b/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/AnnotatedFactorizedObject.java deleted file mode 100644 index 5913230c2..000000000 --- a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/AnnotatedFactorizedObject.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.aksw.iguana.commons.factory; - -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.ParameterNames; -import org.aksw.iguana.commons.annotation.Shorthand; - -@Shorthand(value = "facto") -public class AnnotatedFactorizedObject extends FactorizedObject { - public AnnotatedFactorizedObject(String[] args, String[] args2) { - this.setArgs(args); - this.setArgs2(args2); - } - - @ParameterNames(names={"a","b","c"}) - public AnnotatedFactorizedObject(String a, String b, String c) { - this.setArgs(new String[] {a, b, c}); - } - - @ParameterNames(names={"a","b"}) - public AnnotatedFactorizedObject(String a, @Nullable String b) { - this.setArgs(new String[] {a, b==null?"wasNull":b}); - } - - public AnnotatedFactorizedObject() { - args = new String[] {"a3", "b3"}; - } - -} diff --git a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/FactorizedObject.java b/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/FactorizedObject.java deleted file mode 100644 index e6f954a60..000000000 --- a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/FactorizedObject.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.aksw.iguana.commons.factory; - -import org.aksw.iguana.commons.annotation.Nullable; - -public class FactorizedObject { - - protected String[] args; - protected String[] args2; - - public FactorizedObject(String[] args, String[] args2) { - this.setArgs(args); - this.setArgs2(args2); - } - - public FactorizedObject(String a, String b, String c) { - this.setArgs(new String[] {a, b, c}); - } - - public FactorizedObject(String a, @Nullable String b) { - this.setArgs(new String[] {a, b==null?"wasNull":b}); - } - - - public FactorizedObject() { - args = new String[] {"a3", "b3"}; - } - - /** - * @return the args - */ - public String[] getArgs() { - return args; - } - - /** - * @param args the args to set - */ - public void setArgs(String[] args) { - this.args = args; - } - - /** - * @return the args2 - */ - public String[] getArgs2() { - return args2; - } - - /** - * @param args2 the args2 to set - */ - public void setArgs2(String[] args2) { - this.args2 = args2; - } - -} diff --git a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/TypedFactoryTest.java b/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/TypedFactoryTest.java deleted file mode 100644 index ff8ac9a92..000000000 --- a/iguana.commons/src/test/java/org/aksw/iguana/commons/factory/TypedFactoryTest.java +++ /dev/null @@ -1,151 +0,0 @@ -package org.aksw.iguana.commons.factory; - -import org.junit.Test; - -import java.util.HashMap; -import java.util.Map; - -import static org.junit.Assert.assertEquals; - -public class TypedFactoryTest { - - @Test - public void argumentClassesTest() { - String[] args = new String[] { "a1", "b1" }; - String[] args2 = new String[] { "a2", "b2" }; - TypedFactory factory = new TypedFactory(); - FactorizedObject testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", - new Object[] { 
args, args2 }, new Class[] { String[].class, String[].class }); - assertEquals(args[0], testObject.getArgs()[0]); - assertEquals(args[1], testObject.getArgs()[1]); - assertEquals(args2[0], testObject.getArgs2()[0]); - assertEquals(args2[1], testObject.getArgs2()[1]); - - } - - - @Test - public void noConstructor(){ - TypedFactory factory = new TypedFactory(); - HashMap map = new HashMap(); - map.put("nope", "nope"); - assertEquals(null, factory.create("org.aksw.iguana.commons.factory.FactorizedObject", map)); - assertEquals(null, factory.create("org.aksw.iguana.commons.factory.FactorizedObject", new Object[]{"nope"})); - assertEquals(null, factory.create("org.aksw.iguana.commons.factory.FactorizedObject", new Object[]{"nope"}, new Class[]{String.class})); - assertEquals(null, factory.createAnnotated("org.aksw.iguana.commons.factory.AnnotatedFactorizedObject", map)); - - map.clear(); - map.put("a", 123); - map.put("b", true); - assertEquals(null, factory.create("org.aksw.iguana.commons.factory.FactorizedObject", map)); - assertEquals(null, factory.createAnnotated("org.aksw.iguana.commons.factory.AnnotatedFactorizedObject", map)); - - } - - @Test - public void nullConstructorClass(){ - TypedFactory factory = new TypedFactory(); - FactorizedObject testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", new Object[]{"a", "b", "c"}, (Class[])null); - assertEquals("a", testObject.getArgs()[0]); - assertEquals("b", testObject.getArgs()[1]); - assertEquals("c", testObject.getArgs()[2]); - testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", (Object[])null, (Class[])null); - assertEquals("a3", testObject.getArgs()[0]); - assertEquals("b3", testObject.getArgs()[1]); - } - - @Test - public void nullClass(){ - TypedFactory factory = new TypedFactory(); - assertEquals(null, factory.create(null, new HashMap<>())); - assertEquals(null, factory.create(null, new Object[]{})); - assertEquals(null, factory.create(null, new Object[]{}, new Class[]{})); - - } - - @Test - public void classNameNotFoundTest(){ - TypedFactory factory = new TypedFactory(); - assertEquals(null, factory.create("thisClassShouldNotExist", new HashMap<>())); - assertEquals(null, factory.create("thisClassShouldNotExist", new Object[]{})); - assertEquals(null, factory.create("thisClassShouldNotExist", new Object[]{}, new Class[]{})); - assertEquals(null, factory.createAnnotated("thisClassShouldNotExist", new HashMap<>())); - } - - @Test - public void argumentStringsTest() { - - TypedFactory factory = new TypedFactory(); - FactorizedObject testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", (Object[])null); - assertEquals("a3", testObject.getArgs()[0]); - assertEquals("b3", testObject.getArgs()[1]); - } - - - @Test - public void mapCreationTestParameterNames() { - - TypedFactory factory = new TypedFactory(); - Map arguments = new HashMap(); - arguments.put("a", "a4"); - arguments.put("b", "b4"); - arguments.put("c", "c4"); - FactorizedObject testObject = factory.createAnnotated("org.aksw.iguana.commons.factory.AnnotatedFactorizedObject", arguments); - assertEquals("a4", testObject.getArgs()[0]); - assertEquals("b4", testObject.getArgs()[1]); - assertEquals("c4", testObject.getArgs()[2]); - arguments.clear(); - arguments.put("a", "a5"); - testObject = factory.createAnnotated("org.aksw.iguana.commons.factory.AnnotatedFactorizedObject", arguments); - assertEquals("a5", testObject.getArgs()[0]); - assertEquals("wasNull", testObject.getArgs()[1]); - } - - @Test - 
public void testNullable() { - - TypedFactory factory = new TypedFactory(); - Map arguments = new HashMap(); - arguments.put("a", "a4"); - arguments.put("b", "b4"); - FactorizedObject testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", arguments); - assertEquals("a4", testObject.getArgs()[0]); - assertEquals("b4", testObject.getArgs()[1]); - arguments.remove("b"); - testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", arguments); - assertEquals("a4", testObject.getArgs()[0]); - assertEquals("wasNull", testObject.getArgs()[1]); - - } - - @Test - public void mapCreationTest() { - - TypedFactory factory = new TypedFactory(); - Map arguments = new HashMap(); - arguments.put("a", "a4"); - arguments.put("b", "b4"); - arguments.put("c", "c4"); - FactorizedObject testObject = factory.create("org.aksw.iguana.commons.factory.FactorizedObject", arguments); - assertEquals("a4", testObject.getArgs()[0]); - assertEquals("b4", testObject.getArgs()[1]); - assertEquals("c4", testObject.getArgs()[2]); - - } - - @Test - public void shortHandAnnotationTest() { - - TypedFactory factory = new TypedFactory(); - Map arguments = new HashMap(); - arguments.put("a", "a4"); - arguments.put("b", "b4"); - arguments.put("c", "c4"); - AnnotatedFactorizedObject testObject = factory.create("facto", arguments); - assertEquals("a4", testObject.getArgs()[0]); - assertEquals("b4", testObject.getArgs()[1]); - assertEquals("c4", testObject.getArgs()[2]); - - } - -} diff --git a/iguana.commons/src/test/java/org/aksw/iguana/commons/number/NumberUtilsTest.java b/iguana.commons/src/test/java/org/aksw/iguana/commons/number/NumberUtilsTest.java deleted file mode 100644 index fa77c09a2..000000000 --- a/iguana.commons/src/test/java/org/aksw/iguana/commons/number/NumberUtilsTest.java +++ /dev/null @@ -1,56 +0,0 @@ -package org.aksw.iguana.commons.number; - - -import org.aksw.iguana.commons.numbers.NumberUtils; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import static org.junit.Assert.assertEquals; - - -@RunWith(Parameterized.class) -public class NumberUtilsTest { - - @Parameterized.Parameters - public static Collection data() { - List testConfigs = new ArrayList(); - //simple method - testConfigs.add(new Object[]{"123", Long.class, 123L}); - testConfigs.add(new Object[]{"123.0", Double.class, 123.0}); - testConfigs.add(new Object[]{"123", Double.class, 123.0}); - testConfigs.add(new Object[]{"123.A", Double.class, null}); - testConfigs.add(new Object[]{"123.A", Long.class, null}); - testConfigs.add(new Object[]{"123.0123", Double.class, 123.0123}); - testConfigs.add(new Object[]{null, Double.class, null}); - testConfigs.add(new Object[]{null, Long.class, null}); - - return testConfigs; - } - - private String number; - private Class clazz; - private Number expected; - - public NumberUtilsTest(String number, Class clazz, Number expected){ - this.number=number; - this.expected = expected; - this.clazz=clazz; - } - - @Test - public void checkForClass(){ - if(clazz == Long.class){ - assertEquals(expected, NumberUtils.getLong(number)); - } - else if(clazz == Double.class) { - assertEquals(expected, NumberUtils.getDouble(number)); - - } - } - -} diff --git a/iguana.commons/src/test/java/org/aksw/iguana/commons/script/ScriptExecutorTest.java b/iguana.commons/src/test/java/org/aksw/iguana/commons/script/ScriptExecutorTest.java deleted file mode 100644 
index 9c6959367..000000000 --- a/iguana.commons/src/test/java/org/aksw/iguana/commons/script/ScriptExecutorTest.java +++ /dev/null @@ -1,79 +0,0 @@ -package org.aksw.iguana.commons.script; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.lang.reflect.InvocationTargetException; -import java.lang.reflect.Method; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@RunWith(Parameterized.class) -public class ScriptExecutorTest { - - private static Logger LOGGER = LoggerFactory.getLogger(ScriptExecutorTest.class); - - private String cmd; - private String[] args; - private int expectedExitCode; - private Method callbackMethod; - private Object[] callbackArgs=new Object[]{}; - - @Parameterized.Parameters - public static Collection data() { - List testConfigs = new ArrayList(); - //simple method - testConfigs.add(new Object[]{"/bin/touch", new String[]{"ShouldNotExistWhatSoEver"}, 0, "removeFile", new Object[]{"ShouldNotExistWhatSoEver"}}); - //testing if additional arguments are checked - testConfigs.add(new Object[]{"/bin/echo test", new String[]{"123", "456"}, 0, "emptyCallback", new Object[]{}}); - //should fail as file not exist - testConfigs.add(new Object[]{"scriptThatShouldNotExist", new String[]{}, -1, "emptyCallback", new Object[]{}}); - //should fail with 1 - - - return testConfigs; - } - - - - public ScriptExecutorTest(String cmd, String[] args, int expectedExitCode, String callbackMethodName, Object[] callbackArgs) throws NoSuchMethodException { - this.cmd=cmd; - this.args=args; - this.expectedExitCode=expectedExitCode; - this.callbackArgs = callbackArgs; - Class[] classes = new Class[callbackArgs.length]; - for(int i=0;i - 4.0.0 - - org.aksw - iguana-parent - ${revision} - - iguana.corecontroller - - Iguanas Core Controller - The Controller of Iguanas Core Module. Handling the messaging and is coordinating the ResultProcessor as well as the dataGenerator. Will be communicating with the Web Controller Module. 
- - - AGPLv3 or later - https://www.gnu.org/licenses/agpl-3.0.html - - - - - Lixi Conrads - lixiconrads@gmail.com - - Former Developer - - Dice Research Group - https://dice-research.org - - - - Dice Research Group - https://dice-research.org - - - GitHub Issue Management - https://github.com/dice-group/iguana/issues - - https://dice-research.org/IGUANA - - - 11 - UTF-8 - 2.13.3 - 3.16.0 - 11 - 11 - - - - - org.apache.httpcomponents - httpclient - 4.5.13 - - - com.fasterxml.jackson.dataformat - jackson-dataformat-yaml - 2.11.2 - - - com.networknt - json-schema-validator - 1.0.43 - - - org.apache.jena - jena-arq - ${jena.version} - - - com.googlecode.json-simple - json-simple - 1.1.1 - - - - junit - junit - 4.13.1 - test - - - org.simpleframework - simple - 5.1.6 - - - org.aksw - iguana.resultprocessor - ${revision} - - - org.aksw - iguana.commons - - - - - - org.aksw - iguana.commons - ${revision} - - - - - - - - org.jacoco - jacoco-maven-plugin - 0.8.6 - - - prepare-agent - - prepare-agent - - - - report - prepare-package - - report - - - - post-unit-test - test - - report - - - - - target/jacoco.exec - - target/jacoco-ut - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.5.0 - - - maven-dependency-plugin - - - install - - copy-dependencies - - - ${project.build.directory}/lib - - - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - 11 - 11 - UTF-8 - - -parameters - - - - - - org.apache.maven.plugins - maven-shade-plugin - 2.4.3 - - iguana-${revision} - ${project.parent.basedir}/target - - - - - shade - - - true - shaded - - - org.aksw.iguana.cc.controller.MainController - - - - - - - - - - - github - GitHub dice-group Apache Maven Packages - https://maven.pkg.github.com/dice-group/IGUANA - - - diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/CONSTANTS.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/CONSTANTS.java deleted file mode 100644 index 4b3c7ebca..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/CONSTANTS.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.aksw.iguana.cc.config; - -/** - * Constants used only by the Core controller - * - * @author f.conrads - * - */ -public class CONSTANTS { - - /** - * The key to set the workerID in the Extra Meta properties - * and the properties name in the final results to get the workerID - */ - public static final String WORKER_ID_KEY = "workerID"; - - /** - * The key to set the workerType in the Extra Meta properties - * and the properties name in the final results to get the workerType - */ - public static final String WORKER_TYPE_KEY = "workerType"; - - /** - * The key to get the timeLimit parameter. - * be aware that timeLimit can be null. 
- */ - public static final String TIME_LIMIT = "timeLimit"; - - - public static final String NO_OF_QUERY_MIXES = "numberOfQueryMixes"; - - - public static final String WORKER_TIMEOUT_MS = "timeOutMS"; -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/ConfigManager.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/ConfigManager.java deleted file mode 100644 index b0946ba0d..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/ConfigManager.java +++ /dev/null @@ -1,54 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.config; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - - -/** - * Manages an incoming Configuration and starts the corresponding {@link org.aksw.iguana.cc.config.IguanaConfig} - * - * @author f.conrads - * - */ -public class ConfigManager { - - private Logger LOGGER = LoggerFactory.getLogger(getClass()); - - - /** - * Will receive a JSON or YAML configuration and executes the configuration as an Iguana Suite - * @param configuration - * @param validate checks if error should be thrown if it validates the configuration given the iguana-schema.json schema - */ - public void receiveData(File configuration, Boolean validate) throws IOException { - - IguanaConfig newConfig = IguanaConfigFactory.parse(configuration, validate); - if(newConfig==null){ - return; - } - startConfig(newConfig); - } - - - - /** - * Starts the Config - */ - public void startConfig(IguanaConfig config) { - try { - config.start(); - } catch (IOException e) { - LOGGER.error("Could not start config due to an IO Exception", e); - } - - } - - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfig.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfig.java deleted file mode 100644 index cdb672278..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfig.java +++ /dev/null @@ -1,172 +0,0 @@ -package org.aksw.iguana.cc.config; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.aksw.iguana.cc.config.elements.*; -import org.aksw.iguana.cc.controller.TaskController; -import org.aksw.iguana.commons.script.ScriptExecutor; -import org.aksw.iguana.rp.controller.RPController; -import org.aksw.iguana.rp.metrics.Metric; -import org.aksw.iguana.rp.metrics.impl.*; -import org.aksw.iguana.rp.storage.Storage; -import org.aksw.iguana.rp.storage.impl.NTFileStorage; -import org.apache.commons.exec.ExecuteException; -import org.apache.commons.lang3.SerializationUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.time.Instant; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -/** - * Gets either a JSON or YAML configuration file using a json schema and will generate - * a SuiteID and ExperimentIDs as well as TaskIDs for it.
- * Afterwards it will start the taskProcessor with all specified tasks.
- *
- * The tasks are executed in the following nested order:
- * 1. For each Dataset
- * 2.   For each Connection
- * 3.     For each Task
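- *
- * A minimal suite file sketch (all names and values are placeholders, not
- * taken from this repository):
- *
- *   datasets:
- *     - name: "dataset1"
- *   connections:
- *     - name: "triplestore1"
- *       endpoint: "http://localhost:3030/ds/sparql"
- *   tasks:
- *     - className: "Stresstest"
- *       configuration:
- *         timeLimit: 60000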
- * - * Further on executes the pre and post script hooks, before and after a class. - * Following values will be exchanged in the script string {{Connection}} {{Dataset.name}} {{Dataset.file}} {{taskID}} - * - * - * @author f.conrads - * - */ -public class IguanaConfig { - - private static final Logger LOGGER = LoggerFactory - .getLogger(IguanaConfig.class); - - private String suiteID; - @JsonProperty(required = true) - private List datasets; - @JsonProperty(required = true) - private List connections; - @JsonProperty(required = true) - private List tasks; - @JsonProperty(required = false) - private String preScriptHook; - @JsonProperty(required = false) - private String postScriptHook; - @JsonProperty(required = false) - private List metrics; - @JsonProperty(required = false) - private List storages; - - - /** - * starts the config - * @throws IOException - * @throws ExecuteException - */ - public void start() throws ExecuteException, IOException { - RPController rpController = initResultProcessor(); - TaskController controller = new TaskController(); - //get SuiteID - String suiteID = generateSuiteID(); - //generate ExpID - Integer expID = 0; - - for(Dataset dataset: datasets){ - expID++; - Integer taskID = 0; - for(Connection con : connections){ - for(Task task : tasks) { - taskID++; - String[] args = new String[] {}; - if(preScriptHook!=null){ - LOGGER.info("Executing preScriptHook"); - String execScript = preScriptHook.replace("{{dataset.name}}", dataset.getName()) - .replace("{{connection}}", con.getName()) - .replace("{{connection.version}}", con.getVersion("{{connection.version}}")) - .replace("{{taskID}}", taskID+""); - LOGGER.info("Finished preScriptHook"); - if(dataset.getFile()!=null){ - execScript = execScript.replace("{{dataset.file}}", dataset.getFile()); - } - - ScriptExecutor.execSafe(execScript, args); - } - LOGGER.info("Executing Task [{}/{}: {}, {}, {}]", taskID, task.getName(), dataset.getName(), con.getName(), task.getClassName()); - controller.startTask(new String[]{suiteID, suiteID+"/"+expID.toString(), suiteID+"/"+expID.toString()+"/"+taskID.toString()}, dataset.getName(), SerializationUtils.clone(con), SerializationUtils.clone(task)); - if(postScriptHook!=null){ - String execScript = postScriptHook.replace("{{dataset.name}}", dataset.getName()) - .replace("{{connection}}", con.getName()) - .replace("{{connection.version}}", con.getVersion("{{connection.version}}")) - .replace("{{taskID}}", taskID+""); - if(dataset.getFile()!=null){ - execScript = execScript.replace("{{dataset.file}}", dataset.getFile()); - } - LOGGER.info("Executing postScriptHook {}", execScript); - ScriptExecutor.execSafe(execScript, args); - LOGGER.info("Finished postScriptHook"); - } - } - } - } - rpController.close(); - - LOGGER.info("Finished benchmark"); - } - - private RPController initResultProcessor() { - //If storage or metric is empty use default - if(this.storages== null || this.storages.isEmpty()){ - storages = new ArrayList<>(); - StorageConfig config = new StorageConfig(); - config.setClassName(NTFileStorage.class.getCanonicalName()); - storages.add(config); - } - if(this.metrics == null || this.metrics.isEmpty()){ - LOGGER.info("No metrics were set. 
Using default metrics."); - metrics = new ArrayList<>(); - MetricConfig config = new MetricConfig(); - config.setClassName(QMPHMetric.class.getCanonicalName()); - metrics.add(config); - config = new MetricConfig(); - config.setClassName(QPSMetric.class.getCanonicalName()); - Map configMap = new HashMap(); - configMap.put("penalty", 180000); - config.setConfiguration(configMap); - metrics.add(config); - config = new MetricConfig(); - config.setClassName(NoQPHMetric.class.getCanonicalName()); - metrics.add(config); - config = new MetricConfig(); - config.setClassName(AvgQPSMetric.class.getCanonicalName()); - metrics.add(config); - config = new MetricConfig(); - config.setClassName(NoQMetric.class.getCanonicalName()); - metrics.add(config); - - } - //Create Storages - List storages = new ArrayList(); - for(StorageConfig config : this.storages){ - storages.add(config.createStorage()); - } - //Create Metrics - List metrics = new ArrayList(); - for(MetricConfig config : this.metrics){ - metrics.add(config.createMetric()); - } - RPController controller = new RPController(); - controller.init(storages, metrics); - return controller; - } - - - private String generateSuiteID() { - int currentTimeMillisHashCode = Math.abs(Long.valueOf(Instant.now().getEpochSecond()).hashCode()); - return String.valueOf(currentTimeMillisHashCode); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfigFactory.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfigFactory.java deleted file mode 100644 index 68ec5de87..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/IguanaConfigFactory.java +++ /dev/null @@ -1,69 +0,0 @@ -package org.aksw.iguana.cc.config; - -import com.fasterxml.jackson.core.JsonFactory; -import com.fasterxml.jackson.databind.JsonNode; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; -import com.networknt.schema.JsonSchema; -import com.networknt.schema.JsonSchemaFactory; -import com.networknt.schema.SpecVersion; -import com.networknt.schema.ValidationMessage; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.io.InputStream; -import java.util.Set; - -/** - * Creates an IguanaConfig from a given JSON or YAML file, and validates the config using a JSON schema file - */ -public class IguanaConfigFactory { - - private static Logger LOGGER = LoggerFactory.getLogger(IguanaConfigFactory.class); - - private static String schemaFile = "iguana-schema.json"; - - public static IguanaConfig parse(File config) throws IOException { - return parse(config, true); - } - - public static IguanaConfig parse(File config, Boolean validate) throws IOException { - if(config.getName().endsWith(".yml") || config.getName().endsWith(".yaml")){ - return parse(config, new YAMLFactory(), validate); - } - else if(config.getName().endsWith(".json")){ - return parse(config, new JsonFactory(), validate); - } - return null; - } - private static IguanaConfig parse(File config, JsonFactory factory) throws IOException { - return parse(config, factory, true); - } - - private static IguanaConfig parse(File config, JsonFactory factory, Boolean validate) throws IOException { - final ObjectMapper mapper = new ObjectMapper(factory); - if(validate && !validateConfig(config, schemaFile, mapper)){ - return null; - } - return mapper.readValue(config, IguanaConfig.class); - } - - private static boolean validateConfig(File 
configuration, String schemaFile, ObjectMapper mapper) throws IOException { - JsonSchemaFactory factory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V7); - InputStream is = Thread.currentThread().getContextClassLoader() - .getResourceAsStream(schemaFile); - JsonSchema schema = factory.getSchema(is); - JsonNode node = mapper.readTree(configuration); - Set errors = schema.validate(node); - if(errors.size()>0){ - LOGGER.error("Found {} errors in configuration file.", errors.size()); - } - for(ValidationMessage message : errors){ - LOGGER.error(message.getMessage()); - } - return errors.size()==0; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Connection.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Connection.java deleted file mode 100644 index ed91b120f..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Connection.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.aksw.iguana.cc.config.elements; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.Serializable; - -/** - * A connection configuration class - */ -public class Connection implements Serializable { - - @JsonProperty(required = true) - private String name; - @JsonProperty(required = false) - private String user; - @JsonProperty(required = false) - private String password; - @JsonProperty(required = true) - private String endpoint; - @JsonProperty(required = false) - private String updateEndpoint; - @JsonProperty(required = false) - private String version; - - public String getVersion() { - return version; - } - - public String getVersion(String defaultValue) { - if(version!=null) - return version; - return defaultValue; - } - - public void setVersion(String version) { - this.version = version; - } - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - public String getPassword() { - return password; - } - - public void setPassword(String password) { - this.password = password; - } - - public String getEndpoint() { - return endpoint; - } - - public void setEndpoint(String endpoint) { - this.endpoint = endpoint; - } - - public String getUpdateEndpoint() { - return updateEndpoint; - } - - public void setUpdateEndpoint(String updateEndpoint) { - this.updateEndpoint = updateEndpoint; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Dataset.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Dataset.java deleted file mode 100644 index 6bbb0c066..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Dataset.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.aksw.iguana.cc.config.elements; - -import com.fasterxml.jackson.annotation.JsonProperty; - -/** - * The Dataset config class. 
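- *
- * In the suite file this corresponds to entries like the following, where the
- * file field is optional (values are placeholders):
- *
- *   datasets:
- *     - name: "dataset1"
- *       file: "/path/to/dataset1.nt"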
- * - * Will set the name and if it was set in the config file the fileName - */ -public class Dataset { - @JsonProperty(required = true) - private String name; - - @JsonProperty - private String file; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public String getFile() { - return file; - } - - public void setFile(String file) { - this.file = file; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/MetricConfig.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/MetricConfig.java deleted file mode 100644 index 3e2d5a37d..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/MetricConfig.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.aksw.iguana.cc.config.elements; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.aksw.iguana.commons.factory.TypedFactory; -import org.aksw.iguana.rp.metrics.Metric; - -import java.util.HashMap; -import java.util.Map; - -/** - * Metric Config class - */ -public class MetricConfig { - - @JsonProperty(required = true) - private String className; - - @JsonProperty(required = false) - private Map configuration = new HashMap(); - - - public String getClassName() { - return className; - } - - public void setClassName(String className) { - this.className = className; - } - - public Map getConfiguration() { - return configuration; - } - - public void setConfiguration(Map configuration) { - this.configuration = configuration; - } - - public Metric createMetric() { - TypedFactory factory = new TypedFactory(); - return factory.create(className, configuration); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java deleted file mode 100644 index 9144800ec..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java +++ /dev/null @@ -1,42 +0,0 @@ -package org.aksw.iguana.cc.config.elements; - -import com.fasterxml.jackson.annotation.JsonProperty; -import org.aksw.iguana.commons.factory.TypedFactory; -import org.aksw.iguana.rp.storage.Storage; - -import java.util.HashMap; -import java.util.Map; - -/** - * Storage Configuration class - */ -public class StorageConfig { - - - @JsonProperty(required = true) - private String className; - - @JsonProperty - private Map configuration = new HashMap(); - - public String getClassName() { - return className; - } - - public void setClassName(String className) { - this.className = className; - } - - public Map getConfiguration() { - return configuration; - } - - public void setConfiguration(Map configuration) { - this.configuration = configuration; - } - - public Storage createStorage() { - TypedFactory factory = new TypedFactory(); - return factory.create(className, configuration); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Task.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Task.java deleted file mode 100644 index 6ba9b151e..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/config/elements/Task.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.aksw.iguana.cc.config.elements; - -import com.fasterxml.jackson.annotation.JsonProperty; - -import java.io.Serializable; -import java.util.HashMap; -import java.util.Map; - -/** - * The task configuration class, sets the 
class name and it's configuration - */ -public class Task implements Serializable { - - @JsonProperty(required = true) - private Map configuration = new HashMap(); - - @JsonProperty(required = true) - private String className; - - @JsonProperty(required = false) - private String name=null; - - public String getName() { - return name; - } - - public void setName(String name) { - this.name = name; - } - - public Map getConfiguration() { - return configuration; - } - - public void setConfiguration(Map configuration) { - this.configuration = configuration; - } - - public String getClassName() { - return className; - } - - public void setClassName(String className) { - this.className = className; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/MainController.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/MainController.java deleted file mode 100644 index 9dd1b63ca..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/MainController.java +++ /dev/null @@ -1,68 +0,0 @@ -package org.aksw.iguana.cc.controller; - -import org.aksw.iguana.cc.config.ConfigManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - -/** - * The main controller for the core. - * Will execute the Config Manager and the consuming for configurations. - * - * @author f.conrads - * - */ -public class MainController { - - - private static final Logger LOGGER = LoggerFactory - .getLogger(MainController.class); - - /** - * main method for standalone controlling. - * If the TaskController should run standalone instead of in the core itself - * - * @param argc - * @throws IOException - */ - public static void main(String[] argc) throws IOException{ - if(argc.length != 1 && argc.length !=2){ - System.out.println("java -jar iguana.jar [--ignore-schema] suite.yml \n\tsuite.yml - The suite containing the benchmark configuration\n\t--ignore-schema - Will not validate configuration using the internal json schema\n"); - return; - } - - MainController controller = new MainController(); - String config =argc[0]; - Boolean validate = true; - if(argc.length==2){ - if(argc[0].equals("--ignore-schema")){ - validate=false; - } - config = argc[1]; - } - controller.start(config, validate); - LOGGER.info("Stopping Iguana"); - //System.exit(0); - } - - /** - * Starts a configuration using the config file an states if Iguana should validate it using a json-schema - * - * @param configFile the Iguana config file - * @param validate should the config file be validated using a json-schema - * @throws IOException - */ - public void start(String configFile, Boolean validate) throws IOException{ - ConfigManager cmanager = new ConfigManager(); - File f = new File(configFile); - if (f.length()!=0) { - cmanager.receiveData(f, validate); - } else { - LOGGER.error("Empty configuration."); - - } - - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/TaskController.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/TaskController.java deleted file mode 100644 index 12838b94c..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/controller/TaskController.java +++ /dev/null @@ -1,47 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.controller; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.config.elements.Task; -import org.aksw.iguana.cc.tasks.TaskFactory; -import org.aksw.iguana.cc.tasks.TaskManager; 
-import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; -import java.util.concurrent.TimeoutException; - - -/** - * Task Controlling, will start the actual benchmark tasks and its {@link org.aksw.iguana.cc.tasks.TaskManager} - * - * - * - * @author f.conrads - * - */ -public class TaskController { - - private static Map shortHandMap = new HashMap(); - - private static final Logger LOGGER = LoggerFactory - .getLogger(TaskController.class); - - public void startTask(String[] ids, String dataset, Connection con, Task task) { - TaskManager tmanager = new TaskManager(); - String className=task.getClassName(); - TaskFactory factory = new TaskFactory(); - tmanager.setTask(factory.create(className, task.getConfiguration())); - try { - tmanager.startTask(ids, dataset, con, task.getName()); - } catch (IOException | TimeoutException e) { - LOGGER.error("Could not start Task "+className, e); - } - } - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/AbstractLanguageProcessor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/AbstractLanguageProcessor.java deleted file mode 100644 index b31da62b3..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/AbstractLanguageProcessor.java +++ /dev/null @@ -1,63 +0,0 @@ -package org.aksw.iguana.cc.lang; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.commons.streams.Streams; -import org.aksw.iguana.rp.vocab.Vocab; -import org.apache.http.Header; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.vocabulary.RDF; -import org.apache.jena.vocabulary.RDFS; -import org.json.simple.parser.ParseException; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.time.Instant; -import java.util.List; -import java.util.concurrent.TimeoutException; - -public abstract class AbstractLanguageProcessor implements LanguageProcessor { - - @Override - public String getQueryPrefix() { - return "query"; - } - - @Override - public Model generateTripleStats(List queries, String resourcePrefix, String taskID) { - Model model = ModelFactory.createDefaultModel(); - for(QueryWrapper wrappedQuery : queries) { - Resource subject = ResourceFactory.createResource(COMMON.RES_BASE_URI + resourcePrefix + "/" + wrappedQuery.getId()); - model.add(subject, RDF.type, Vocab.queryClass); - model.add(subject, Vocab.rdfsID, wrappedQuery.getId()); - model.add(subject, RDFS.label, wrappedQuery.getQuery().toString()); - } - return model; - } - - @Override - public Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException { - return response.getEntity().getContentLength(); - } - - @Override - public Long getResultSize(Header contentTypeHeader, ByteArrayOutputStream content, long contentLength) throws ParserConfigurationException, SAXException, ParseException, IOException { - return Long.valueOf(content.size()); - } - - @Override - public long readResponse(InputStream inputStream, ByteArrayOutputStream responseBody) throws IOException, TimeoutException { - return 
Streams.inputStream2ByteArrayOutputStream(inputStream, responseBody); - } - - //@Override - public long readResponse(InputStream inputStream, Instant startTime, Double timeOut, ByteArrayOutputStream responseBody) throws IOException, TimeoutException { - return Streams.inputStream2ByteArrayOutputStream(inputStream, startTime, timeOut, responseBody); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java deleted file mode 100644 index 211d50aa8..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.aksw.iguana.cc.lang; - -import org.apache.http.Header; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.jena.rdf.model.Model; -import org.json.simple.parser.ParseException; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.time.Instant; -import java.util.List; -import java.util.concurrent.TimeoutException; - -/** - * Language Processor tells how to handle Http responses as well as how to analyze queries and generate stats. - */ -public interface LanguageProcessor { - - /** - * Returns the prefix used for the queries (e.g. sparql, query or document) - * @return - */ - String getQueryPrefix(); - - /** - * Method to generate Triple Statistics for provided queries - * - * - * @param taskID - * @return Model with the triples to add to the results - */ - Model generateTripleStats(List queries, String resourcePrefix, String taskID); - - - /** - * Gets the result size of a given HTTP response - * - * @param response - * @return - * @throws ParserConfigurationException - * @throws SAXException - * @throws ParseException - * @throws IOException - */ - Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException; - - Long getResultSize(Header contentTypeHeader, ByteArrayOutputStream content, long contentLength) throws ParserConfigurationException, SAXException, ParseException, IOException; - - - long readResponse(InputStream inputStream, ByteArrayOutputStream responseBody) throws IOException, TimeoutException; - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/QueryWrapper.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/QueryWrapper.java deleted file mode 100644 index 080a87919..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/QueryWrapper.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.aksw.iguana.cc.lang; - -/** - * Util class to wrap a Query of what ever class it may be and it's id - */ -public class QueryWrapper { - - private Object query; - private String id; - - public QueryWrapper(Object query, String id){ - this.query=query; - this.id=id; - } - - public Object getQuery() { - return query; - } - - public void setQuery(Object query) { - this.query = query; - } - - public String getId() { - return id; - } - - public void setId(String id) { - this.id = id; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/RDFLanguageProcessor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/RDFLanguageProcessor.java deleted file mode 100644 index a69e5e671..000000000 --- 
a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/RDFLanguageProcessor.java +++ /dev/null @@ -1,110 +0,0 @@ -package org.aksw.iguana.cc.lang.impl; - -import org.aksw.iguana.cc.lang.AbstractLanguageProcessor; -import org.aksw.iguana.cc.lang.LanguageProcessor; -import org.aksw.iguana.cc.lang.QueryWrapper; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.vocab.Vocab; -import org.apache.http.Header; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.riot.Lang; -import org.apache.jena.vocabulary.RDF; -import org.apache.jena.vocabulary.RDFS; -import org.json.simple.parser.ParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.ByteArrayInputStream; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.lang.reflect.Field; -import java.util.List; - -/** - * Language for everything which returns RDF in any rdf format. - * - * Counts triples returned as ResultSize - */ -@Shorthand("lang.RDF") -public class RDFLanguageProcessor extends AbstractLanguageProcessor implements LanguageProcessor { - - private static Logger LOGGER = LoggerFactory.getLogger(RDFLanguageProcessor.class); - protected String queryPrefix="document"; - - @Override - public String getQueryPrefix() { - return this.queryPrefix; - } - - @Override - public Model generateTripleStats(List queries, String resourcePrefix, String taskID) { - Model model = ModelFactory.createDefaultModel(); - for(QueryWrapper wrappedQuery : queries) { - Resource subject = ResourceFactory.createResource(COMMON.RES_BASE_URI + resourcePrefix + "/" + wrappedQuery.getId()); - model.add(subject, RDF.type, Vocab.queryClass); - model.add(subject, Vocab.rdfsID, wrappedQuery.getId().replace(queryPrefix, "").replace("sparql", "")); - model.add(subject, RDFS.label, wrappedQuery.getQuery().toString()); - } - return model; - } - - @Override - public Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException { - Model m; - try { - Header contentTypeHeader = response.getEntity().getContentType(); - InputStream inputStream = response.getEntity().getContent(); - m = getModel(contentTypeHeader, inputStream); - } catch (IllegalAccessException e) { - LOGGER.error("Could not read response as model", e); - return -1L; - } - return countSize(m); - } - - @Override - public Long getResultSize(Header contentTypeHeader, ByteArrayOutputStream content, long contentLength) throws IOException { - Model m; - try { - //TODO BBAIS - InputStream inputStream = new ByteArrayInputStream(content.toByteArray()); - m = getModel(contentTypeHeader, inputStream); - } catch (IllegalAccessException e) { - LOGGER.error("Could not read response as model", e); - return -1L; - } - return countSize(m); - } - - protected Long countSize(Model m) { - return m.size(); - } - - private Model getModel(Header contentTypeHeader, InputStream contentInputStream) throws IOException, IllegalAccessException { - Model m = ModelFactory.createDefaultModel(); - Lang lang = null; - // get actual content type - String contentType = contentTypeHeader.getValue(); - // 
use reflection to iterate over all static Lang fields of the Lang class - for (Field langField : Lang.class.getFields()) { - //create the Language of the field - Lang susLang = (Lang) langField.get(Lang.class); - //if they are the same we have our language - if (contentType.equals(susLang.getContentType().getContentTypeStr())) { - lang = susLang; - break; - } - } - if (lang != null) - m.read(contentInputStream, null, lang.getName()); - return m; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SPARQLLanguageProcessor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SPARQLLanguageProcessor.java deleted file mode 100644 index d17104857..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SPARQLLanguageProcessor.java +++ /dev/null @@ -1,189 +0,0 @@ -package org.aksw.iguana.cc.lang.impl; - -import org.aksw.iguana.cc.lang.AbstractLanguageProcessor; -import org.aksw.iguana.cc.lang.LanguageProcessor; -import org.aksw.iguana.cc.lang.QueryWrapper; -import org.aksw.iguana.cc.utils.SPARQLQueryStatistics; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.vocab.Vocab; -import org.apache.commons.lang.StringUtils; -import org.apache.http.Header; -import org.apache.http.HeaderElement; -import org.apache.http.HttpEntity; -import org.apache.http.NameValuePair; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.jena.ext.com.google.common.hash.HashCode; -import org.apache.jena.ext.com.google.common.hash.Hashing; -import org.apache.jena.ext.com.google.common.io.BaseEncoding; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.vocabulary.OWL; -import org.apache.jena.vocabulary.RDF; -import org.apache.jena.vocabulary.RDFS; -import org.json.simple.parser.JSONParser; -import org.json.simple.parser.ParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.w3c.dom.Document; -import org.w3c.dom.NodeList; -import org.xml.sax.InputSource; -import org.xml.sax.SAXException; - -import javax.xml.parsers.DocumentBuilder; -import javax.xml.parsers.DocumentBuilderFactory; -import javax.xml.parsers.ParserConfigurationException; -import java.io.*; -import java.nio.charset.Charset; -import java.nio.charset.StandardCharsets; -import java.util.List; - -import static org.aksw.iguana.commons.streams.Streams.inputStream2String; - -/** - * SPARQL Language Processor. - * Tries to analyze queries as SPARQL queries and checks the HTTP response for either application/sparql-results+json - * or application/sparql-results+xml to count the result size correctly. Otherwise it assumes one record per line and counts the returned lines. 
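Two asides here. First, the reflection loop over the Lang fields above can be avoided: Jena ships RDFLanguages.contentTypeToLang(String), which performs the same content-type lookup without reflection. Second, to make the counting strategy described in this comment concrete, here is a self-contained sketch of the XML branch using plain JAXP; the sample document is invented, and the element names follow the W3C SPARQL XML results format.

    import org.w3c.dom.Document;
    import org.w3c.dom.NodeList;

    import javax.xml.parsers.DocumentBuilderFactory;
    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;

    public class XmlResultCountSketch {
        public static void main(String[] args) throws Exception {
            String xml = "<?xml version=\"1.0\"?>"
                    + "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">"
                    + "<head><variable name=\"s\"/></head>"
                    + "<results><result/><result/></results></sparql>";
            Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                    .parse(new ByteArrayInputStream(xml.getBytes(StandardCharsets.UTF_8)));
            // every <result> child of <results> is one query solution
            NodeList children = doc.getDocumentElement()
                    .getElementsByTagName("results").item(0).getChildNodes();
            long size = 0;
            for (int i = 0; i < children.getLength(); i++) {
                if ("result".equalsIgnoreCase(children.item(i).getNodeName())) {
                    size++;
                }
            }
            System.out.println(size); // prints 2
        }
    }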
- */ -@Shorthand("lang.SPARQL") -public class SPARQLLanguageProcessor extends AbstractLanguageProcessor implements LanguageProcessor { - - private static Logger LOGGER = LoggerFactory.getLogger(SPARQLLanguageProcessor.class); - - public static final String XML_RESULT_ELEMENT_NAME = "result"; - public static final String XML_RESULT_ROOT_ELEMENT_NAME = "results"; - public static final String QUERY_RESULT_TYPE_JSON = "application/sparql-results+json"; - public static final String QUERY_RESULT_TYPE_XML = "application/sparql-results+xml"; - private static final String LSQ_RES = "http://lsq.aksw.org/res/q-"; - - @Override - public String getQueryPrefix() { - return "sparql"; - } - - @Override - public Model generateTripleStats(List queries, String resourcePrefix, String taskID) { - Model model = ModelFactory.createDefaultModel(); - for(QueryWrapper wrappedQuery : queries) { - Resource subject = ResourceFactory.createResource(COMMON.RES_BASE_URI + resourcePrefix + "/" + wrappedQuery.getId()); - model.add(subject, RDF.type, Vocab.queryClass); - model.add(subject, Vocab.rdfsID, wrappedQuery.getId().replace("sparql", "")); - model.add(subject, RDFS.label, wrappedQuery.getQuery().toString()); - try { - Query q = QueryFactory.create(wrappedQuery.getQuery().toString()); - SPARQLQueryStatistics qs2 = new SPARQLQueryStatistics(); - qs2.getStatistics(q); - - model.add(subject, Vocab.aggrProperty, model.createTypedLiteral(qs2.aggr==1)); - model.add(subject, Vocab.filterProperty, model.createTypedLiteral(qs2.filter==1)); - model.add(subject, Vocab.groupByProperty, model.createTypedLiteral(qs2.groupBy==1)); - model.add(subject, Vocab.havingProperty, model.createTypedLiteral(qs2.having==1)); - model.add(subject, Vocab.triplesProperty, model.createTypedLiteral(qs2.triples)); - model.add(subject, Vocab.offsetProperty, model.createTypedLiteral(qs2.offset==1)); - model.add(subject, Vocab.optionalProperty, model.createTypedLiteral(qs2.optional==1)); - model.add(subject, Vocab.orderByProperty, model.createTypedLiteral(qs2.orderBy==1)); - model.add(subject, Vocab.unionProperty, model.createTypedLiteral(qs2.union==1)); - model.add(subject, OWL.sameAs, getLSQHash(q)); - }catch(Exception e){ - LOGGER.warn("Query statistics could not be created. 
Not using SPARQL?"); - } - } - return model; - } - - private Resource getLSQHash(Query query){ - HashCode hashCode = Hashing.sha256().hashString(query.toString(), StandardCharsets.UTF_8); - String result = BaseEncoding.base64Url().omitPadding().encode(hashCode.asBytes()); - return ResourceFactory.createResource(LSQ_RES+result); - } - - - public static String getContentTypeVal(Header header) { - for (HeaderElement el : header.getElements()) { - NameValuePair cTypePair = el.getParameterByName("Content-Type"); - - if (cTypePair != null && !cTypePair.getValue().isEmpty()) { - return cTypePair.getValue(); - } - } - int index = header.toString().indexOf("Content-Type"); - if (index >= 0) { - String ret = header.toString().substring(index + "Content-Type".length() + 1); - if (ret.contains(";")) { - return ret.substring(0, ret.indexOf(";")).trim(); - } - return ret.trim(); - } - return "application/sparql-results+json"; - } - - public static long getJsonResultSize(ByteArrayOutputStream res) throws ParseException, UnsupportedEncodingException { - JSONParser parser = new JSONParser(); - SaxSparqlJsonResultCountingParser handler = new SaxSparqlJsonResultCountingParser(); - parser.parse(res.toString(StandardCharsets.UTF_8), handler, true); - return handler.getNoBindings(); - } - - public static long getXmlResultSize(ByteArrayOutputStream res) throws ParserConfigurationException, IOException, SAXException { - DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance(); - DocumentBuilder dBuilder = dbFactory.newDocumentBuilder(); - - ByteArrayInputStream bbais = new ByteArrayInputStream(res.toByteArray()); - Document doc = dBuilder.parse(bbais); - NodeList childNodes = doc.getDocumentElement().getElementsByTagName(XML_RESULT_ROOT_ELEMENT_NAME).item(0).getChildNodes(); - - long size = 0; - for (int i = 0; i < childNodes.getLength(); i++) { - if (XML_RESULT_ELEMENT_NAME.equalsIgnoreCase(childNodes.item(i).getNodeName())) { - size++; - } - } - return size; - - } - - @Override - public Long getResultSize(CloseableHttpResponse response) throws ParserConfigurationException, SAXException, ParseException, IOException { - HttpEntity httpResponse = response.getEntity(); - Header contentTypeHeader = response.getEntity().getContentType(); - - ByteArrayOutputStream entity; - try (InputStream inputStream = httpResponse.getContent()) { - - entity = inputStream2String(inputStream); - } catch (IOException e) { - LOGGER.error("Query result could not be read.", e); - throw e; - } - return getResultSize(contentTypeHeader, entity, entity.size()); - } - - @Override - public Long getResultSize(Header contentTypeHeader, ByteArrayOutputStream content, long contentLength) throws ParserConfigurationException, SAXException, ParseException, IOException { - try { - switch (getContentTypeVal(contentTypeHeader)) { - case QUERY_RESULT_TYPE_JSON: - return getJsonResultSize(content); - - case QUERY_RESULT_TYPE_XML: - return getXmlResultSize(content); - default: - //return content.countMatches('\n')+1; - long matches=0; - for(byte b: content.toByteArray()){ - if(b=='\n'){ - matches++; - } - } - return matches+1; - } - } catch (ParseException | ParserConfigurationException | IOException | SAXException e) { - LOGGER.error("Query results could not be parsed: ", e); - throw e; - } - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java deleted file mode 100644 
index d4c1f3e29..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java +++ /dev/null @@ -1,114 +0,0 @@ -package org.aksw.iguana.cc.lang.impl; - -import org.json.simple.parser.ContentHandler; -import org.json.simple.parser.ParseException; - - -import static org.json.simple.parser.ParseException.ERROR_UNEXPECTED_EXCEPTION; - -/** - * SAX Parser for SPARQL JSON Results. - * For correct SPARQL JSON Results it returns the correct size. - * For malformed results it may or may not fail. For malformed JSON it fails if the underlying json.simple.parser fails. - */ -class SaxSparqlJsonResultCountingParser implements ContentHandler { - - private boolean headFound = false; - - private int objectDepth = 0; - private boolean inResults = false; - private boolean inBindings = false; - private boolean inBindingsArray = false; - - private long noBindings = 0; - - public long getNoBindings() { - return noBindings; - } - - @Override - public void startJSON() { - } - - @Override - public void endJSON() throws ParseException { - if (inResults || inBindings || inBindingsArray || !headFound || objectDepth != 0) - throw new ParseException(ERROR_UNEXPECTED_EXCEPTION, "SPARQL Json Response was malformed."); - } - - @Override - public boolean startObject() { - objectDepth += 1; - if (objectDepth == 3 && inBindingsArray) { - noBindings += 1; - } - return true; - } - - @Override - public boolean endObject() { - switch (objectDepth) { - case 1: - if (inResults) - inResults = false; - break; - case 2: - if (inBindings) { - inBindings = false; - } - break; - } - objectDepth -= 1; - return true; - } - - @Override - public boolean startArray() { - if (objectDepth == 2 && inResults && inBindings && !inBindingsArray) { - inBindingsArray = true; - } - return true; - } - - @Override - public boolean endArray() { - if (objectDepth == 2 && inResults && inBindings && inBindingsArray) { - inBindingsArray = false; - } - return true; - } - - @Override - public boolean startObjectEntry(String key) { - switch (objectDepth) { - case 1: - switch (key) { - case "head": - headFound = true; - break; - case "results": - if (headFound) - inResults = true; - break; - } - break; - case 2: - if ("bindings".compareTo(key) == 0) { - inBindings = true; - } - break; - } - return true; - } - - @Override - public boolean endObjectEntry() { - return true; - } - - public boolean primitive(Object value) { - return true; - } - - -} \ No newline at end of file diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/ThrowawayLanguageProcessor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/ThrowawayLanguageProcessor.java deleted file mode 100644 index 5f2267936..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/lang/impl/ThrowawayLanguageProcessor.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.aksw.iguana.cc.lang.impl; - -import org.aksw.iguana.cc.lang.AbstractLanguageProcessor; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.streams.Streams; -import org.apache.http.Header; -import org.json.simple.parser.ParseException; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.ByteArrayOutputStream; -import java.io.IOException; -import java.io.InputStream; -import java.time.Instant; -import java.util.concurrent.TimeoutException; - -@Shorthand("lang.SIMPLE") -public class ThrowawayLanguageProcessor extends AbstractLanguageProcessor { - - 
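The SAX-style handler deleted above counts bindings without materializing the whole document. The same streaming idea is sketched below with Jackson instead of json-simple, purely as an illustration; unlike the deleted handler, this simplified version does not track nesting depth, so it assumes the first "bindings" field it meets is the one under "results".

    import com.fasterxml.jackson.core.JsonFactory;
    import com.fasterxml.jackson.core.JsonParser;
    import com.fasterxml.jackson.core.JsonToken;

    public class JsonBindingsCountSketch {
        public static void main(String[] args) throws Exception {
            String json = "{\"head\":{\"vars\":[\"s\"]},"
                    + "\"results\":{\"bindings\":[{\"s\":{}},{\"s\":{}},{\"s\":{}}]}}";
            long bindings = 0;
            try (JsonParser p = new JsonFactory().createParser(json)) {
                while (p.nextToken() != null) {
                    if (p.currentToken() == JsonToken.FIELD_NAME
                            && "bindings".equals(p.currentName())) {
                        if (p.nextToken() == JsonToken.START_ARRAY) {
                            // count top-level objects, skipping their contents
                            while (p.nextToken() == JsonToken.START_OBJECT) {
                                bindings++;
                                p.skipChildren();
                            }
                        }
                        break;
                    }
                }
            }
            System.out.println(bindings); // prints 3
        }
    }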
@Override - public long readResponse(InputStream inputStream, ByteArrayOutputStream responseBody) throws IOException, TimeoutException { - return Streams.inputStream2Length(inputStream, Instant.now(), 0); - } - - @Override - public long readResponse(InputStream inputStream, Instant startTime, Double timeOut, ByteArrayOutputStream responseBody) throws IOException, TimeoutException { - return Streams.inputStream2Length(inputStream, startTime, timeOut); - } - - @Override - public Long getResultSize(Header contentTypeHeader, ByteArrayOutputStream content, long contentLength) throws ParserConfigurationException, SAXException, ParseException, IOException { - return contentLength; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryExecutionStats.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryExecutionStats.java deleted file mode 100644 index 3103e277a..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryExecutionStats.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.aksw.iguana.cc.model; - -/** - * Wrapper for a query execution. - */ -public class QueryExecutionStats { - private String queryID; - private long responseCode; - private double executionTime; - private long resultSize; - - public QueryExecutionStats(String queryID, long responseCode, double executionTime) - { - this.queryID = queryID; - this.responseCode = responseCode; - this.executionTime = executionTime; - } - - - public QueryExecutionStats(String queryID, long responseCode, double executionTime, long resultSize) - { - this.queryID = queryID; - this.responseCode = responseCode; - this.executionTime = executionTime; - this.resultSize = resultSize; - } - - public QueryExecutionStats() { - } - - public String getQueryID() { - return queryID; - } - - public void setQueryID(String queryID) { - this.queryID = queryID; - } - - public long getResponseCode() { - return responseCode; - } - - public void setResponseCode(long responseCode) { - this.responseCode = responseCode; - } - - public double getExecutionTime() { - return executionTime; - } - - public void setExecutionTime(double executionTime) { - this.executionTime = executionTime; - } - - public long getResultSize() { - return resultSize; - } - - public void setResultSize(long resultSize) { - this.resultSize = resultSize; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryResultHashKey.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryResultHashKey.java deleted file mode 100644 index 21ad255c6..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/model/QueryResultHashKey.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.aksw.iguana.cc.model; - -import java.util.Objects; - -/** - * Creates a Result Hash key for a query, thus a result size only has to be checked once and it will be cached using this key - */ -public class QueryResultHashKey { - - private String queryId; - private long uniqueKey; - - public QueryResultHashKey(String queryId, long uniqueKey) { - this.queryId = queryId; - this.uniqueKey = uniqueKey; - } - - public String getQueryId() { - return queryId; - } - - public void setQueryId(String queryId) { - this.queryId = queryId; - } - - public long getUniqueKey() { - return uniqueKey; - } - - public void setUniqueKey(long uniqueKey) { - this.uniqueKey = uniqueKey; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - 
QueryResultHashKey that = (QueryResultHashKey) o; - return uniqueKey == that.uniqueKey && - queryId.equals(that.queryId); - } - - @Override - public int hashCode() { - return Objects.hash(queryId, uniqueKey); - } - - @Override - public String toString() { - return "QueryResultHashKey{" + - "queryId='" + queryId + '\'' + - ", uniqueKey=" + uniqueKey + - '}'; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/AbstractWorkerQueryHandler.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/AbstractWorkerQueryHandler.java deleted file mode 100644 index 4361c84b1..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/AbstractWorkerQueryHandler.java +++ /dev/null @@ -1,80 +0,0 @@ -package org.aksw.iguana.cc.query; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.worker.AbstractWorker; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; - -import java.io.File; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; - -/** - * - * An abstract class to use if the QueryHandler should work with Workers. (e.g. in the Stresstest Task) - * - * @author f.conrads - * - */ -public abstract class AbstractWorkerQueryHandler implements QueryHandler{ - - /** - * Will contain the path of the worker specified query files to - * the Files where the final querys will be saved - */ - private Map mapping = new HashMap(); - private HashSet sparqlKeys = new HashSet(); - private HashSet updateKeys = new HashSet(); - private Collection workers; - - /** - * - * @param workers - */ - public AbstractWorkerQueryHandler(Collection workers) { - this.workers = workers; - for(Worker worker : workers) { - if(worker instanceof UPDATEWorker) { - updateKeys.add(((UPDATEWorker)worker).getQueriesFileName()); - } else { - sparqlKeys.add(((AbstractWorker)worker).getQueriesFileName()); - } - } - } - - @Override - public Map generate() { - for(String sparqlKey : sparqlKeys) { - mapping.put(sparqlKey, generateQueries(sparqlKey)); - } - for(String updateKey : updateKeys) { - mapping.put(updateKey, generateUPDATE(updateKey)); - } - for(Worker worker : workers) { - if(worker instanceof AbstractWorker) { - ((AbstractWorker)worker).setQueriesList( - mapping.get(((AbstractWorker)worker).getQueriesFileName())); - } - } - return mapping; - } - - /** - * This method will generate SPARQL Queries given a file with queries. - * - * @param queryFileName The queries file - * @return for each query in the file, a File representing the query - */ - protected abstract QuerySet[] generateQueries(String queryFileName) ; - - /** - * This method will generate UPDATE Queries given a folder with files in which updates are stated. - * - * @param updatePath The path to the updates - * @return for each update, a File representing it. - */ - protected abstract QuerySet[] generateUPDATE(String updatePath) ; - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandler.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandler.java deleted file mode 100644 index ab79141fd..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandler.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.aksw.iguana.cc.query; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.apache.jena.rdf.model.Model; - -import java.io.File; -import java.util.Map; - -/** - * The QueryHandler interface - *
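As an aside on the QueryResultHashKey deleted above: its equals/hashCode pair exists so the key can index a map of cached result sizes. A minimal sketch of that intended caching pattern, with ids and sizes invented and the deleted class assumed on the classpath:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public class ResultSizeCacheSketch {
        public static void main(String[] args) {
            // cache a result size under a (queryId, content hash) key once...
            Map<QueryResultHashKey, Long> cache = new ConcurrentHashMap<>();
            cache.put(new QueryResultHashKey("sparql0", 42L), 128L);
            // ...so that equal fields later yield a cache hit instead of a recount
            System.out.println(cache.get(new QueryResultHashKey("sparql0", 42L))); // 128
        }
    }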
- * The QueryHandler can be used to generate queries in the Tasks. - * - * @author f.conrads - * - */ -public interface QueryHandler { - - /** - * This will generate the queries. - * @return - */ - Map generate(); - - /** - * Generates some stats for the queries - * @param taskID - * @return - */ - Model generateTripleStats(String taskID); - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandlerFactory.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandlerFactory.java deleted file mode 100644 index 3014d6607..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/QueryHandlerFactory.java +++ /dev/null @@ -1,15 +0,0 @@ -package org.aksw.iguana.cc.query; - -import org.aksw.iguana.commons.factory.TypedFactory; - - -/** - * Factory to create a QueryHandler based upon a class name and constructor arguments - * - * @author f.conrads - * - */ -public class QueryHandlerFactory extends TypedFactory { - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandler.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandler.java deleted file mode 100644 index 70f956924..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandler.java +++ /dev/null @@ -1,86 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.InMemQuerySet; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.util.LinkedList; -import java.util.List; - -/** - * Uses a delimiter line to read one query - * default uses empty line - */ -@Shorthand("DelimInstancesQueryHandler") -public class DelimInstancesQueryHandler extends InstancesQueryHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(DelimInstancesQueryHandler.class); - - - private String delim= ""; - - public DelimInstancesQueryHandler(List workers) { - super(workers); - } - - - public DelimInstancesQueryHandler(String delim, List workers) { - super(workers); - this.delim = delim; - } - - - public DelimInstancesQueryHandler(List workers, String lang) { - super(workers, lang); - } - - public DelimInstancesQueryHandler(List workers, String lang, String delim) { - super(workers, lang); - this.delim = delim; - } - - @Override - protected QuerySet[] generateUpdatesPerLine(String updatePath, String idPrefix, int hashcode) { - return generateQueryPerLine(updatePath, idPrefix, hashcode); - } - - @Override - protected QuerySet[] generateQueryPerLine(String queryFileName, String idPrefix, int hashcode) { - - File queryFile = new File(queryFileName); - List ret = new LinkedList(); - try ( - BufferedReader reader = new BufferedReader(new FileReader(queryFileName))) { - StringBuilder currentQuery = new StringBuilder(); - String queryStr; - int id = 0; - while ((queryStr = reader.readLine()) != null) { - if (queryStr.equals(delim)) { - if(currentQuery.toString().trim().isEmpty()){ - currentQuery = new StringBuilder(); - continue; - } - ret.add(new InMemQuerySet(idPrefix + id++, getInstances(currentQuery.toString().trim()))); - currentQuery = new StringBuilder(); - continue; - } - currentQuery.append(queryStr).append("\n"); - - } - if(!currentQuery.toString().trim().isEmpty()) { - ret.add(new InMemQuerySet(idPrefix + id++, 
getInstances(currentQuery.toString()))); - } - currentQuery = new StringBuilder(); - } catch (IOException e) { - LOGGER.error("could not read queries"); - } - return ret.toArray(new QuerySet[]{}); - } - - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandler.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandler.java deleted file mode 100644 index ee06448f0..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandler.java +++ /dev/null @@ -1,194 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.lang.LanguageProcessor; -import org.aksw.iguana.cc.lang.QueryWrapper; -import org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor; -import org.aksw.iguana.cc.query.AbstractWorkerQueryHandler; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.FileBasedQuerySet; -import org.aksw.iguana.cc.query.set.impl.InMemQuerySet; -import org.aksw.iguana.cc.utils.FileUtils; -import org.aksw.iguana.cc.utils.SPARQLQueryStatistics; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.factory.TypedFactory; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.apache.jena.rdf.model.Model; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.LinkedList; -import java.util.List; - -/** - * - * A QueryHandler for already instances of queries. - * - * @author f.conrads - * - */ -@Shorthand("InstancesQueryHandler") -public class InstancesQueryHandler extends AbstractWorkerQueryHandler { - - private static final Logger LOGGER = LoggerFactory.getLogger(InstancesQueryHandler.class); - - protected String outputFolder = "queryCache"; - - protected HashMap type2IDcounter = new HashMap(); - - protected SPARQLQueryStatistics qs = new SPARQLQueryStatistics(); - - private QuerySet[] queryFiles; - - protected LanguageProcessor langProcessor = new SPARQLLanguageProcessor(); - protected int hashcode; - - //protected int hashcode; - - /** - * Default Constructor - * - * @param workers Workers to consider queryFiles/updatePaths of - */ - public InstancesQueryHandler(List workers) { - super(workers); - } - - public InstancesQueryHandler(List workers, String lang) { - super(workers); - langProcessor = new TypedFactory().create(lang, new HashMap()); - } - - @Override - protected QuerySet[] generateQueries(String queryFileName) { - // Save hashcode of the file content for later use in generating stats - hashcode = FileUtils.getHashcodeFromFileContent(queryFileName); - - QuerySet[] queries = generateQueryPerLine(queryFileName, langProcessor.getQueryPrefix(), hashcode); - this.queryFiles = queries; - - - return queries; - } - - protected QuerySet[] generateQueryPerLine(String queryFileName, String idPrefix, int hashcode) { - File queryFile = new File(queryFileName); - List ret = new LinkedList(); - LOGGER.info("[QueryHandler: {{}}] Queries will now be instantiated", this.getClass().getName()); - - try (BufferedReader reader = new BufferedReader(new FileReader(queryFileName))) { - String queryStr; - int id=0; - while ((queryStr = reader.readLine()) != null) { - if (queryStr.isEmpty()) { - continue; - } - ret.add(new InMemQuerySet(idPrefix+id++, getInstances(queryStr))); - - } - } catch (IOException e) { - LOGGER.error("could not read queries"); - } - 
LOGGER.info("[QueryHandler: {{}}] Finished instantiation of queries", this.getClass().getName()); - return ret.toArray(new QuerySet[]{}); - - } - - protected List getInstances(String queryStr) { - return Lists.newArrayList(queryStr); - } - - - protected File createFileWithID(File rootFolder, String idPrefix) throws IOException { - // create a File with an ID - int id = 0; - if (type2IDcounter.containsKey(idPrefix)) { - id = type2IDcounter.get(idPrefix); - } - File out = new File(rootFolder.getAbsolutePath() + File.separator + idPrefix + id); - out.createNewFile(); - id++; - type2IDcounter.put(idPrefix, id); - return out; - } - - @Override - protected QuerySet[] generateUPDATE(String updatePath) { - File dir = new File(updatePath); - if (dir.exists()) { - if (dir.isDirectory()) { - LOGGER.info("[QueryHandler: {{}}] Uses all UPDATE files in {{}}", this.getClass().getName(), - updatePath); - // dir is directory, get all files in the folder - File[] files = dir.listFiles(); - QuerySet[] sets = new QuerySet[files.length]; - for(int i=0;i ret = new LinkedList(); - LOGGER.info("[QueryHandler: {{}}] Queries will now be instantiated", this.getClass().getName()); - - try (BufferedReader reader = new BufferedReader(new FileReader(updatePath))) { - String queryStr; - int id=0; - while ((queryStr = reader.readLine()) != null) { - if (queryStr.isEmpty()) { - continue; - } - ret.add(new InMemQuerySet(idPrefix+id++, Lists.newArrayList(queryStr))); - - } - } catch (IOException e) { - LOGGER.error("could not read queries"); - } - LOGGER.info("[QueryHandler: {{}}] Finished instantiation of queries", this.getClass().getName()); - return ret.toArray(new QuerySet[]{}); - } - - @Override - public Model generateTripleStats(String taskID) { - List queries = new ArrayList(); - for (QuerySet queryFile : queryFiles) { - try { - String query = queryFile.getQueryAtPos(0); - queries.add(new QueryWrapper(query, queryFile.getName())); - }catch(IOException e){ - LOGGER.error("[QueryHandler: {{}}] Cannot read file {{}}", this.getClass().getName(), - queryFile.getName()); - } - } - return langProcessor.generateTripleStats(queries, hashcode+"", taskID); - } - - - public void setOutputFolder(String outputFolder) { - this.outputFolder = outputFolder; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/PatternQueryHandler.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/PatternQueryHandler.java deleted file mode 100644 index 1d14b046f..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/impl/PatternQueryHandler.java +++ /dev/null @@ -1,233 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.FileBasedQuerySet; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.apache.jena.query.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -/** - * This QueryHandler uses query patterns and converts them into query instances.
- * A query pattern is a SPARQL 1.1 Query which can have additional variables %%var[0-9]+%% in the - * Basic Graph Pattern.


- * For example: SELECT * {?s %%var1%% ?o . ?o <http://exa.com> %%var100%%}
- * This QueryHandler will then convert this query to:
- * SELECT ?var1 ?var100 {?s ?var1 ?o . ?o <http://exa.com> ?var100}
- * and will request query solutions from a user-given SPARQL endpoint (e.g. DBpedia)

- * The solution will then be instantiated into the query pattern. - * The result can look as follows:


- * SELECT * {?s <http://prop/1> ?o . ?o <http://exa.com> "123"}
- * SELECT * {?s <http://prop/1> ?o . ?o <http://exa.com> "12"}
- * SELECT * {?s <http://prop/2> ?o . ?o <http://exa.com> "1234"}
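To make the flow just illustrated concrete, here is a compact Jena sketch of the pattern-to-instance conversion; the endpoint URL is illustrative and the handling is simplified to a single %%var1%% variable, whereas the class below generalizes to any number of pattern variables and caps the probe with a configurable LIMIT.

    import org.apache.jena.query.ParameterizedSparqlString;
    import org.apache.jena.query.QueryExecution;
    import org.apache.jena.query.QueryExecutionFactory;
    import org.apache.jena.query.QuerySolution;
    import org.apache.jena.query.ResultSet;

    import java.util.ArrayList;
    import java.util.List;

    public class PatternInstantiationSketch {
        public static void main(String[] args) {
            // pattern variable %%var1%% becomes an ordinary SPARQL variable
            String command = "SELECT * { ?s %%var1%% ?o }".replace("%%var1%%", "?var1");
            // probe query asking the endpoint for concrete values of ?var1
            String probe = "SELECT DISTINCT ?var1 { ?s ?var1 ?o } LIMIT 2000";
            List<String> instances = new ArrayList<>();
            try (QueryExecution exec = QueryExecutionFactory
                    .sparqlService("https://dbpedia.org/sparql", probe)) {
                ResultSet solutions = exec.execSelect();
                while (solutions.hasNext()) {
                    QuerySolution solution = solutions.next();
                    ParameterizedSparqlString pss = new ParameterizedSparqlString(command);
                    pss.setParam("var1", solution.get("var1"));
                    instances.add(pss.toString()); // one executable instance per solution
                }
            }
            instances.forEach(System.out::println);
        }
    }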
- * - * - * @author f.conrads - * - */ -@Shorthand("PatternQueryHandler") -public class PatternQueryHandler extends InstancesQueryHandler { - - private static final Logger LOGGER = LoggerFactory - .getLogger(PatternQueryHandler.class); - - private String service; - - private Long limit = 2000l; - - /** - * - * The constructor for the pattern based QueryHandler.
- * Query Instances will be restricted to 2000 per QueryPattern
- *
- * This will check all Workers if they are of type SPARQL or UPDATEWorker and use their - * queriesFile/updatePath to generate queries out of them. - * - * @param workers - * @param endpoint the sparql endpoint to derive the variable instances - */ - public PatternQueryHandler(List workers, String endpoint) { - super(workers); - this.service = endpoint; - } - - /** - * - * The constructor for the pattern based QueryHandler.

- *
- * This will check all Workers if they are of type SPARQL or UPDATEWorker and use their - * queriesFile/updatePath to generate queries out of them. - * - * @param workers - * @param endpoint the sparql endpoint to derive the variable instances - * @param limit the restriction of query instances per query pattern as String - */ - public PatternQueryHandler(LinkedList workers, String endpoint, String limit) { - this(workers, endpoint, Long.parseLong(limit)); - } - - /** - * - * The constructor for the pattern based QueryHandler.

- *
- * This will check all Workers if they are of type SPARQL or UPDATEWorker and use their - * queriesFile/updatePath to generate queries out of them. - * - * @param workers - * @param endpoint the sparql endpoint to derive the variable instances - * @param limit the restriction of query instances per query pattern - */ - public PatternQueryHandler(LinkedList workers, String endpoint, Long limit) { - super(workers); - this.service = endpoint; - this.limit = limit; - } - - - - protected String replaceVars(String queryStr, Set varNames) { - String command = queryStr; - Pattern pattern = Pattern.compile("%%var[0-9]+%%"); - Matcher m = pattern.matcher(queryStr); - while(m.find()) { - String eVar = m.group(); - String var = eVar.replace("%", ""); - command = command.replace(eVar, "?"+var); - varNames.add(var); - } - return command; - } - - - @Override - protected QuerySet[] generateQueryPerLine(String queryFileName, String idPrefix, int hashcode) { - File queryFile = new File(queryFileName); - List ret = new LinkedList(); - // check if folder is cached - if (queryFile.exists()) { - File outputFolder = new File(this.outputFolder + File.separator + hashcode); - if (outputFolder.exists()) { - LOGGER.warn("[QueryHandler: {{}}] queries were instantiated already, will use old instances. To generate them anew, remove the {{}} folder", - this.getClass().getName(), this.outputFolder + File.separator + hashcode); - // is cached use caching - for(File f : outputFolder.listFiles()){ - try { - ret.add(new FileBasedQuerySet(f)); - } catch (IOException e) { - e.printStackTrace(); - } - } - return ret.toArray(new QuerySet[]{}); - } else { - LOGGER.info("[QueryHandler: {{}}] Queries will now be instantiated", this.getClass().getName()); - // create directories - outputFolder.mkdirs(); - try (BufferedReader reader = new BufferedReader(new FileReader(queryFileName))) { - String queryStr; - // iterate over all queries - while ((queryStr = reader.readLine()) != null) { - if (queryStr.isEmpty()) { - continue; - } - //create file with id and write query to it - File out = createFileWithID(outputFolder, idPrefix); - try (PrintWriter pw = new PrintWriter(out)) { - for (String query : getInstances(queryStr)) { - pw.println(query); - } - } - QuerySet qs = new FileBasedQuerySet(out); - ret.add(qs); - - } - } catch (IOException e) { - LOGGER.error("[QueryHandler: {{}}] could not write instances to folder {{}}", - this.getClass().getName(), outputFolder.getAbsolutePath()); - } - LOGGER.info("[QueryHandler: {{}}] Finished instantiation of queries", this.getClass().getName()); - } - return ret.toArray(new QuerySet[]{}); - } else { - LOGGER.error("[QueryHandler: {{}}] Queries with file {{}} could not be instantiated due to missing file", - this.getClass().getName(), queryFileName); - } - return new QuerySet[]{}; - } - - @Override - protected List getInstances(String queryStr) { - List instances = new ArrayList<>(); - - //check if query is already an instance - try{ - QueryFactory.create(queryStr); - //query is instance - LOGGER.debug("[QueryHandler: {{}}] Query is already an instance: {{}}", this.getClass().getName(), queryStr); - instances.add(queryStr); - return instances; - }catch(Exception e) { - //query is pattern, nothing to do - } - - //get vars from queryStr - Set varNames = new HashSet(); - String command = replaceVars(queryStr, varNames); - - //generate parameterized sparql query - ParameterizedSparqlString pss = new ParameterizedSparqlString(); - pss.setCommandText(command); - ResultSet exchange = getInstanceVars(pss, 
varNames); - // exchange vars in PSS - if(!exchange.hasNext()) { - //no solution - LOGGER.warn("[QueryHandler: {{}}] Query has no solution, will use variables instead of var instances: {{}}", this.getClass().getName(), queryStr); - instances.add(command); - } - while(exchange.hasNext()) { - QuerySolution solution = exchange.next(); - for(String var : exchange.getResultVars()) { - //exchange variable with - pss.clearParam(var); - pss.setParam(var, solution.get(var)); - } - instances.add(pss.toString()); - } - LOGGER.debug("Found instances {}", instances); - - return instances; - } - - - protected ResultSet getInstanceVars(ParameterizedSparqlString pss, Set varNames) { - QueryExecution exec = QueryExecutionFactory.createServiceRequest(service, convertToSelect(pss,varNames)); - //return result set - return exec.execSelect(); - } - - protected Query convertToSelect(ParameterizedSparqlString pss, Set varNames) { - if(varNames.isEmpty()){ - return pss.asQuery(); - } - Query queryCpy = pss.asQuery(); - queryCpy.getQueryPattern(); - - StringBuilder queryStr = new StringBuilder("SELECT DISTINCT "); - for(String varName : varNames) { - queryStr.append("?").append(varName).append(" "); - } - queryStr.append(queryCpy.getQueryPattern()); - ParameterizedSparqlString pssSelect = new ParameterizedSparqlString(); - pssSelect.setCommandText(queryStr.toString()); - pssSelect.setNsPrefixes(pss.getNsPrefixMap()); - pssSelect.append(" LIMIT "); - pssSelect.append(this.limit); - return pssSelect.asQuery(); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/QuerySet.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/QuerySet.java deleted file mode 100644 index b21073c37..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/QuerySet.java +++ /dev/null @@ -1,23 +0,0 @@ -package org.aksw.iguana.cc.query.set; - - -import java.io.IOException; - -/** - * A query set contains a benchmark query (this might be several queries in itself) - */ -public interface QuerySet { - - /** - * Gets a query at the position pos. 
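The interface is small; a short usage sketch against it and the two implementations deleted just below (query strings invented, code assumed to run inside a method that may throw IOException):

    // in-memory variant: the whole "set" lives in a list
    QuerySet queries = new InMemQuerySet("sparql0",
            java.util.List.of("SELECT * {?s ?p ?o} LIMIT 10", "ASK {?s ?p ?o}"));
    for (int i = 0; i < queries.size(); i++) {
        System.out.println(queries.getName() + "[" + i + "]: " + queries.getQueryAtPos(i));
    }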
- * @param pos Position of the query in the set - * @return The query at position pos - */ - String getQueryAtPos(int pos) throws IOException; - - int size(); - - String getName(); - - String getContent() throws IOException; -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/FileBasedQuerySet.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/FileBasedQuerySet.java deleted file mode 100644 index 54093b17f..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/FileBasedQuerySet.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.aksw.iguana.cc.query.set.impl; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.utils.FileUtils; - -import java.io.File; -import java.io.IOException; - -/** - * File based query set - */ -public class FileBasedQuerySet implements QuerySet { - - private File queryFile; - private int size=0; - - - - public FileBasedQuerySet(File queryFile) throws IOException { - this.queryFile=queryFile; - size=FileUtils.countLines(queryFile); - } - - public File getFile(){ - return queryFile; - } - - @Override - public String getQueryAtPos(int pos) throws IOException { - return FileUtils.readLineAt(pos, queryFile); - } - - @Override - public int size() { - return size; - } - - @Override - public String getName() { - return queryFile.getName(); - } - - @Override - public String getContent() throws IOException { - return org.apache.commons.io.FileUtils.readFileToString(queryFile, "UTF-8"); - } - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/InMemQuerySet.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/InMemQuerySet.java deleted file mode 100644 index ade10b206..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/query/set/impl/InMemQuerySet.java +++ /dev/null @@ -1,37 +0,0 @@ -package org.aksw.iguana.cc.query.set.impl; - -import org.aksw.iguana.cc.query.set.QuerySet; - -import java.io.IOException; -import java.util.List; - -public class InMemQuerySet implements QuerySet { - - private List queries; - private String name; - - public InMemQuerySet(String queryID, List queries){ - name=queryID; - this.queries=queries; - } - - @Override - public String getQueryAtPos(int pos) throws IOException { - return queries.get(pos); - } - - @Override - public int size() { - return queries.size(); - } - - @Override - public String getName() { - return name; - } - - @Override - public String getContent() throws IOException { - return queries.toString(); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/AbstractTask.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/AbstractTask.java deleted file mode 100644 index 79778f935..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/AbstractTask.java +++ /dev/null @@ -1,116 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.tasks; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.experiment.ExperimentManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.util.Properties; - -/** - * Abstract Task to help create a Task. 
- * Will do the background work - * - * @author f.conrads - * - */ -public abstract class AbstractTask implements Task { - - private Logger LOGGER = LoggerFactory.getLogger(getClass()); - - private ExperimentManager rpControl = ExperimentManager.getInstance(); - protected String taskID; - protected Connection con; - - /** - * Properties to add task specific metaData before start and execute which then - * will be send to the resultprocessor - */ - protected Properties metaData = new Properties(); - private String expID; - private String suiteID; - private String datasetID; - private String conID; - private String taskName; - - /** - * Creates an AbstractTask with the TaskID - */ - public AbstractTask() { - - } - - - /* - * (non-Javadoc) - * - * @see org.aksw.iguana.tp.tasks.Task#init() - */ - @Override - public void init(String[] ids, String dataset, Connection con, String taskName) { - this.suiteID=ids[0]; - this.expID=ids[1]; - this.taskID=ids[2]; - this.taskName=taskName; - this.datasetID=dataset; - this.conID=con.getName(); - this.con=con; - } - - @Override - public void start() { - // send to ResultProcessor - rpControl.receiveData(metaData); - - } - - @Override - public void sendResults(Properties data) throws IOException { - data.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, this.taskID); - rpControl.receiveData(data); - } - - @Override - public void close() { - Properties end = new Properties(); - // set exp task id - end.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, this.taskID); - // set end flag - end.put(COMMON.RECEIVE_DATA_END_KEY, true); - // send to ResultProcessor - rpControl.receiveData(end); - } - - @Override - public void addMetaData() { - // set exp Task ID - metaData.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, this.taskID); - // set start flag - metaData.put(COMMON.RECEIVE_DATA_START_KEY, true); - // - metaData.setProperty(COMMON.EXPERIMENT_ID_KEY, this.expID); - metaData.setProperty(COMMON.SUITE_ID_KEY, this.suiteID); - metaData.setProperty(COMMON.DATASET_ID_KEY, this.datasetID); - metaData.setProperty(COMMON.CONNECTION_ID_KEY, this.conID); - if(this.taskName!=null) { - metaData.setProperty(COMMON.EXPERIMENT_TASK_NAME_KEY, this.taskName); - } - if(this.con.getVersion()!=null) { - metaData.setProperty(COMMON.CONNECTION_VERSION_KEY, this.con.getVersion()); - } - String className=this.getClass().getCanonicalName(); - if(this.getClass().isAnnotationPresent(Shorthand.class)){ - className = this.getClass().getAnnotation(Shorthand.class).value(); - } - metaData.setProperty(COMMON.EXPERIMENT_TASK_CLASS_ID_KEY, className); - this.metaData.put(COMMON.EXTRA_META_KEY, new Properties()); - } - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/Task.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/Task.java deleted file mode 100644 index fe3b1cc3d..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/Task.java +++ /dev/null @@ -1,66 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.tasks; - -import org.aksw.iguana.cc.config.elements.Connection; - -import java.io.IOException; -import java.util.Properties; - -/** - * A simple Task to execute - * - * @author f.conrads - * - */ -public interface Task { - - /** - * Will execute the Task - */ - public void execute(); - - /** - * Will start the Task (sending the rabbitMQ start flag) - */ - public void start(); - - /** - * Will send the results to the result processing. 
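Taken together, the methods of this interface imply the call order that the TaskManager further below actually uses. A condensed, hypothetical driver (all identifiers are placeholders):

    // hypothetical driver; ids, connection and configuration are placeholders
    Task task = new TaskFactory().create("Stresstest", configuration);
    task.init(new String[]{suiteID, expID, taskID}, "dataset1", connection, "run1");
    task.addMetaData(); // attach suite/experiment/task metadata for the storages
    task.start();       // send the start flag to the result processor
    task.execute();     // run the benchmark itself
    task.close();       // send the end flag and finish result processing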
/** - * Will send the results to the result processor. - * @param data the results to send - * @throws IOException - */ - void sendResults(Properties data) throws IOException; - - - /** - * Will close the Task and post process everything (e.g. send the end flag to the rabbitMQ queue) - */ - void close(); - - /** - * Will add the Meta data for the start which then can be saved into the triple based storages - */ - void addMetaData(); - - - /** - * Will initialize the task - * @param ids normally the suiteID, experimentID, taskID - * @param dataset the dataset name - * @param con the current connection to execute the task against - * @param taskName the taskName - */ - void init(String[] ids, String dataset, Connection con, String taskName); - - /** - * Will initialize the task - * @param ids normally the suiteID, experimentID, taskID - * @param dataset the dataset name - * @param con the current connection to execute the task against - */ - default void init(String[] ids, String dataset, Connection con){ - init(ids, dataset, con, null); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskFactory.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskFactory.java deleted file mode 100644 index bf753ef1b..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskFactory.java +++ /dev/null @@ -1,18 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.tasks; - -import org.aksw.iguana.commons.factory.TypedFactory; - - -/** - * Factory to create Tasks. see {@link TypedFactory} for more information. - * - * @author f.conrads - * - */ -public class TaskFactory extends TypedFactory<Task>{ - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskManager.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskManager.java deleted file mode 100644 index f2d806c89..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/TaskManager.java +++ /dev/null @@ -1,45 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.tasks; - -import org.aksw.iguana.cc.config.elements.Connection; - -import java.io.IOException; -import java.util.concurrent.TimeoutException; - -/** - * Will manage the Tasks - * - * @author f.conrads - * - */ -public class TaskManager { - - private Task task; - - /** - * Will simply set the Task to execute - * @param task the Task to run - */ - public void setTask(Task task){ - this.task = task; - } - - /** - * Will initialize and start the Task - * - * @throws IOException - * @throws TimeoutException - */ - public void startTask(String[] ids, String dataset, Connection con, String taskName) throws IOException, TimeoutException{ - this.task.init(ids, dataset, con, taskName); - this.task.addMetaData(); - this.task.start(); - this.task.execute(); - this.task.close(); - } - - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java deleted file mode 100644 index 95a4763f8..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java +++ /dev/null @@ -1,393 +0,0 @@ -/** - * - */ -package org.aksw.iguana.cc.tasks.impl; - -import org.aksw.iguana.cc.config.CONSTANTS; -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.QueryHandler; -import org.aksw.iguana.cc.query.QueryHandlerFactory; -import org.aksw.iguana.cc.query.impl.InstancesQueryHandler; -import org.aksw.iguana.cc.tasks.AbstractTask; -import org.aksw.iguana.cc.worker.Worker; -import 
org.aksw.iguana.cc.worker.WorkerFactory; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.riot.RDFDataMgr; -import org.apache.jena.riot.RDFFormat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.StringWriter; -import java.time.Instant; -import java.util.*; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - - -/** - * Stresstest. - * Will stresstest a connection using several Workers (simulated Users) each in one thread. - */ -@Shorthand("Stresstest") -public class Stresstest extends AbstractTask { - - - private static final Logger LOGGER = LoggerFactory - .getLogger(Stresstest.class); - - private ArrayList workerConfig; - private LinkedHashMap warmupConfig; - - private Double timeLimit; - private Long noOfQueryMixes; - protected List workers = new LinkedList(); - private Instant startTime; - private String qhClassName; - private Long noOfWorkers= 0L; - protected String qhCacheFolder = "queryInstances"; - - private Double warmupTimeMS; - - - - private HashMap qhConfig; - private List warmupWorkers = new ArrayList<>(); - private HashMap warmupQHConfig; - private String warmupQHClass; - - - public Stresstest(Integer timeLimit, ArrayList workers, LinkedHashMap queryHandler) throws FileNotFoundException { - this(timeLimit, workers, queryHandler, null); - } - - public Stresstest(Integer timeLimit, ArrayList workers, LinkedHashMap queryHandler, LinkedHashMap warmup) throws FileNotFoundException { - this.timeLimit=timeLimit.doubleValue(); - this.workerConfig = workers; - this.qhConfig = queryHandler; - this.warmupConfig = warmup; - } - - public Stresstest(ArrayList workers, LinkedHashMap queryHandler, Integer noOfQueryMixes) throws FileNotFoundException { - this(workers, queryHandler, null, noOfQueryMixes); - } - - public Stresstest(ArrayList workers, LinkedHashMap queryHandler, LinkedHashMap warmup, Integer noOfQueryMixes) throws FileNotFoundException { - this.noOfQueryMixes=noOfQueryMixes.longValue(); - this.workerConfig = workers; - this.qhConfig = queryHandler; - this.warmupConfig = warmup; - } - - private void setConfig(ArrayList workers, HashMap queryHandler, HashMap warmup){ - - noOfWorkers+=createWorkers(workers, this.workers, this.timeLimit); - //let TypedFactory create queryHandlerConfiguration from className and configuration and add Workers - this.qhClassName = queryHandler.get("className").toString(); - this.qhConfig = (HashMap)queryHandler.getOrDefault("configuration", new HashMap<>()); - qhConfig.put("workers", this.workers); - - //If warmup - if(warmup!=null){ - //set time - this.warmupTimeMS = ((Integer) warmup.get("timeLimit")).doubleValue(); - //set warmup workers - ArrayList warmupWorkerConfig = (ArrayList) warmup.get("workers"); - createWorkers(warmupWorkerConfig, this.warmupWorkers, this.warmupTimeMS); - //if warmup uses a different queryHandler than the actual one create the query handler - createWarmupQueryHandler(warmup); - } - addMetaData(); - } - - private void createWarmupQueryHandler(HashMap warmup) { - if(warmup.containsKey("queryHandler")){ - HashMap warmupQueryHandler = (HashMap) warmup.get("queryHandler"); - this.warmupQHClass = warmupQueryHandler.get("className").toString(); - this.warmupQHConfig = 
(HashMap)warmupQueryHandler.getOrDefault("configuration", new HashMap<>()); - this.warmupQHConfig.put("workers", this.warmupWorkers); - }else{ - //otherwise use default - this.warmupQHClass = qhClassName; - //create copy of the current configuration - this.warmupQHConfig = new HashMap(qhConfig); - this.warmupQHConfig.put("workers", this.warmupWorkers); - } - } - - private int createWorkers(ArrayList workers, List workersToAddTo, Double timeLimit){ - int noOfWorkers=0; - for(HashMap workerConfig : workers){ - noOfWorkers += createWorker(workerConfig, workersToAddTo, timeLimit, noOfWorkers); - } - return noOfWorkers; - } - - private int createWorker(HashMap workerConfig, List workersToAddTo, Double timeLimit, Integer baseID) { - //let TypedFactory create from className and configuration - String className = workerConfig.remove("className").toString(); - //if shorthand classname is used, exchange to full classname - Integer threads = (Integer)workerConfig.remove("threads"); - workerConfig.put("connection", con); - workerConfig.put("taskID", taskID); - if(timeLimit!=null) - workerConfig.put("timeLimit", timeLimit.intValue()); - for(int i=0;i props = worker.popQueryResults(); - if(props == null){ - return; - } - - for (Properties results : props) { - try { - - // send results via RabbitMQ - LOGGER.debug("[TaskID: {{}}] Send results", taskID); - this.sendResults(results); - LOGGER.debug("[TaskID: {{}}] results could be send", taskID); - } catch (IOException e) { - LOGGER.error("[TaskID: {{}}] Could not send results due to exc.",taskID, e); - LOGGER.error("[TaskID: {{}}] Results: {{}}", taskID, results); - } - } - } - - - @Override - public void close() { - super.close(); - - } - - protected long warmup() { - if(warmupTimeMS==null||warmupTimeMS==0l) { - return 0; - } - if(warmupWorkers.size()==0) { - return 0; - } - LOGGER.info("[TaskID: {{}}] will start {{}}ms warmup now using {} no of workers in total.", taskID, warmupTimeMS, warmupWorkers.size()); - return executeWarmup(warmupWorkers); - } - - - private long executeWarmup(List warmupWorkers) { - ExecutorService exec = Executors.newFixedThreadPool(2); - for(Worker worker : warmupWorkers) { - exec.submit(worker); - } - //wait as long as needed - Instant start = Instant.now(); - exec.shutdown(); - while(durationInMilliseconds(start, Instant.now()) <= warmupTimeMS) { - //clean up RAM - for(Worker worker: warmupWorkers) { - worker.popQueryResults(); - } - try { - TimeUnit.MILLISECONDS.sleep(50); - }catch(Exception e) { - LOGGER.error("Could not warmup "); - } - } - for(Worker worker : warmupWorkers) { - worker.stopSending(); - } - try { - exec.awaitTermination(5, TimeUnit.SECONDS); - - } catch (InterruptedException e) { - LOGGER.warn("[TaskID: {{}}] Warmup. Could not await Termination of Workers.", taskID); - } - try { - exec.shutdownNow(); - }catch(Exception e1) { - LOGGER.error("Shutdown problems ", e1); - } - //clear up - long queriesExec = 0; - for(Worker w : warmupWorkers){ - queriesExec+=w.getExecutedQueries(); - } - warmupWorkers.clear(); - LOGGER.info("[TaskID: {{}}] Warmup finished.", taskID); - return queriesExec; - } - - /** - * Checks if restriction (e.g. 
time limit or noOfQueryMixes for each Worker) - * occurs - * - * @return true if restriction occurs, false otherwise - */ - protected boolean isFinished() { - if (timeLimit !=null) { - - Instant current = Instant.now(); - double passed_time = timeLimit - durationInMilliseconds(this.startTime, current); - return passed_time <= 0D; - } - else if (noOfQueryMixes != null) { - - // use noOfQueries of SPARQLWorkers (as soon as a worker hits the noOfQueries, it - // will stop sending results - // UpdateWorker are allowed to execute all their updates - boolean endFlag=true; - for (Worker worker : workers) { - long queriesInMix = 0; - - LOGGER.debug("Executed queries: {}, noOfQueryMixes: {}", worker.getExecutedQueries(), noOfQueryMixes); - //Check for each worker if it has executed the given number of query mixes - if (worker.hasExecutedNoOfQueryMixes(noOfQueryMixes)) { - if(!worker.isTerminated()) { - //if the worker was not already terminated, send the last results, as they will not be sent afterwards - sendWorkerResult(worker); - } - worker.stopSending(); - } - else { - endFlag = false; - } - - } - return endFlag; - } - LOGGER.error("Neither time limit nor noOfQueryMixes is set. Ending task now."); - return true; - } - - public long getExecutedQueries(){ - long ret = 0; - for(Worker worker: workers){ - ret += worker.getExecutedQueries(); - } - return ret; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/CLIProcessManager.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/CLIProcessManager.java deleted file mode 100644 index 42a8bdd61..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/CLIProcessManager.java +++ /dev/null @@ -1,137 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.apache.commons.lang.SystemUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.*; -import java.util.ArrayList; -import java.util.List; - -/** - * CLI Utils class - */ -public class CLIProcessManager { - - private static final Logger LOGGER = LoggerFactory.getLogger(CLIProcessManager.class); - - /** - * Creates a process - * @param command the command to start the process with - * @return the created process - */ - public static Process createProcess(String command) { - ProcessBuilder processBuilder = new ProcessBuilder(); - processBuilder.redirectErrorStream(true); - - Process process = null; - try { - if (SystemUtils.IS_OS_LINUX) { - - processBuilder.command("bash", "-c", command); - - } else if (SystemUtils.IS_OS_WINDOWS) { - processBuilder.command("cmd.exe", "/c", command); - } - process = processBuilder.start(); - - } catch (IOException e) { - LOGGER.error("New process could not be created: {}", e); - } - - return process; - } - - /** - * Destroys a process forcibly - * @param process - */ - public static void destroyProcess(Process process) { - process.destroyForcibly(); - } - - /** - * Short handler for destroyProcess and createProcess - * @param process - * @param command - * @return - */ - public static Process destroyAndCreateNewProcess(Process process, String command) { - destroyProcess(process); - return createProcess(command); - } - - /** - * Create n processes of the same command - * @param n the amount of processes created - * @param command the command to create the process with - * @return - */ - public static List<Process> createProcesses(int n, String command) { - List<Process> processList = new ArrayList<>(5); - for (int i = 0; i < n; i++) { - processList.add(createProcess(command)); - } - - return processList; - } - - /** - * Counts and returns the no. 
of lines of one process until a certain string appears, - * @param process - * @param successString the string after which the no. of lines should be returned - * @param errorString the error string; an IOException is thrown if it appears. - * @return - * @throws IOException - */ - public static long countLinesUntilStringOccurs(Process process, String successString, String errorString) throws IOException { - String line; - LOGGER.debug("Will look for: {} or as error: {}",successString, errorString); - StringBuilder output = new StringBuilder(); - - long size = -1; - BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); - - try { - while ((line = reader.readLine()) != null) { - if (line.contains(errorString)) { - LOGGER.debug("Found error"); - LOGGER.debug("Query finished with {}", errorString); - - throw new IOException(line); - } else if (line.contains(successString)) { - LOGGER.debug("Query finished with {}", successString); - break; - } - - // Only save first 1000 lines of the output - if (size < 1000) { - output.append(line).append("\n"); - } - size++; - } - - } catch (IOException e) { - LOGGER.debug("Exception in reading the output of the process. ", e); - throw e; - } - - return size; - } - - public static void executeCommand(Process process, String command) throws IOException { - BufferedWriter output = new BufferedWriter(new OutputStreamWriter(process.getOutputStream())); - output.write(command + "\n"); - output.flush(); - } - - /** - * Checks if the process input stream is ready to be read. - * @param process - * @return - * @throws IOException - */ - public static boolean isReaderReady(Process process) throws IOException { - return new BufferedReader(new InputStreamReader(process.getInputStream())).ready(); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/FileUtils.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/FileUtils.java deleted file mode 100644 index 69c2a9a94..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/FileUtils.java +++ /dev/null @@ -1,105 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import java.io.*; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Paths; - -/** - * Methods to work easier with Files. - * - * @author f.conrads - * - */ -public class FileUtils { - - /** - * Counts the lines in a file efficiently. Props go to: - * http://stackoverflow.com/a/453067/2917596 - * - * - * @param filename File to count lines of - * @return No. of lines in File - * @throws IOException - */ - public static int countLines(File filename) throws IOException { - try (InputStream is = new BufferedInputStream(new FileInputStream(filename));) { - - byte[] c = new byte[1024]; - int count = 0; - int readChars = 0; - boolean empty = true; - byte lastChar = '\n'; - while ((readChars = is.read(c)) != -1) { - for (int i = 0; i < readChars; ++i) { - if (c[i] == '\n') { - // Check if line was empty - if (lastChar != '\n') { - ++count; - } - } else { - empty = false; - } - lastChar = c[i]; - } - } - if (lastChar != '\n') { - count++; - } - return (count == 0 && !empty) ? 
1 : count; - } - } - - /** - * Returns a line at a given position of a File - * - * - * @param pos line which should be returned - * @param filename File in which the queries are stated - * @return line at pos - * @throws IOException - */ - public static String readLineAt(int pos, File filename) throws IOException { - try (InputStream is = new BufferedInputStream(new FileInputStream(filename));){ - StringBuilder line = new StringBuilder(); - - byte[] c = new byte[1024]; - int count = 0; - int readChars = 0; - byte lastChar = '\n'; - while ((readChars = is.read(c)) != -1) { - for (int i = 0; i < readChars; ++i) { - if (c[i] == '\n') { - // Check if line was empty - if (lastChar != '\n') { - ++count; - } - } else if (count == pos) { - // we are within the requested line; collect its characters - line.append((char) c[i]); - } - lastChar = c[i]; - } - } - - return line.toString(); - } - } - - public static int getHashcodeFromFileContent(String filepath) { - int hashcode; - try { - String fileContents = readFile(filepath); - hashcode = Math.abs(fileContents.hashCode()); - } catch (IOException e) { - hashcode = 0; - } - return hashcode; - } - - public static String readFile(String path) throws IOException { - byte[] encoded = Files.readAllBytes(Paths.get(path)); - return new String(encoded, StandardCharsets.UTF_8); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/ResultSizeRetriever.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/ResultSizeRetriever.java deleted file mode 100644 index 097843606..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/ResultSizeRetriever.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.apache.jena.query.QueryExecution; -import org.apache.jena.query.QueryExecutionFactory; -import org.apache.jena.query.ResultSetFormatter; - -import java.io.BufferedReader; -import java.io.FileReader; -import java.io.PrintWriter; - -/** - * Util class to retrieve the result size for a query file against a SPARQL endpoint. 
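
countLines and readLineAt above deliberately skip empty lines; a small self-contained sketch of the resulting behavior (the temp-file content is illustrative):

import org.aksw.iguana.cc.utils.FileUtils;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class FileUtilsDemo {
    public static void main(String[] args) throws IOException {
        File f = File.createTempFile("queries", ".sparql");
        f.deleteOnExit();
        // the blank middle line is not counted as a query line
        Files.write(f.toPath(),
                "SELECT ?s { ?s ?p ?o }\n\nASK { ?s ?p ?o }\n".getBytes(StandardCharsets.UTF_8));
        System.out.println(FileUtils.countLines(f));    // prints 2
        System.out.println(FileUtils.readLineAt(1, f)); // prints ASK { ?s ?p ?o }
    }
}
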
- */ -public class ResultSizeRetriever { - - public static void main(String[] args) { - if(args.length!=3) { - System.out.println("resretriever.sh http://endpoint queryfile.sparql outputfile.tsv"); - return; - } - int i=0; - try(BufferedReader reader = new BufferedReader(new FileReader(args[1]));PrintWriter pw = new PrintWriter(args[2])){ - String line; - while((line=reader.readLine())!=null) { - if(line.isEmpty()) { - continue; - } - try { - pw.println(i+"\t"+retrieveSize(args[0], line)); - }catch(Exception e) { - pw.println(i+"\t?"); - e.printStackTrace(); - } - System.out.println(i+" done"); - i++; - } - - }catch(Exception e) { - e.printStackTrace(); - } - } - - - public static int retrieveSize(String endpoint, String query) { - QueryExecution exec = QueryExecutionFactory.sparqlService(endpoint, query); - return ResultSetFormatter.consume(exec.execSelect()); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/SPARQLQueryStatistics.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/SPARQLQueryStatistics.java deleted file mode 100644 index 14b58f8af..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/SPARQLQueryStatistics.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.apache.jena.query.Query; -import org.apache.jena.sparql.syntax.ElementWalker; - -/** - * Simple SPARQL Query statistics - */ -public class SPARQLQueryStatistics { - - public int aggr=0; - public int filter=0; - public int optional=0; - public int union=0; - public int having=0; - public int groupBy=0; - public int offset=0; - public double size=0.0; - public int orderBy=0; - public int triples=0; - - - /** - * Will add the stats of the provided query to this statistics count. - * @param q - */ - public void getStatistics(Query q) { - if(q.isSelectType()) { - - size++; - offset+=q.hasOffset()?1:0; - aggr+=q.hasAggregators()?1:0; - groupBy+=q.hasGroupBy()?1:0; - having+=q.hasHaving()?1:0; - orderBy+=q.hasOrderBy()?1:0; - - StatisticsVisitor visitor = new StatisticsVisitor(); - visitor.setElementWhere(q.getQueryPattern()); - ElementWalker.walk(q.getQueryPattern(), visitor); - - union+=visitor.union?1:0; - optional+=visitor.optional?1:0; - filter+=visitor.filter?1:0; - triples += visitor.bgps; - - } - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/StatisticsVisitor.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/StatisticsVisitor.java deleted file mode 100644 index c1d033b0b..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/utils/StatisticsVisitor.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.apache.jena.sparql.syntax.*; - - -/** - * Simple visitor to check if simple statistics of a SPARQL Query appeared. 
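
A usage sketch for the statistics collector above, assuming the visitor recurses into nested groups as the deleted RecursiveElementVisitor suggests; the expected counter values are annotated:

import org.aksw.iguana.cc.utils.SPARQLQueryStatistics;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryFactory;

public class StatisticsDemo {
    public static void main(String[] args) {
        Query q = QueryFactory.create(
                "SELECT * WHERE { ?s ?p ?o . OPTIONAL { ?s ?p2 ?o2 } FILTER(?o != \"x\") }");
        SPARQLQueryStatistics stats = new SPARQLQueryStatistics();
        stats.getStatistics(q);
        // expected for this query: optional=1, filter=1, triples=2
        System.out.printf("optional=%d filter=%d triples=%d%n",
                stats.optional, stats.filter, stats.triples);
    }
}
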
- */ -public class StatisticsVisitor extends RecursiveElementVisitor{ - - public boolean filter; - public boolean regexFilter=false; - public boolean cmpFilter=false; - public boolean union; - public boolean optional; - private boolean started; - private Element where; - public int bgps; - - public StatisticsVisitor() { - super(new ElementVisitorBase()); - } - - public void startElement(ElementGroup el) { - if (!started && el.equals(where)) { - // root element found - started = true; - - } - } - - public void setElementWhere(Element el) { - this.where = el; - } - - public void endElement(ElementPathBlock el) { - - if (started) { - bgps+=el.getPattern().getList().size(); - } - - } - - public void startElement(ElementFilter el) {this.filter=true;el.getExpr();} - public void startElement(ElementUnion el) {this.union=true;} - public void startElement(ElementOptional el) {this.optional=true;} - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractRandomQueryChooserWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractRandomQueryChooserWorker.java deleted file mode 100644 index a6d322bf5..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractRandomQueryChooserWorker.java +++ /dev/null @@ -1,47 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.utils.FileUtils; - -import java.io.File; -import java.io.IOException; -import java.util.Random; - -public abstract class AbstractRandomQueryChooserWorker extends AbstractWorker { - - protected int currentQueryID; - protected Random queryChooser; - - - public AbstractRandomQueryChooserWorker(String taskID, Connection connection, String queriesFile, Integer timeOut, Integer timeLimit, Integer fixedLatency, Integer gaussianLatency, String workerType, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, workerType, workerID); - queryChooser = new Random(this.workerID); - - } - - @Override - public void setQueriesList(QuerySet[] queries) { - super.setQueriesList(queries); - this.currentQueryID = queryChooser.nextInt(this.queryFileList.length); - } - - - @Override - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException { - // get next Query File and next random Query out of it. - QuerySet currentQueryFile = this.queryFileList[this.currentQueryID++]; - queryID.append(currentQueryFile.getName()); - - int queriesInFile = currentQueryFile.size(); - int queryLine = queryChooser.nextInt(queriesInFile); - queryStr.append(currentQueryFile.getQueryAtPos(queryLine)); - - // If there is no more query(Pattern) start from beginning. 
- if (this.currentQueryID >= this.queryFileList.length) { - this.currentQueryID = 0; - } - - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractWorker.java deleted file mode 100644 index 1a6b2dae7..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/AbstractWorker.java +++ /dev/null @@ -1,313 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.cc.config.CONSTANTS; -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.utils.FileUtils; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.http.HttpHost; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.AuthCache; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.protocol.HttpClientContext; -import org.apache.http.impl.auth.BasicScheme; -import org.apache.http.impl.client.BasicAuthCache; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.protocol.HttpContext; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.net.URI; -import java.net.URISyntaxException; -import java.time.Instant; -import java.util.Collection; -import java.util.LinkedList; -import java.util.Properties; -import java.util.Random; - - -/** - * The Abstract Worker which will implement the runnable, the main loop, the - * time to wait before a query and will send the results to the ResultProcessor - * module
- * so the Implemented Workers only need to implement which query to test next - * and how to test this query. - * - * @author f.conrads - * - */ -public abstract class AbstractWorker implements Worker { - - /** - * Logger which should be used - */ - protected static final Logger LOGGER = LoggerFactory.getLogger(AbstractWorker.class); - - protected boolean endSignal = false; - protected long executedQueries; - - private Collection results = new LinkedList(); - protected String taskID; - - /** - * The worker Type. f.e. SPARQL or UPDATE or SQL or whatever - */ - protected String workerType; - /** - * The unique ID of the worker, should be from 0 to n - */ - protected Integer workerID; - protected Properties extra = new Properties(); - - private Integer fixedLatency=0; - - private Integer gaussianLatency=0; - - private Random latencyRandomizer; - private Long endAtNOQM = null; - - /** - * List which contains all Files representing one query(Pattern) - */ - protected QuerySet[] queryFileList; - - protected Double timeLimit; - - protected Instant startTime; - - protected String queriesFileName; - - protected Connection con; - - protected Double timeOut=180000D; - - protected int queryHash; - - public AbstractWorker(String taskID, Connection connection, String queriesFile, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, String workerType, Integer workerID) { - this.taskID=taskID; - this.workerID = workerID; - this.workerType = workerType; - this.con = connection; - if (timeLimit != null){ - this.timeLimit = timeLimit.doubleValue(); - } - latencyRandomizer = new Random(this.workerID); - if(timeOut!=null) - this.timeOut = timeOut.doubleValue(); - // Add latency Specs, add defaults - if(fixedLatency!=null) - this.fixedLatency = fixedLatency; - if(gaussianLatency!=null) - this.gaussianLatency = gaussianLatency; - // set Query file/folder Name - this.queriesFileName = queriesFile; - LOGGER.debug("Initialized new Worker[{{}} : {{}}] for taskID {{}}", workerType, workerID, taskID); - } - - - @Override - public void waitTimeMs() { - Double wait = this.fixedLatency.doubleValue(); - double gaussian = latencyRandomizer.nextDouble(); - wait += (gaussian * 2) * this.gaussianLatency; - LOGGER.debug("Worker[{} : {}]: Time to wait for next Query {}", workerType, workerID, wait); - try { - if(wait>0) - Thread.sleep(wait.intValue()); - } catch (InterruptedException e) { - LOGGER.error("Worker[{{}} : {}]: Could not wait time before next query due to: {}", workerType, - workerID, e); - } - } - - /** - * This will start the worker. It will get the next query, wait as long as it - * should wait before executing the next query, then it will test the query and - * send it if not aborted yet to the ResultProcessor Module - * - */ - public void startWorker() { - // set extra meta key to send late - this.extra = new Properties(); - this.extra.put(CONSTANTS.WORKER_ID_KEY, workerID); - this.extra.setProperty(CONSTANTS.WORKER_TYPE_KEY, workerType); - this.extra.put(CONSTANTS.WORKER_TIMEOUT_MS, timeOut); - if(this.queryFileList!=null) - this.extra.put(COMMON.NO_OF_QUERIES, this.queryFileList.length); - // For Update and Logging purpose get startTime of Worker - this.startTime = Instant.now(); - - this.queryHash = FileUtils.getHashcodeFromFileContent(this.queriesFileName); - - LOGGER.info("Starting Worker[{{}} : {{}}].", this.workerType, this.workerID); - // Execute Queries as long as the Stresstest will need. 
- while (!this.endSignal && !hasExecutedNoOfQueryMixes(this.endAtNOQM)) { - // Get next query - StringBuilder query = new StringBuilder(); - StringBuilder queryID = new StringBuilder(); - try { - getNextQuery(query, queryID); - // check if endsignal was triggered - if (this.endSignal) { - break; - } - } catch (IOException e) { - LOGGER.error( - "Worker[{{}} : {{}}] : Something went terribly wrong in getting the next query. Worker will be shut down.", - this.workerType, this.workerID); - LOGGER.error("Error which occurred:", e); - break; - } - // Simulate Network Delay (or whatever should be simulated) - waitTimeMs(); - - // benchmark query - try { - executeQuery(query.toString(), queryID.toString()); - } catch (Exception e) { - LOGGER.error("Worker[{{}} : {{}}] : ERROR with query: {{}}", this.workerType, this.workerID, - query.toString()); - } - //this.executedQueries++; - } - LOGGER.info("Stopping Worker[{{}} : {{}}].", this.workerType, this.workerID); - } - - protected HttpContext getAuthContext(String endpoint){ - HttpClientContext context = HttpClientContext.create(); - - if(con.getPassword()!=null && con.getUser()!=null && !con.getPassword().isEmpty() && !con.getUser().isEmpty()) { - CredentialsProvider provider = new BasicCredentialsProvider(); - - provider.setCredentials(new AuthScope(AuthScope.ANY_HOST, AuthScope.ANY_PORT), - new UsernamePasswordCredentials(con.getUser(), con.getPassword())); - - //create target host - String targetHost = endpoint; - try { - URI uri = new URI(endpoint); - targetHost = uri.getScheme() + "://" + uri.getHost() + ":" + uri.getPort(); - } catch (URISyntaxException e) { - e.printStackTrace(); - } - //set Auth cache - AuthCache authCache = new BasicAuthCache(); - BasicScheme basicAuth = new BasicScheme(); - authCache.put(HttpHost.create(targetHost), basicAuth); - - context.setCredentialsProvider(provider); - context.setAuthCache(authCache); - - } - return context; - } - - public synchronized void addResults(QueryExecutionStats results) - { - if (!this.endSignal && !hasExecutedNoOfQueryMixes(this.endAtNOQM)) { - // create Properties and store them in the list - Properties result = new Properties(); - result.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, this.taskID); - result.put(COMMON.RECEIVE_DATA_TIME, results.getExecutionTime()); - result.put(COMMON.RECEIVE_DATA_SUCCESS, results.getResponseCode()); - result.put(COMMON.RECEIVE_DATA_SIZE, results.getResultSize()); - result.put(COMMON.QUERY_HASH, queryHash); - result.setProperty(COMMON.QUERY_ID_KEY, results.getQueryID()); - result.put(COMMON.PENALTY, this.timeOut); - // Add extra Meta Key, worker ID and worker Type - result.put(COMMON.EXTRA_META_KEY, this.extra); - setResults(result); - executedQueries++; - - // - if(getNoOfQueries() > 0 && getExecutedQueries() % getNoOfQueries() == 0 ){ - LOGGER.info("Worker executed {} queryMixes", getExecutedQueries()*1.0/getNoOfQueries()); - } - } - } - - protected synchronized void setResults(Properties result) { - results.add(result); - } - - @Override - public synchronized Collection popQueryResults() { - if(results.isEmpty()){ - return null; - } - Collection ret = this.results; - this.results = new LinkedList(); - return ret; - } - - @Override - public long getExecutedQueries() { - return this.executedQueries; - } - - @Override - public void stopSending() { - this.endSignal = true; - LOGGER.debug("Worker[{{}} : {{}}] got stop signal.", workerType, workerID); - } - - @Override - public boolean isTerminated(){ - return this.endSignal; - } - - - @Override - public void run() { - 
startWorker(); - } - - /** - * Returns the queries file name/update path - * - * @return file name/update path - */ - public String getQueriesFileName() { - return this.queriesFileName; - } - - /** - * Sets the Query Instances repr. in Files. - * - * @param queries - * Query sets containing the query instances. - */ - public void setQueriesList(QuerySet[] queries) { - this.queryFileList = queries; - } - - /** - * The number of Queries in one mix - * - * @return - */ - public long getNoOfQueries() { - if(this.queryFileList == null){ - return 0; - } - return this.queryFileList.length; - } - - @Override - public boolean hasExecutedNoOfQueryMixes(Long noOfQueryMixes){ - if(noOfQueryMixes==null){ - return false; - } - return getExecutedQueries() / (getNoOfQueries() * 1.0) >= noOfQueryMixes; - } - - @Override - public void endAtNoOfQueryMixes(Long noOfQueryMixes){ - this.endAtNOQM=noOfQueryMixes; - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/LatencyStrategy.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/LatencyStrategy.java deleted file mode 100644 index dfcca2a14..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/LatencyStrategy.java +++ /dev/null @@ -1,27 +0,0 @@ -package org.aksw.iguana.cc.worker; - -/** - * The Strategy Names to simulate different network latency behaviors - * - * @author f.conrads - * - */ -public enum LatencyStrategy { - /** - * No Latency should be simulated - */ - NONE, - - /** - * A fixed time/ms should be waited between queries (time is the latency base value) - */ - FIXED, - - /** - * The time/ms should be calculated randomly each time - * out of a gaussian interval based on the latency base value as follows - * - * [0;2*latencyBaseValue] - */ - VARIABLE -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/Worker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/Worker.java deleted file mode 100644 index 077f58d76..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/Worker.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.cc.tasks.impl.Stresstest; - -import java.io.IOException; -import java.util.Collection; -import java.util.Properties; - -/** - * Interface for the Worker Thread used in the {@link Stresstest} - * - * @author f.conrads - * - */ -public interface Worker extends Runnable{ - - - /** - * This method executes a query and adds the results to the Result Processor for proper result and metric calculations. - * Note: Some of the Worker implementations employ background threads to process the result of the query. - * Due to this, this method does not return anything and each implementation of this method must also add the - * results to Result Processor within this method. This can be done by calling AbstractWorker.addResults(QueryExecutionStats) - * @param query The query which should be executed - * @param queryID the ID of the query which should be executed - */ - public void executeQuery(String query, String queryID); - - /** - * This method saves the next query in the queryStr StringBuilder and - * the query id in the queryID. - * - * @param queryStr The query should be stored in here! - * @param queryID The queryID should be stored in here! - * @throws IOException - */ - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException; - - - 
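
The mix accounting used by hasExecutedNoOfQueryMixes above is plain division over the query-set size; a tiny worked sketch (the numbers are illustrative):

public class QueryMixMath {
    public static void main(String[] args) {
        long noOfQueries = 40;  // queries in one mix (size of the query set)
        long executed = 120;    // queries executed so far
        long targetMixes = 3;
        // mirrors AbstractWorker.hasExecutedNoOfQueryMixes: 120 / 40.0 = 3.0 >= 3
        boolean done = executed / (noOfQueries * 1.0) >= targetMixes;
        System.out.println(done); // true
    }
}
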
/** - * This should stop the next sending process. - * If an execution started before this method was called, but answered after, it should not be counted! - */ - public void stopSending(); - - /** - * This will simulate the Time in ms to wait before testing the next query. - * It can be used to simulate network delay. - */ - public void waitTimeMs(); - - - /** - * This will return the amount of executed queries so far - * - * @return no. of executed queries - */ - public long getExecutedQueries(); - - /** - * Get and remove all internal stored results of finished queries - * - * @return list of Properties to send to RabbitMQ - */ - public Collection popQueryResults(); - - boolean isTerminated(); - - /** - * Returns the no of queries in the queryset of the worker - * @return - */ - long getNoOfQueries(); - - /** - * Returns whether the given no. of query mixes has already been executed - * @param noOfQueryMixes - * @return - */ - boolean hasExecutedNoOfQueryMixes(Long noOfQueryMixes); - - - /** - * Sets the end restriction - * - * @param noOfQueryMixes - */ - void endAtNoOfQueryMixes(Long noOfQueryMixes); - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/WorkerFactory.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/WorkerFactory.java deleted file mode 100644 index 401cf5fdb..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/WorkerFactory.java +++ /dev/null @@ -1,14 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.commons.factory.TypedFactory; - -/** - * Factory to create a {@link Worker} - * - * @author f.conrads - * - */ -public class WorkerFactory extends TypedFactory<Worker>{ - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputFileWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputFileWorker.java deleted file mode 100644 index e1bf1a9d6..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputFileWorker.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; - -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; - -/** - * Worker to execute a query against a CLI process; the connection.service will be the command to execute the query against. - * - * Assumes that the CLI process won't stop but will just accept queries one after another and return the results in the CLI output. - * It also assumes that the query has to be read from a file instead of plain input. - * - * This worker can be set to be created multiple times in the background, so that if one process throws an error, a backup process has already been created and can be used. - * This is handy if the process doesn't just print an error message, but simply exits. 
- * - */ -@Shorthand("CLIInputFileWorker") -public class CLIInputFileWorker extends MultipleCLIInputWorker { - - - private String dir; - - public CLIInputFileWorker(String taskID, Connection connection, String queriesFile, String initFinished, String queryFinished, String queryError, @Nullable Integer numberOfProcesses, String directory, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, initFinished,queryFinished,queryError, numberOfProcesses,timeOut, timeLimit, fixedLatency, gaussianLatency, "CLIInputFileWorker", workerID); - this.dir = directory; - } - - @Override - protected String writableQuery(String query) { - File f; - - try { - new File(dir).mkdirs(); - f = new File(dir+File.separator+"tmpquery.sparql"); - f.createNewFile(); - f.deleteOnExit(); - try(PrintWriter pw = new PrintWriter(f)){ - pw.print(query); - } - return f.getName(); - } catch (IOException e) { - e.printStackTrace(); - } - - return query; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputPrefixWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputPrefixWorker.java deleted file mode 100644 index 7c7cc9af8..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputPrefixWorker.java +++ /dev/null @@ -1,36 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; - -/** - * Worker to execute a query against a CLI process; the connection.service will be the command to execute the query against. - * - * Assumes that the CLI process won't stop but will just accept queries one after another and return the results in the CLI output. - * It also assumes that the query has to be prefixed and suffixed. - * For example: SPARQL SELECT * {?s ?p ?o} ; whereas 'SPARQL' is the prefix and ';' is the suffix. - * - * This worker can be set to be created multiple times in the background, so that if one process throws an error, a backup process has already been created and can be used. - * This is handy if the process doesn't just print an error message, but simply exits. - * - */
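
The wrapping this prefix/suffix worker applies is plain concatenation; for the Virtuoso-style example from the comment above:

public class PrefixSuffixDemo {
    public static void main(String[] args) {
        String prefix = "SPARQL", suffix = ";";
        String query = "SELECT * {?s ?p ?o}";
        // mirrors CLIInputPrefixWorker.writableQuery: prefix + " " + query + " " + suffix
        System.out.println(prefix + " " + query + " " + suffix);
        // -> SPARQL SELECT * {?s ?p ?o} ;
    }
}
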
-@Shorthand("CLIInputPrefixWorker") -public class CLIInputPrefixWorker extends MultipleCLIInputWorker { - - private String prefix; - private String suffix; - - public CLIInputPrefixWorker(String taskID, Connection connection, String queriesFile, String initFinished, String queryFinished, String queryError, @Nullable Integer numberOfProcesses, String queryPrefix, String querySuffix, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, initFinished,queryFinished,queryError, numberOfProcesses,timeOut, timeLimit, fixedLatency, gaussianLatency, "CLIInputPrefixWorker", workerID); - this.prefix=queryPrefix; - this.suffix=querySuffix; - } - - @Override - protected String writableQuery(String query) { - return prefix+" "+query+" "+suffix; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputWorker.java deleted file mode 100644 index 45780098e..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIInputWorker.java +++ /dev/null @@ -1,139 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.utils.CLIProcessManager; -import org.aksw.iguana.cc.worker.AbstractRandomQueryChooserWorker; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.time.Instant; -import java.util.Random; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - -/** - * Worker to execute a query against a CLI process; the connection.service will be the command to execute the query against. - * - * Assumes that the CLI process won't stop but will just accept queries one after another and return the results in the CLI output. 
- * - */ -@Shorthand("CLIInputWorker") -public class CLIInputWorker extends AbstractRandomQueryChooserWorker { - - private Logger LOGGER = LoggerFactory.getLogger(getClass()); - - private int currentQueryID; - private Random queryChooser; - private Process process; - private String initFinished; - private String queryFinished; - private String error; - - public CLIInputWorker(String taskID, Connection connection, String queriesFile, String initFinished, String queryFinished, String queryError, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, "CLIInputWorker", workerID); - queryChooser = new Random(this.workerID); - this.initFinished = initFinished; - this.queryFinished = queryFinished; - this.error = queryError; - this.setWorkerProperties(); - - } - - private void setWorkerProperties() - { - queryChooser = new Random(this.workerID); - - // Create a CLI process, initialize it - this.process = CLIProcessManager.createProcess(this.con.getEndpoint()); - try { - CLIProcessManager.countLinesUntilStringOccurs(process, this.initFinished, this.error); //Init - } catch (IOException e) { - LOGGER.error("Exception while trying to wait for init of CLI Process",e); - } - } - - @Override - public void executeQuery(String query, String queryID) { - Instant start = Instant.now(); - - try { - // Create background thread that will watch the output of the process and prepare results - AtomicLong size = new AtomicLong(-1); - AtomicBoolean failed = new AtomicBoolean(false); - ExecutorService executor = Executors.newSingleThreadExecutor(); - executor.execute(new Runnable() { - - @Override - public void run() { - try { - LOGGER.debug("Process Alive: {}", process.isAlive()); - LOGGER.debug("Reader ready: {}", CLIProcessManager.isReaderReady(process)); - size.set(CLIProcessManager.countLinesUntilStringOccurs(process, queryFinished, error)); - } catch (IOException e) { - failed.set(true); - } - } - }); - - // Execute the query on the process - try { - if (process.isAlive()) { - CLIProcessManager.executeCommand(process, writableQuery(query)); - } else if (this.endSignal) { - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - return; - } else { - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - return; - } - } finally { - executor.shutdown(); - executor.awaitTermination((long)(double)this.timeOut, TimeUnit.MILLISECONDS); - } - - // At this point, query is executed and background thread has processed the results. - // Next, calculate time for benchmark. 
- double duration = durationInMilliseconds(start, Instant.now()); - - if (duration >= timeOut) { - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_SOCKET_TIMEOUT, duration )); - return; - } else if (failed.get()) { - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, duration )); - return; - } - - // SUCCESS - LOGGER.debug("Query successfully executed size: {}", size.get()); - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_SUCCESS, duration, size.get() )); - return; - } catch (IOException | InterruptedException e) { - LOGGER.warn("Exception while executing query ",e); - // ERROR - super.addResults(new QueryExecutionStats (queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - } - } - - - protected String writableQuery(String query) { - return query; - } - - - - @Override - public void stopSending() { - super.stopSending(); - process.destroyForcibly(); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIWorker.java deleted file mode 100644 index 329b67952..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/CLIWorker.java +++ /dev/null @@ -1,114 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.worker.AbstractRandomQueryChooserWorker; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.InputStreamReader; -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.time.Instant; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - -/** - * Worker to execute a query against a CLI process; the connection.service will be the command to execute the query against. - * - * The command may look like the following:
- * cliprocess.sh $QUERY$ $USER$ $PASSWORD$ - *
- * whereas $QUERY$ will be exchanged with the actual query as well as user and password. - * Further on it is possible to encode the query using $ENCODEDQUERY$ instead of $QUERY$ - * - */ -@Shorthand("CLIWorker") -public class CLIWorker extends AbstractRandomQueryChooserWorker { - - private Logger LOGGER = LoggerFactory.getLogger(getClass()); - - - public CLIWorker(String taskID, Connection connection, String queriesFile, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, "CLIWorker", workerID); - } - - - @Override - public void executeQuery(String query, String queryID) { - Instant start = Instant.now(); - // use cli as service - String encodedQuery = ""; - try { - encodedQuery = URLEncoder.encode(query, "UTF-8"); - } catch (UnsupportedEncodingException e1) { - LOGGER.error("Could not encode Query", e1); - } - String queryCLI = getReplacedQuery(query, encodedQuery); - // execute queryCLI and read response - ProcessBuilder processBuilder = new ProcessBuilder().redirectErrorStream(true); - processBuilder.command(new String[] { "bash", "-c", queryCLI }); - try { - - Process process = processBuilder.start(); - - StringBuilder output = new StringBuilder(); - long size = -1; - - try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) { - - String line; - // -1 as the first line should be the header - while ((line = reader.readLine()) != null) { - - output.append(line + "\n"); - size++; - } - } catch (Exception e) { - e.printStackTrace(); - } - int exitVal = process.waitFor(); - if (exitVal == 0) { - LOGGER.debug("Query successfully executed size: {}", size); - } else { - LOGGER.warn("Exit Value of Process was not 0, was {} ", exitVal); - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - return; - } - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_SUCCESS, durationInMilliseconds(start, Instant.now()), size )); - return; - } catch (Exception e) { - LOGGER.warn("Unknown Exception while executing query", e); - } - // ERROR - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - } - - private String getReplacedQuery(String query, String encodedQuery) { - String queryCLI = this.con.getEndpoint().replace("$QUERY$", query); - queryCLI = queryCLI.replace("$ENCODEDQUERY$", encodedQuery); - - if (this.con.getUser() != null) { - queryCLI = queryCLI.replace("$USER$", this.con.getUser()); - } - else{ - queryCLI = queryCLI.replace("$USER$", ""); - - } - if (this.con.getPassword() != null) { - queryCLI = queryCLI.replace("$PASSWORD$", this.con.getPassword()); - } - else{ - queryCLI = queryCLI.replace("$PASSWORD$", ""); - - } - return queryCLI; - - } - - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpGetWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpGetWorker.java deleted file mode 100644 index f6d5b544c..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpGetWorker.java +++ /dev/null @@ -1,63 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.lang.LanguageProcessor; -import org.aksw.iguana.commons.annotation.Nullable; 
-import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.factory.TypedFactory; -import org.apache.http.HttpHeaders; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.HttpGet; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.nio.charset.StandardCharsets; -import java.util.HashMap; - - -/** - * HTTP Get Worker. - * Uses HTTP Get to execute a Query.
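
For reference, the GET request this worker builds boils down to endpoint + "?query=" + the URL-encoded query; a self-contained sketch (the endpoint is a placeholder):

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class GetRequestDemo {
    public static void main(String[] args) {
        String endpoint = "http://localhost:3030/ds/sparql"; // placeholder endpoint
        String query = "SELECT * WHERE { ?s ?p ?o }";
        // '&' instead of '?' if the endpoint already carries query parameters,
        // as in buildRequest below
        String addChar = endpoint.contains("?") ? "&" : "?";
        String url = endpoint + addChar + "query=" + URLEncoder.encode(query, StandardCharsets.UTF_8);
        System.out.println(url);
    }
}
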
- * if the parameter type was not set it will use 'query' as the parameter as default, otherwise it will use the provided parameter - */ -@Shorthand("HttpGetWorker") -public class HttpGetWorker extends HttpWorker { - - protected String parameter = "query"; - - protected String responseType = null; - - - public HttpGetWorker(String taskID, Connection connection, String queriesFile, @Nullable String responseType, @Nullable String parameterName, @Nullable String language, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, @Nullable String workerType, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, workerType == null ? "HttpGetWorker" : workerType, workerID); - if (language != null) { - resultProcessor = new TypedFactory().create(language, new HashMap()); - } - if (parameterName != null) { - parameter = parameterName; - } - if (responseType != null) { - this.responseType = responseType; - } - } - - void buildRequest(String query, String queryID) throws UnsupportedEncodingException { - String qEncoded = URLEncoder.encode(query, StandardCharsets.UTF_8); - String addChar = "?"; - if (con.getEndpoint().contains("?")) { - addChar = "&"; - } - String url = con.getEndpoint() + addChar + parameter + "=" + qEncoded; - request = new HttpGet(url); - RequestConfig requestConfig = - RequestConfig.custom() - .setSocketTimeout(timeOut.intValue()) - .setConnectTimeout(timeOut.intValue()) - .build(); - - if (this.responseType != null) - request.setHeader(HttpHeaders.ACCEPT, this.responseType); - - request.setConfig(requestConfig); - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpPostWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpPostWorker.java deleted file mode 100644 index e03e85a70..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpPostWorker.java +++ /dev/null @@ -1,60 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.apache.http.HttpHeaders; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.entity.StringEntity; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; - -/** - * HTTP Post worker. - * Uses HTTP posts to execute a query. - *
- * Sends the query as plain POST data if no parameter name was set; otherwise it sends JSON as follows:
- * {PARAMETER: QUERY} - */ -@Shorthand("HttpPostWorker") -public class HttpPostWorker extends HttpGetWorker { - - private String contentType = "text/plain"; - - - public HttpPostWorker(String taskID, Connection connection, String queriesFile, @Nullable String contentType, @Nullable String responseType, @Nullable String parameterName, @Nullable String language, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, @Nullable String workerType, Integer workerID) { - super(taskID, connection, queriesFile, responseType, parameterName, language, timeOut, timeLimit, fixedLatency, gaussianLatency, workerType, workerID); - if (parameterName == null) { - parameter = null; - } - if (contentType != null) { - this.contentType = contentType; - } - } - - void buildRequest(String query, String queryID) throws UnsupportedEncodingException { - StringBuilder data = new StringBuilder(); - if (parameter != null) { - String qEncoded = URLEncoder.encode(query, "UTF-8"); - data.append("{ \"" + parameter + "\": \"").append(qEncoded).append("\"}"); - } else { - data.append(query); - } - StringEntity entity = new StringEntity(data.toString()); - request = new HttpPost(con.getUpdateEndpoint()); - ((HttpPost) request).setEntity(entity); - request.setHeader("Content-Type", contentType); - RequestConfig requestConfig = RequestConfig.custom() - .setSocketTimeout(timeOut.intValue()) - .setConnectTimeout(timeOut.intValue()) - .build(); - - if (this.responseType != null) - request.setHeader(HttpHeaders.ACCEPT, this.responseType); - - request.setConfig(requestConfig); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpWorker.java deleted file mode 100644 index d896c8020..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/HttpWorker.java +++ /dev/null @@ -1,300 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.lang.LanguageProcessor; -import org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.model.QueryResultHashKey; -import org.aksw.iguana.cc.worker.AbstractRandomQueryChooserWorker; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.client.ClientProtocolException; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; -import org.apache.http.impl.conn.BasicHttpClientConnectionManager; -import org.apache.http.message.BasicHeader; -import org.json.simple.parser.ParseException; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.*; -import java.nio.charset.StandardCharsets; -import java.time.Instant; -import java.util.concurrent.*; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - -/** - * Abstract HTTP worker - */ -public abstract class HttpWorker extends AbstractRandomQueryChooserWorker { - - - protected final ExecutorService resultProcessorService = Executors.newFixedThreadPool(5); - 
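
The JSON payload assembled by HttpPostWorker.buildRequest above is just string concatenation around the URL-encoded query; a sketch (values illustrative):

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class PostBodyDemo {
    public static void main(String[] args) {
        String parameter = "query"; // with parameter == null the query is sent verbatim
        String query = "SELECT * WHERE { ?s ?p ?o }";
        String body = "{ \"" + parameter + "\": \""
                + URLEncoder.encode(query, StandardCharsets.UTF_8) + "\"}";
        System.out.println(body); // { "query": "SELECT+*+WHERE+%7B+%3Fs+%3Fp+%3Fo+%7D"}
    }
}
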
protected ScheduledThreadPoolExecutor timeoutExecutorPool = new ScheduledThreadPoolExecutor(1); - protected ConcurrentMap processedResults = new ConcurrentHashMap<>(); - protected LanguageProcessor resultProcessor = new SPARQLLanguageProcessor(); - protected CloseableHttpClient client; - protected HttpRequestBase request; - protected ScheduledFuture abortCurrentRequestFuture; - protected CloseableHttpResponse response; - protected boolean resultsSaved = false; - protected boolean requestTimedOut = false; - protected String queryId; - protected Instant requestStartTime; - protected long tmpExecutedQueries = 0; - - - - public HttpWorker(String taskID, Connection connection, String queriesFile, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, String workerType, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, workerType, workerID); - timeoutExecutorPool.setRemoveOnCancelPolicy(true); - } - - public ConcurrentMap getProcessedResults() { - return processedResults; - } - - protected void setTimeout(int timeOut) { - assert (request != null); - abortCurrentRequestFuture = timeoutExecutorPool.schedule( - () -> { - synchronized (this) { - request.abort(); - requestTimedOut = true; - } - }, - timeOut, TimeUnit.MILLISECONDS); - } - - protected void abortTimeout() { - if (!abortCurrentRequestFuture.isDone()) - abortCurrentRequestFuture.cancel(false); - } - - - @Override - public void stopSending() { - super.stopSending(); - abortTimeout(); - try { - if (request != null && !request.isAborted()) - request.abort(); - } catch (Exception ignored) { - } - closeClient(); - this.shutdownResultProcessor(); - } - - - public void shutdownResultProcessor() { - this.resultProcessorService.shutdown(); - try { - boolean finished = this.resultProcessorService.awaitTermination(3000, TimeUnit.MILLISECONDS); - if (!finished) { - LOGGER.error("Result Processor could be shutdown orderly. Terminating."); - this.resultProcessorService.shutdownNow(); - } - } catch (InterruptedException e) { - LOGGER.error("Could not shut down http result processor: " + e.getLocalizedMessage()); - } - - try { - boolean finished = this.timeoutExecutorPool.awaitTermination(3000, TimeUnit.MILLISECONDS); - if (!finished) { - LOGGER.error("Timeout Executor could be shutdown orderly. 
Terminating."); - this.timeoutExecutorPool.shutdownNow(); - } - } catch (InterruptedException e) { - LOGGER.error("Could not shut down http timout executor: " + e.getLocalizedMessage()); - } - } - - boolean checkResponseStatus() { - int responseCode = response.getStatusLine().getStatusCode(); - if (responseCode == 200) { - return true; - } else { - double duration = durationInMilliseconds(requestStartTime, Instant.now()); - addResultsOnce(new QueryExecutionStats(queryId, COMMON.QUERY_HTTP_FAILURE, duration)); - return false; - } - } - - synchronized protected void addResultsOnce(QueryExecutionStats queryExecutionStats) { - if (!resultsSaved) { - this.addResults(queryExecutionStats); - resultsSaved = true; - } - } - - @Override - public void executeQuery(String query, String queryID) { - queryId = queryID; - resultsSaved = false; - requestTimedOut = false; - - if (client == null) - initClient(); - - try { - buildRequest(query, queryId); - - setTimeout(timeOut.intValue()); - - requestStartTime = Instant.now(); - response = client.execute(request, getAuthContext(con.getEndpoint())); - // method to process the result in background - processHttpResponse(); - - abortTimeout(); - - } catch (ClientProtocolException e) { - handleException(query, COMMON.QUERY_HTTP_FAILURE, e); - } catch (IOException e) { - if (requestTimedOut) { - LOGGER.warn("Worker[{} : {}]: Reached timeout on query (ID {})\n{}", - this.workerType, this.workerID, queryId, query); - addResultsOnce(new QueryExecutionStats(queryId, COMMON.QUERY_SOCKET_TIMEOUT, timeOut)); - } else { - handleException(query, COMMON.QUERY_UNKNOWN_EXCEPTION, e); - } - } catch (Exception e) { - handleException(query, COMMON.QUERY_UNKNOWN_EXCEPTION, e); - } finally { - abortTimeout(); - closeResponse(); - } - } - - private void handleException(String query, Long cause, Exception e) { - double duration = durationInMilliseconds(requestStartTime, Instant.now()); - addResultsOnce(new QueryExecutionStats(queryId, cause, duration)); - LOGGER.warn("Worker[{} : {}]: {} on query (ID {})\n{}", - this.workerType, this.workerID, e.getMessage(), queryId, query); - closeClient(); - initClient(); - } - - protected void processHttpResponse() { - // check if query execution took already longer than timeout - boolean responseCodeOK = checkResponseStatus(); - if (responseCodeOK) { // response status is OK (200) - // get content type header - HttpEntity httpResponse = response.getEntity(); - Header contentTypeHeader = new BasicHeader(httpResponse.getContentType().getName(), httpResponse.getContentType().getValue()); - // get content stream - try (InputStream inputStream = httpResponse.getContent()) { - // read content stream - //Stream in resultProcessor, return length, set string in StringBuilder. - ByteArrayOutputStream responseBody = new ByteArrayOutputStream(); - long length = resultProcessor.readResponse(inputStream, responseBody); - tmpExecutedQueries++; - // check if such a result was already parsed and is cached - double duration = durationInMilliseconds(requestStartTime, Instant.now()); - synchronized (this) { - QueryResultHashKey resultCacheKey = new QueryResultHashKey(queryId, length); - if (processedResults.containsKey(resultCacheKey)) { - LOGGER.debug("found result cache key {} ", resultCacheKey); - Long preCalculatedResultSize = processedResults.get(resultCacheKey); - addResultsOnce(new QueryExecutionStats(queryId, COMMON.QUERY_SUCCESS, duration, preCalculatedResultSize)); - } else { - // otherwise: parse it. The parsing result is cached for the next time. 
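The cache above keys parsed result sizes on the pair of query ID and raw content length, so an identical response body only has to be parsed once. A self-contained sketch of the same pattern, assuming Java 17 (ResultSizeCache and CacheKey are hypothetical stand-ins for the worker's QueryResultHashKey machinery):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.LongSupplier;

public final class ResultSizeCache {
    // Hypothetical stand-in for QueryResultHashKey: query ID plus raw length.
    record CacheKey(String queryId, long contentLength) {}

    private final Map<CacheKey, Long> cache = new ConcurrentHashMap<>();

    // Parses the response only on a cache miss; repeated identical responses
    // reuse the stored result size.
    long resultSize(String queryId, long contentLength, LongSupplier parse) {
        return cache.computeIfAbsent(new CacheKey(queryId, contentLength),
                k -> parse.getAsLong());
    }
}
```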
- if (!this.endSignal) { - resultProcessorService.submit(new HttpResultProcessor(this, queryId, duration, contentTypeHeader, responseBody, length)); - resultsSaved = true; - } - } - } - - } catch (IOException | TimeoutException e) { - double duration = durationInMilliseconds(requestStartTime, Instant.now()); - addResultsOnce(new QueryExecutionStats(queryId, COMMON.QUERY_HTTP_FAILURE, duration)); - } - } - } - - abstract void buildRequest(String query, String queryID) throws UnsupportedEncodingException; - - protected void initClient() { - client = HttpClients.custom().setConnectionManager(new BasicHttpClientConnectionManager()).build(); - } - - protected void closeClient() { - closeResponse(); - try { - if (client != null) - client.close(); - } catch (IOException e) { - LOGGER.error("Could not close http response ", e); - } - client = null; - } - - protected void closeResponse() { - try { - if (response != null) - response.close(); - } catch (IOException e) { - LOGGER.error("Could not close Client ", e); - } - response = null; - } - - /** - * Http Result Processor, analyzes the http response in the background, if it was cached already, what is the result size, - * did the response was a success or failure. - */ - static class HttpResultProcessor implements Runnable { - - private final Logger LOGGER = LoggerFactory.getLogger(getClass()); - - private final HttpWorker httpWorker; - private final String queryId; - private final double duration; - private final Header contentTypeHeader; - private ByteArrayOutputStream contentStream; - private final long contentLength; - - public HttpResultProcessor(HttpWorker httpWorker, String queryId, double duration, Header contentTypeHeader, ByteArrayOutputStream contentStream, long contentLength) { - this.httpWorker = httpWorker; - this.queryId = queryId; - this.duration = duration; - this.contentTypeHeader = contentTypeHeader; - this.contentStream = contentStream; - this.contentLength = contentLength; - } - - @Override - public void run() { - // Result size is not saved before. Process the http response. - - ConcurrentMap processedResults = httpWorker.getProcessedResults(); - QueryResultHashKey resultCacheKey = new QueryResultHashKey(queryId, contentLength); - try { - //String content = contentStream.toString(StandardCharsets.UTF_8.name()); - //contentStream = null; // might be hugh, dereference immediately after consumed - Long resultSize = httpWorker.resultProcessor.getResultSize(contentTypeHeader, contentStream, contentLength); - contentStream = null; - // Save the result size to be re-used - processedResults.put(resultCacheKey, resultSize); - LOGGER.debug("added Result Cache Key {}", resultCacheKey); - - httpWorker.addResults(new QueryExecutionStats(queryId, COMMON.QUERY_SUCCESS, duration, resultSize)); - - } catch (IOException | ParseException | ParserConfigurationException | SAXException e) { - LOGGER.error("Query results could not be parsed. 
", e); - httpWorker.addResults(new QueryExecutionStats(queryId, COMMON.QUERY_UNKNOWN_EXCEPTION, duration)); - } catch (Exception e) { - e.printStackTrace(); - } - } - } -} - diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/MultipleCLIInputWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/MultipleCLIInputWorker.java deleted file mode 100644 index 071a65539..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/MultipleCLIInputWorker.java +++ /dev/null @@ -1,195 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.utils.CLIProcessManager; -import org.aksw.iguana.cc.worker.AbstractRandomQueryChooserWorker; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; -import java.time.Instant; -import java.util.List; -import java.util.Random; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicLong; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - -/** - * Worker to execute a query against a CLI process, the connection.service will be the command to execute the query against. - * - * Assumes that the CLI process won't stop but will just accepts queries one after another and returns the results in the CLI output. - * - * This worker can be set to be created multiple times in the background if one process will throw an error, a backup process was already created and can be used. - * This is handy if the process won't just prints an error message, but simply exits. 
- * - */ -@Shorthand("MultipleCLIInputWorker") -public class MultipleCLIInputWorker extends AbstractRandomQueryChooserWorker { - - private Logger LOGGER = LoggerFactory.getLogger(getClass()); - - private Process currentProcess; - protected List processList; - protected int currentProcessId = 0; - private String initFinished; - private String queryFinished; - private String error; - protected int numberOfProcesses = 5; - - public MultipleCLIInputWorker(String taskID, Connection connection, String queriesFile, String initFinished, String queryFinished, String queryError, @Nullable Integer numberOfProcesses, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - this(taskID, connection, queriesFile, initFinished,queryFinished,queryError, numberOfProcesses,timeOut, timeLimit, fixedLatency, gaussianLatency, "MultipleCLIInputWorker", workerID); - } - - public MultipleCLIInputWorker(String taskID, Connection connection, String queriesFile, String initFinished, String queryFinished, String queryError, @Nullable Integer numberOfProcesses, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, String workerType, Integer workerID) { - super(taskID, connection, queriesFile, timeOut, timeLimit, fixedLatency, gaussianLatency, workerType, workerID); - this.initFinished = initFinished; - this.queryFinished = queryFinished; - this.error = queryError; - if(numberOfProcesses!=null){ - this.numberOfProcesses=numberOfProcesses; - } - this.setWorkerProperties(); - - } - - private void setWorkerProperties() { - queryChooser = new Random(this.workerID); - // start cli input - - // Create processes, set first process as current process - this.processList = CLIProcessManager.createProcesses(this.numberOfProcesses, this.con.getEndpoint()); - this.currentProcess = processList.get(0); - - // Make sure that initialization is complete - for (Process value : processList) { - try { - CLIProcessManager.countLinesUntilStringOccurs(value, initFinished, error); - } catch (IOException e) { - LOGGER.error("Exception while trying to wait for init of CLI Process",e); - } - } - } - - - @Override - public void executeQuery(String query, String queryID) { - Instant start = Instant.now(); - // execute queryCLI and read response - try { - // Create background thread that will watch the output of the process and prepare results - AtomicLong size = new AtomicLong(-1); - AtomicBoolean failed = new AtomicBoolean(false); - ExecutorService executor = Executors.newSingleThreadExecutor(); - executor.execute(new Runnable() { - - @Override - public void run() { - try { - LOGGER.debug("Process Alive: {}", currentProcess.isAlive()); - LOGGER.debug("Reader ready: {}", CLIProcessManager.isReaderReady(currentProcess)); - size.set(CLIProcessManager.countLinesUntilStringOccurs(currentProcess, queryFinished, error)); - } catch (IOException e) { - failed.set(true); - } - } - }); - - // Execute the query on the process - try { - if (currentProcess.isAlive()) { - CLIProcessManager.executeCommand(currentProcess, writableQuery(query)); - } else if (this.endSignal) { - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - return; - } else { - setNextProcess(); - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - return; - } - } finally { - 
executor.shutdown(); - executor.awaitTermination((long) (double)this.timeOut, TimeUnit.MILLISECONDS); - } - - // At this point, query is executed and background thread has processed the results. - // Next, calculate time for benchmark. - double duration = durationInMilliseconds(start, Instant.now()); - - if (duration >= timeOut) { - setNextProcess(); - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_SOCKET_TIMEOUT, duration )); - return; - } else if (failed.get()) { - if (!currentProcess.isAlive()) { - setNextProcess(); - } - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, duration )); - return; - } - - // SUCCESS - LOGGER.debug("Query successfully executed size: {}", size.get()); - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_SUCCESS, duration, size.get() )); - return; - } catch (IOException | InterruptedException e) { - LOGGER.warn("Exception while executing query ",e); - // ERROR - super.addResults(new QueryExecutionStats(queryID, COMMON.QUERY_UNKNOWN_EXCEPTION, durationInMilliseconds(start, Instant.now()) )); - } - } - - private void setNextProcess() { - int oldProcessId = currentProcessId; - currentProcessId = currentProcessId == processList.size() -1 ? 0 : currentProcessId + 1; - - // destroy old process - CLIProcessManager.destroyProcess(currentProcess); - if(oldProcessId== currentProcessId) { - try { - currentProcess.waitFor(); - } catch (InterruptedException e) { - LOGGER.error("Process was Interrupted",e); - } - } - - // create and initialize new process to replace previously destroyed process - Process replacementProcess = CLIProcessManager.createProcess(this.con.getEndpoint()); - try { - CLIProcessManager.countLinesUntilStringOccurs(replacementProcess, initFinished, error); // Init - processList.set(oldProcessId, replacementProcess); - } catch (IOException e) { - LOGGER.error("Process replacement didn't work", e); - } - - // finally, update current process - currentProcess = processList.get(currentProcessId); - } - - protected String writableQuery(String query) { - return query; - } - - - - - @Override - public void stopSending() { - super.stopSending(); - for (Process pr : processList) { - pr.destroyForcibly(); - try { - pr.waitFor(); - } catch (InterruptedException e) { - LOGGER.error("Process waitFor was Interrupted", e); - } - } - } -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLWorker.java deleted file mode 100644 index b8e659f5a..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLWorker.java +++ /dev/null @@ -1,21 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; - - - -/** - * A Worker using SPARQL 1.1 to create service request. 
- * - * @author f.conrads - */ -@Shorthand("SPARQLWorker") -public class SPARQLWorker extends HttpGetWorker { - - public SPARQLWorker(String taskID, Connection connection, String queriesFile, @Nullable String responseType, @Nullable String parameterName, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, responseType, parameterName, "lang.SPARQL", timeOut, timeLimit, fixedLatency, gaussianLatency, "SPARQLWorker", workerID); - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/UPDATEWorker.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/UPDATEWorker.java deleted file mode 100644 index 859039abb..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/UPDATEWorker.java +++ /dev/null @@ -1,142 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.model.QueryExecutionStats; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.worker.impl.update.UpdateTimer; -import org.aksw.iguana.commons.annotation.Nullable; -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; - -import java.io.File; -import java.io.IOException; -import java.time.Instant; -import java.util.Properties; - -import static org.aksw.iguana.commons.time.TimeUtils.durationInMilliseconds; - -/** - * - * A Worker using SPARQL Updates to create service request. - * - * @author f.conrads - * - */ -@Shorthand("UPDATEWorker") -public class UPDATEWorker extends HttpPostWorker { - - private int currentQueryID = 0; - private UpdateTimer updateTimer = new UpdateTimer(); - private String timerStrategy; - - public UPDATEWorker(String taskID, Connection connection, String queriesFile, @Nullable String timerStrategy, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - super(taskID, connection, queriesFile, "application/sparql-update", null, null, "lang.SPARQL", timeOut, timeLimit, fixedLatency, gaussianLatency, "UPDATEWorker", workerID); - this.timerStrategy=timerStrategy; - } - - @Override - public void startWorker(){ - setUpdateTimer(this.timerStrategy); - super.startWorker(); - } - - @Override - public void waitTimeMs() { - double wait = this.updateTimer.calculateTime(durationInMilliseconds(startTime, Instant.now()), this.tmpExecutedQueries); - LOGGER.debug("Worker[{{}} : {{}}]: Time to wait for next Query {{}}", workerType, workerID, wait); - try { - Thread.sleep((long)wait); - } catch (InterruptedException e) { - LOGGER.error("Worker[{{}} : {{}}]: Could not wait time before next query due to: {{}}", workerType, - workerID, e); - LOGGER.error("", e); - } - super.waitTimeMs(); - } - - @Override - public synchronized void addResults(QueryExecutionStats results) - { - // create Properties store it in List - Properties result = new Properties(); - result.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, this.taskID); - result.put(COMMON.RECEIVE_DATA_TIME, results.getExecutionTime()); - result.put(COMMON.RECEIVE_DATA_SUCCESS, results.getResponseCode()); - result.put(COMMON.RECEIVE_DATA_SIZE, results.getResultSize()); - result.put(COMMON.QUERY_HASH, queryHash); - result.setProperty(COMMON.QUERY_ID_KEY, results.getQueryID()); - result.put(COMMON.PENALTY, this.timeOut); - // Add extra Meta Key, worker 
ID and worker Type - result.put(COMMON.EXTRA_META_KEY, this.extra); - setResults(result); - executedQueries++; - - - } - - @Override - public void getNextQuery(StringBuilder queryStr, StringBuilder queryID) throws IOException { - // If there is no more update send end signal, as their is nothing to do anymore - if (this.currentQueryID >= this.queryFileList.length) { - this.stopSending(); - return; - } - // get next Query File and next random Query out of it. - QuerySet currentQueryFile = this.queryFileList[this.currentQueryID++]; - queryID.append(currentQueryFile.getName()); - - queryStr.append(currentQueryFile.getContent()); - - } - - @Override - public void setQueriesList(QuerySet[] updateFiles) { - super.setQueriesList(updateFiles); - } - - /** - * Sets Update Timer according to strategy - * - * @param strategyStr - * The String representation of a UpdateTimer.Strategy - */ - private void setUpdateTimer(String strategyStr) { - if (strategyStr == null) - return; - UpdateTimer.Strategy strategy = UpdateTimer.Strategy.valueOf(strategyStr.toUpperCase()); - switch (strategy) { - case FIXED: - if (timeLimit != null) { - this.updateTimer = new UpdateTimer(this.timeLimit/this.queryFileList.length); - } else { - LOGGER.warn("Worker[{{}} : {{}}]: FIXED Updates can only be used with timeLimit!", workerType, - workerID); - } - break; - case DISTRIBUTED: - if (timeLimit != null) { - this.updateTimer = new UpdateTimer(this.queryFileList.length, (double) this.timeLimit); - } else { - LOGGER.warn("Worker[{{}} : {{}}]: DISTRIBUTED Updates can only be used with timeLimit!", workerType, - workerID); - } - break; - default: - break; - } - LOGGER.debug("Worker[{{}} : {{}}]: UpdateTimer was set to UpdateTimer:{{}}", workerType, workerID, updateTimer); - } - - - - /** - * Checks if one queryMix was already executed, as it does not matter how many mixes should be executed - * @param noOfQueryMixes - * @return - */ - @Override - public boolean hasExecutedNoOfQueryMixes(Long noOfQueryMixes){ - return getExecutedQueries() / (getNoOfQueries() * 1.0) >= 1; - } - -} diff --git a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/update/UpdateTimer.java b/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/update/UpdateTimer.java deleted file mode 100644 index f1660baa1..000000000 --- a/iguana.corecontroller/src/main/java/org/aksw/iguana/cc/worker/impl/update/UpdateTimer.java +++ /dev/null @@ -1,100 +0,0 @@ -package org.aksw.iguana.cc.worker.impl.update; - -/** - * - * Class to calculate time between two update queries. - * - * @author f.conrads - * - */ -public class UpdateTimer { - - private Strategy strategy; - private double baseValue; - private Double timeLimit; - - - /** - * - * The possible strategies - *
- * <ul>
- *     <li>NONE: updates will be executed immediately after another</li>
- *     <li>FIXED: a fixed value in ms will be waited before the next update query</li>
- *     <li>DISTRIBUTED: the updates will be equally distributed over the time limit of the task</li>
- * </ul>
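For DISTRIBUTED, calculateTime below spreads the remaining time budget evenly over the remaining updates: (timeLimit - timeExceeded) / (noOfUpdates - executedQueries). A quick worked example with illustrative numbers:

```java
// Worked example of the DISTRIBUTED formula used by calculateTime below:
// remaining time budget divided by remaining update queries.
double timeLimit = 60_000;    // task time limit in ms
double timeExceeded = 12_000; // ms elapsed since the task started
int noOfUpdates = 10;         // total update queries (the baseValue)
long executedQueries = 2;     // updates already executed
double wait = (timeLimit - timeExceeded) / (noOfUpdates - executedQueries);
// wait == 6000.0: the remaining 48 s are spread over the remaining 8 updates
```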
- * - * @author f.conrads - * - */ - public enum Strategy { - /** - * updates will be executed immediately after another - */ - NONE, - - /** - * a fixed value in ms will be waited before the next update query - */ - FIXED, - - /** - * the updates will be equally distributed over the time limit of the task - */ - DISTRIBUTED - } - - /** - * Creates the default UpdateTimer - * All update queries will be executed immediately after another - */ - public UpdateTimer() { - this.strategy= Strategy.NONE; - } - - /** - * Creates a FixedUpdateTimer - * - * @param fixedValue the fixed time to wait between queries - */ - public UpdateTimer(double fixedValue) { - this.strategy= Strategy.FIXED; - this.baseValue=fixedValue; - } - - /** - * Creates a distributed UpdateTimer - * - * @param noOfUpdates the number of update queries - * @param timeLimit the timeLimit of the task - */ - public UpdateTimer(int noOfUpdates, Double timeLimit) { - this.strategy= Strategy.DISTRIBUTED; - this.baseValue=noOfUpdates; - this.timeLimit = timeLimit; - } - - - /** - * calculates the time the UPDATEWorker has to wait until the next update query - * - * @param timeExceeded The time it took from start of the task to now - * @param executedQueries currently number of executed Update Queries - * @return The time to wait - */ - public double calculateTime(double timeExceeded, long executedQueries) { - switch(strategy) { - case FIXED: - return baseValue; - case DISTRIBUTED: - return (timeLimit-timeExceeded)/(baseValue-executedQueries); - default: - return 0; - } - } - - - @Override - public String toString() { - return "[strategy: "+this.strategy.name()+"]"; - } -} \ No newline at end of file diff --git a/iguana.corecontroller/src/main/resources/iguana-schema.json b/iguana.corecontroller/src/main/resources/iguana-schema.json deleted file mode 100644 index f215da2c4..000000000 --- a/iguana.corecontroller/src/main/resources/iguana-schema.json +++ /dev/null @@ -1,406 +0,0 @@ -{ - "$schema": "http://json-schema.org/draft-07/schema#", - - "definitions": { - "connection": { - "type": "object", - "properties": { - "endpoint": { "type": "string" }, - "updateEndpoint": { "type": "string" }, - "user": { "type": "string" }, - "password": { "type": "string" } - }, - "required": ["endpoint"] - }, - "warmup" : { - "type": "object", - "properties": { - "timeLimit": { - "type": "integer" - }, - "queryHandler": { - "$ref": "#/definitions/genericClassObject" - }, - "workers": { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/AbstractWorker" - } - ] - } - } - }, - "required": ["workers","timeLimit"] - }, - "stresstest": { - "type": "object", - "properties": { - "timeLimit": { "type": "integer" }, - "noOfQueryMixes": {"type": "integer"}, - "queryHandler": {"$ref" : "#/definitions/genericClassObject" }, - "warmup" : {"$ref" : "#/definitions/warmup"}, - "workers": { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/AbstractWorker" - } - ] - } - } - }, - "required": ["queryHandler", "workers"] - }, - "AbstractWorker": { - "type": "object", - "properties": { - "className": { - "type": "string" - } - - }, - "allOf": [{ - "if": { - "properties": { - "className" : { - "oneOf": [ {"const": "SPARQLWorker"},{"const": "org.aksw.iguana.cc.worker.impl.SPARQLWorker"}] - } - } - }, - "then": - { - "additionalProperties": {"type": "undefined"}, - - "required": [ - "className", - "threads", - "queriesFile" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - 
"queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "responseType": { - "type": "string" - }, - "parameterName": { - "type": "string" - } - } - } - - }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "UPDATEWorker"},{"const": "org.aksw.iguana.cc.worker.impl.UPDATEWorker"}] - } - } - }, - "then": - {"required": ["className", "threads", "queriesFile"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "timerStrategy" : {"type": "string"} - }, - "additionalProperties": {"type": "undefined"} - } - - }, - {"if": {"properties": { - "className" : { - "oneOf": [{"const": "MultipleCLIInputWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.MultipleCLIInputWorker"}] - } - }}, - "then": - {"required": ["className", "threads", "queriesFile", "queryError", "queryFinished", "initFinished"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - "queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"}, - "numberOfProcesses" : {"type": "integer"} - }, "additionalProperties": {"type": "undefined"} - } - }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "CLIInputWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIInputWorker"}] - } - } - }, - "then": - {"required": ["className", "threads", "queriesFile", "queryError", "queryFinished", "initFinished"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - "queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"} - }, "additionalProperties": {"type": "undefined"} - } - }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "CLIPrefixWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIPrefixWorker"}] - } - } - }, - "then": { - "required": [ - "className", - "threads", - "queriesFile", - "queryError", - "queryFinished", - "initFinished", - "queryPrefix", - "querySuffix" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - "queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "numberOfProcesses": { - "type": "integer" - }, - "queryError": { - "type": "string" - }, - "queryFinished": { - "type": "string" - }, - "initFinished": { - "type": "string" - }, - "querySuffix": { - "type": "string" - }, - "queryPrefix": { - "type": "string" - } - }, - "additionalProperties": {"type": "undefined"} - } - - }, - {"if": { - "properties": { - "className" : { - "oneOf": [{"const": "MultipleCLIInputFileWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.MultipleCLIInputFileWorker"}] - } - } - }, - "then": { - "required": [ - "className", - "threads", - "queriesFile", - "directory", - "queryError", - "queryFinished", 
- "initFinished" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - "queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "queryError": { - "type": "string" - }, - "queryFinished": { - "type": "string" - }, - "initFinished": { - "type": "string" - }, - "directory": { - "type": "string" - }, - "numberOfProcesses": { - "type": "integer" - } - }, - "additionalProperties": {"type": "undefined"} - } - }, - { - "if": { - "properties": { - "className": { - "oneOf": [{"const": "CLIInputFileWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIInputFileWorker"}] - } - } - }, - "then": { - "allOf": [{ - "required": [ - "className", - "threads", - "queriesFile", - "directory", - "queryError", - "queryFinished", - "initFinished" - ]}, - {"properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - "queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"}, - "directory" : {"type" : "string"} - }, "additionalProperties": {"type": "undefined"} - }] - } - } - ] - }, - "task": { - "type": "object", - "properties": { - "className": { "type": "string" }, - "configuration": { - "oneOf": [{"$ref": "#/definitions/stresstest"}] - } - }, - "required": ["className", "configuration"] - }, - "genericClassObject": { - "type": "object", - "properties": { - "className": { "type": "string" }, - "configuration": { - "type": "object" - } - }, - "required": ["className"] - - } - - }, - - "type": "object", - - "properties": { - "connections": { - "type": "array", - "items": { - "$ref": "#/definitions/connection" - } - }, - "datasets": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name" : {"type": "string"} - }, - "required": ["name"] - } - }, - "tasks": { - "type": "array", - "items": { - "$ref":"#/definitions/task" - } - }, - "preScriptHook": { - "type": "string" - }, - "postScriptHook": { - "type": "string" - }, - "metrics": { - "type": "array", - "items": { - "$ref": "#/definitions/genericClassObject" - } - }, - "storages": { - "type": "array", - "items": { - "$ref": "#/definitions/genericClassObject" - } - } - } -} diff --git a/iguana.corecontroller/src/main/resources/log4j2.yml b/iguana.corecontroller/src/main/resources/log4j2.yml deleted file mode 100644 index f7d5b1ffc..000000000 --- a/iguana.corecontroller/src/main/resources/log4j2.yml +++ /dev/null @@ -1,56 +0,0 @@ -Configuration: - status: info - name: iguana - properties: - property: - name: filename - value: iguana.log - thresholdFilter: - level: debug - appenders: - Console: - name: STDOUT - target: SYSTEM_OUT - PatternLayout: - Pattern: "%highlight{%d [%t] %p [%c] - <%m>%n}{FATAL=red blink, ERROR=red, WARN=yellow bold, INFO=green, DEBUG=green bold, TRACE=blue}" - disableAnsi: false - File: - name: File - fileName: ${filename} - PatternLayout: - Pattern: "%d [%t] %p [%c] - <%m>%n" - Filters: - ThresholdFilter: - level: warn - - Loggers: - logger: - - name: org.apache.http.client.protocol - level: error - additivity: true - AppenderRef: - - ref: STDOUT - - ref: File - - name: org.reflections.Reflections - level: info - additivity: true - AppenderRef: - - ref: STDOUT - - ref: File - - name: org.apache.http.impl - level: 
error - additivity: true - AppenderRef: - - ref: STDOUT - - ref: File - - name: org.apache.jena.riot - level: error - additivity: true - AppenderRef: - - ref: STDOUT - - ref: File - Root: - level: info - AppenderRef: - - ref: STDOUT - - ref: File \ No newline at end of file diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/ConfigTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/ConfigTest.java deleted file mode 100644 index 68a922e11..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/ConfigTest.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.aksw.iguana.cc.config; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collection; - -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; - -/** - * Checks if the config is read correctly as YAML as well as JSON and checks if the corresponding Task could be created - */ -@RunWith(Parameterized.class) -public class ConfigTest { - - private final Boolean valid; - private final String file; - public Logger LOGGER = LoggerFactory.getLogger(getClass()); - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{"src/test/resources/iguana.yml", false}); - testData.add(new Object[]{"src/test/resources/iguana.json", false}); - testData.add(new Object[]{"src/test/resources/iguana-valid.yml", true}); - testData.add(new Object[]{"src/test/resources/iguana-valid.json", true}); - return testData; - } - - public ConfigTest(String file, Boolean valid){ - this.file=file; - this.valid=valid; - } - - @Test - public void checkValidity() throws IOException { - IguanaConfig config = IguanaConfigFactory.parse(new File(file)); - if(valid){ - assertNotNull(config); - } - else { - assertNull(config); - } - config = IguanaConfigFactory.parse(new File(file), false); - assertNotNull(config); - } - - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/WorkflowTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/WorkflowTest.java deleted file mode 100644 index 6baf25600..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/config/WorkflowTest.java +++ /dev/null @@ -1,157 +0,0 @@ -package org.aksw.iguana.cc.config; - -import org.aksw.iguana.cc.tasks.MockupStorage; -import org.aksw.iguana.cc.tasks.MockupTask; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.Metric; -import org.aksw.iguana.rp.metrics.MetricManager; -import org.aksw.iguana.rp.metrics.impl.*; -import org.aksw.iguana.rp.storage.Storage; -import org.aksw.iguana.rp.storage.StorageManager; -import org.aksw.iguana.rp.storage.impl.NTFileStorage; -import org.apache.commons.io.FileUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.File; -import java.io.IOException; -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class WorkflowTest { - private String file = "src/test/resources/config/mockupworkflow.yml"; - private String noDefaultFile = "src/test/resources/config/mockupworkflow-no-default.yml"; - private String preFile = "pre-shouldNotExist.txt"; - 
private String postFile = "post-shouldNotExist.txt"; - - private String expectedPreContent="TestSystem DatasetName testfile.txt\nTestSystem2 DatasetName testfile.txt\nTestSystem DatasetName2 testfile2.txt\nTestSystem2 DatasetName2 testfile2.txt\n"; - private String expectedPostContent="testfile.txt DatasetName TestSystem\ntestfile.txt DatasetName TestSystem2\ntestfile2.txt DatasetName2 TestSystem\ntestfile2.txt DatasetName2 TestSystem2\n"; - - @After - @Before - public void cleanUp(){ - File pre = new File(preFile); - File post = new File(postFile); - pre.delete(); - post.delete(); - StorageManager storageManager = StorageManager.getInstance(); - storageManager.getStorages().clear(); - MetricManager metricManager = MetricManager.getInstance(); - metricManager.getMetrics().clear(); - } - - @Test - public void hooks() throws IOException { - IguanaConfig config = IguanaConfigFactory.parse(new File(noDefaultFile), false); - //test if workflow was correct - config.start(); - File pre = new File(preFile); - File post = new File(postFile); - - String preContent = FileUtils.readFileToString(pre, "UTF-8"); - String postContent = FileUtils.readFileToString(post, "UTF-8"); - assertEquals(expectedPreContent, preContent); - assertEquals(expectedPostContent, postContent); - - } - - @Test - public void workflowTest() throws IOException { - IguanaConfig config = IguanaConfigFactory.parse(new File(file), false); - //test if workflow was correct - config.start(); - StorageManager storageManager = StorageManager.getInstance(); - Set storages = storageManager.getStorages(); - assertEquals(1, storages.size()); - Storage s = storages.iterator().next(); - assertTrue(s instanceof MockupStorage); - Set meta = ((MockupStorage)s).getMeta(); - //check if suiteID eq - // check if taskID suiteID/1/1 -> 1 etc. 
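The assertions below rely on the ID convention that a task ID is the experiment ID plus a trailing "/<connection index>". A small illustration (the concrete IDs are made up):

```java
// Illustrative IDs only: a task ID is "<suiteID><dataset>/<connection>",
// and the experiment ID drops the trailing "/<connection>" (two characters
// here, matching the substring(0, length - 2) assertion below).
String suite = "12345/";
String taskID = suite + "2/1";                           // "12345/2/1"
String expID = taskID.substring(0, taskID.length() - 2); // "12345/2"
assert expID.equals(suite + "2");
```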
- Set suiteID = new HashSet(); - for(Properties p : meta){ - String suite = p.getProperty(COMMON.SUITE_ID_KEY); - suiteID.add(suite); - assertEquals(MockupTask.class.getCanonicalName(),p.get(COMMON.EXPERIMENT_TASK_CLASS_ID_KEY)); - String expID = p.getProperty(COMMON.EXPERIMENT_ID_KEY); - String taskID = p.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - assertEquals(expID, taskID.substring(0, taskID.length()-2)); - if(taskID.equals(suite+"1/1")){ - assertEquals("TestSystem", p.get(COMMON.CONNECTION_ID_KEY)); - assertEquals("DatasetName", p.get(COMMON.DATASET_ID_KEY)); - } - else if(taskID.equals(suite+"1/2")){ - assertEquals("TestSystem2", p.get(COMMON.CONNECTION_ID_KEY)); - assertEquals("DatasetName", p.get(COMMON.DATASET_ID_KEY)); - } - else if(taskID.equals(suite+"2/1")){ - assertEquals("TestSystem", p.get(COMMON.CONNECTION_ID_KEY)); - assertEquals("DatasetName2", p.get(COMMON.DATASET_ID_KEY)); - } - else if(taskID.equals(suite+"2/2")){ - assertEquals("TestSystem2", p.get(COMMON.CONNECTION_ID_KEY)); - assertEquals("DatasetName2", p.get(COMMON.DATASET_ID_KEY)); - } - } - assertEquals(1, suiteID.size()); - } - - @Test - public void noDefaultTest() throws IOException { - IguanaConfig config = IguanaConfigFactory.parse(new File(noDefaultFile), false); - //test if correct defaults were loaded - config.start(); - StorageManager storageManager = StorageManager.getInstance(); - Set storages = storageManager.getStorages(); - assertEquals(1, storages.size()); - Storage s = storages.iterator().next(); - assertTrue(s instanceof MockupStorage); - - MetricManager metricManager = MetricManager.getInstance(); - Set metrics = metricManager.getMetrics(); - assertEquals(2, metrics.size()); - Set> seen = new HashSet>(); - for(Metric m : metrics){ - seen.add(m.getClass()); - } - assertEquals(2, seen.size()); - assertTrue(seen.contains(QMPHMetric.class)); - assertTrue(seen.contains(QPSMetric.class)); - - } - - @Test - public void initTest() throws IOException { - String file = "src/test/resources/config/mockupworkflow-default.yml"; - IguanaConfig config = IguanaConfigFactory.parse(new File(file), false); - //test if correct defaults were loaded - config.start(); - StorageManager storageManager = StorageManager.getInstance(); - Set storages = storageManager.getStorages(); - assertEquals(1, storages.size()); - Storage s = storages.iterator().next(); - assertTrue(s instanceof NTFileStorage); - File del = new File(((NTFileStorage)s).getFileName()); - del.delete(); - MetricManager metricManager = MetricManager.getInstance(); - Set metrics = metricManager.getMetrics(); - assertEquals(5, metrics.size()); - Set> seen = new HashSet>(); - for(Metric m : metrics){ - seen.add(m.getClass()); - } - assertEquals(5, seen.size()); - assertTrue(seen.contains(QMPHMetric.class)); - assertTrue(seen.contains(QPSMetric.class)); - assertTrue(seen.contains(AvgQPSMetric.class)); - assertTrue(seen.contains(NoQPHMetric.class)); - assertTrue(seen.contains(NoQMetric.class)); - - } - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/MockCloseableHttpResponse.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/MockCloseableHttpResponse.java deleted file mode 100644 index 5d7fc06e4..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/MockCloseableHttpResponse.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.aksw.iguana.cc.lang; - -import org.apache.http.HttpStatus; -import org.apache.http.ProtocolVersion; -import org.apache.http.ReasonPhraseCatalog; -import 
org.apache.http.StatusLine; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.http.entity.BasicHttpEntity; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicStatusLine; - -import java.io.*; -import java.net.URL; -import java.util.Locale; - -public class MockCloseableHttpResponse extends BasicHttpResponse implements CloseableHttpResponse { - - public MockCloseableHttpResponse(StatusLine statusline, ReasonPhraseCatalog catalog, Locale locale) { - super(statusline, catalog, locale); - } - - public MockCloseableHttpResponse(StatusLine statusline) { - super(statusline); - } - - public MockCloseableHttpResponse(ProtocolVersion ver, int code, String reason) { - super(ver, code, reason); - } - - @Override - public void close() throws IOException { - - } - - public static CloseableHttpResponse buildMockResponse(String data, String contentType) throws FileNotFoundException, UnsupportedEncodingException { - ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); - String reasonPhrase = "OK"; - StatusLine statusline = new BasicStatusLine(protocolVersion, HttpStatus.SC_OK, reasonPhrase); - MockCloseableHttpResponse mockResponse = new MockCloseableHttpResponse(statusline); - BasicHttpEntity entity = new BasicHttpEntity(); - entity.setContentType(contentType); - //entity.setContentType(contentType); - URL url = Thread.currentThread().getContextClassLoader().getResource("response.txt"); - InputStream instream = new ByteArrayInputStream(data.getBytes()); - entity.setContent(instream); - mockResponse.setEntity(entity); - return mockResponse; - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/RDFLanguageProcessorTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/RDFLanguageProcessorTest.java deleted file mode 100644 index 37bb4176d..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/RDFLanguageProcessorTest.java +++ /dev/null @@ -1,65 +0,0 @@ -package org.aksw.iguana.cc.lang; - -import org.aksw.iguana.cc.lang.impl.RDFLanguageProcessor; -import org.apache.http.client.methods.CloseableHttpResponse; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.riot.Lang; -import org.json.simple.parser.ParseException; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.IOException; -import java.io.StringWriter; -import java.lang.reflect.Field; -import java.util.ArrayList; -import java.util.Collection; - -import static org.junit.Assert.assertEquals; - -@RunWith(Parameterized.class) -public class RDFLanguageProcessorTest { - - private static Logger LOGGER = LoggerFactory.getLogger(RDFLanguageProcessorTest.class); - private final Lang lang; - private final Model m; - - @Parameterized.Parameters - public static Collection data() throws IllegalAccessException { - Collection testData = new ArrayList(); - for(Field langField : Lang.class.getFields()) { - Lang susLang = (Lang)langField.get(Lang.class); - if(susLang.equals(Lang.RDFTHRIFT) || susLang.equals(Lang.TRIX) || susLang.equals(Lang.SHACLC) || susLang.equals(Lang.TSV) || susLang.equals(Lang.CSV) || susLang.equals(Lang.RDFNULL)) { - //cannot test them as model doesn't allow them to write - continue; - 
} - testData.add(new Object[]{susLang}); - } - return testData; - } - - public RDFLanguageProcessorTest(Lang lang){ - this.lang = lang; - this.m = ModelFactory.createDefaultModel(); - m.add(ResourceFactory.createResource("uri://test"), ResourceFactory.createProperty("uri://prop1"), "abc"); - m.add(ResourceFactory.createResource("uri://test"), ResourceFactory.createProperty("uri://prop2"), "abc2"); - LOGGER.info("Testing Lanuage {} Content-Type: {}", lang.getName(), lang.getContentType()); - } - - @Test - public void testCorrectModel() throws IOException, ParserConfigurationException, SAXException, ParseException { - StringWriter sw = new StringWriter(); - m.write(sw, lang.getName(), null); - CloseableHttpResponse response = MockCloseableHttpResponse.buildMockResponse(sw.toString(), lang.getContentType().getContentTypeStr()); - RDFLanguageProcessor processor = new RDFLanguageProcessor(); - assertEquals(2, processor.getResultSize(response).longValue()); - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/SPARQLLanguageProcessorTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/SPARQLLanguageProcessorTest.java deleted file mode 100644 index 93b1aff5f..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/lang/SPARQLLanguageProcessorTest.java +++ /dev/null @@ -1,144 +0,0 @@ -package org.aksw.iguana.cc.lang; - -import org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryFactory; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.json.simple.parser.ParseException; -import org.junit.Test; -import org.xml.sax.SAXException; - -import javax.xml.parsers.ParserConfigurationException; -import java.io.ByteArrayOutputStream; -import java.io.FileReader; -import java.io.FileWriter; -import java.io.IOException; -import java.nio.charset.StandardCharsets; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class SPARQLLanguageProcessorTest { - - private String jsonResult = "{\n" + - " \"head\": { \"vars\": [ \"book\" , \"title\" ]\n" + - " } ,\n" + - " \"results\": { \n" + - " \"bindings\": [\n" + - " {\n" + - " \"book\": { \"type\": \"uri\" , \"value\": \"http://example.org/book/book3\" } ,\n" + - " \"title\": { \"type\": \"literal\" , \"value\": \"Example Book 3\" }\n" + - " } ,\n" + - " {\n" + - " \"book\": { \"type\": \"uri\" , \"value\": \"http://example.org/book/book2\" } ,\n" + - " \"title\": { \"type\": \"literal\" , \"value\": \"Example Book 2\" }\n" + - " } ,\n" + - " {\n" + - " \"book\": { \"type\": \"uri\" , \"value\": \"http://example.org/book/book1\" } ,\n" + - " \"title\": { \"type\": \"literal\" , \"value\": \"Example Book 1\" }\n" + - " }\n" + - " ]\n" + - " }\n" + - "}"; - private String xmlResult = "\n" + - "\n" + - " \n" + - " \n" + - " \n" + - " \n" + - "\n" + - " \n" + - " \n" + - " test1\n" + - " ... \n" + - " \n" + - "\n" + - " \n" + - " test2\n" + - " ... 
\n" + - " \n" + - " \n" + - " \n" + - "\n" + - ""; - - - - - @Test - public void checkJSON() throws ParseException, IOException { - ByteArrayOutputStream bbaos = new ByteArrayOutputStream(); - bbaos.write(jsonResult.getBytes()); - assertEquals(3, SPARQLLanguageProcessor.getJsonResultSize(bbaos)); - //test if valid json response provide 0 bindings - try { - //check if invalid json throws exception - bbaos = new ByteArrayOutputStream(); - bbaos.write("{ \"a\": \"b\"}".getBytes()); - SPARQLLanguageProcessor.getJsonResultSize(bbaos); - assertTrue("Should have thrown an error", false); - }catch(Exception e){ - assertTrue(true); - } - try { - //check if invalid json throws exception - bbaos = new ByteArrayOutputStream(); - bbaos.write("{ \"a\": \"b\"".getBytes()); - SPARQLLanguageProcessor.getJsonResultSize(bbaos); - assertTrue("Should have thrown an error", false); - }catch(Exception e){ - assertTrue(true); - } - } - - @Test - public void checkXML() throws IOException, SAXException, ParserConfigurationException { - ByteArrayOutputStream bbaos = new ByteArrayOutputStream(); - bbaos.write(xmlResult.getBytes(StandardCharsets.UTF_8)); - assertEquals(2, SPARQLLanguageProcessor.getXmlResultSize(bbaos)); - //test if valid xml response provide 0 bindings - try { - //check if invalid xml throws exception - bbaos = new ByteArrayOutputStream(); - bbaos.write("b".getBytes()); - SPARQLLanguageProcessor.getJsonResultSize(bbaos); - assertTrue("Should have thrown an error", false); - }catch(Exception e){ - assertTrue(true); - } - try { - //check if invalid xml throws exception - bbaos = new ByteArrayOutputStream(); - bbaos.write("{ \"a\": \"b\"".getBytes()); - SPARQLLanguageProcessor.getJsonResultSize(bbaos); - assertTrue("Should have thrown an error", false); - }catch(Exception e){ - assertTrue(true); - } - } - - @Test - public void checkResultSize() throws IOException, ParserConfigurationException, SAXException, ParseException { - SPARQLLanguageProcessor languageProcessor = new SPARQLLanguageProcessor(); - assertEquals(3, languageProcessor.getResultSize(MockCloseableHttpResponse.buildMockResponse(jsonResult, SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON)).longValue()); - assertEquals(2, languageProcessor.getResultSize(MockCloseableHttpResponse.buildMockResponse(xmlResult, SPARQLLanguageProcessor.QUERY_RESULT_TYPE_XML)).longValue()); - assertEquals(4, languageProcessor.getResultSize(MockCloseableHttpResponse.buildMockResponse("a\na\na\nb", "text/plain")).longValue()); - } - - - @Test - public void checkGeneratedStatsModel() throws IOException { - Query q = QueryFactory.create("SELECT * {?s ?p ?o. ?o ?q ?t. 
FILTER(?t = \"abc\")} GROUP BY ?s"); - QueryWrapper wrapped = new QueryWrapper(q, "abc"); - SPARQLLanguageProcessor languageProcessor = new SPARQLLanguageProcessor(); - Model actual = languageProcessor.generateTripleStats(Lists.newArrayList(wrapped),"query","1/1/2"); - Model expected = ModelFactory.createDefaultModel(); - expected.read(new FileReader("src/test/resources/querystats.nt"), null, "N-TRIPLE"); - assertEquals(expected.size(), actual.size()); - expected.remove(actual); - actual.write(new FileWriter("test2.nt"), "N-TRIPLE"); - assertEquals(0, expected.size()); - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/model/QueryResultHashKeyTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/model/QueryResultHashKeyTest.java deleted file mode 100644 index e2c1cc538..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/model/QueryResultHashKeyTest.java +++ /dev/null @@ -1,52 +0,0 @@ -package org.aksw.iguana.cc.model; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.UUID; - -import static org.junit.Assert.*; - -@RunWith(Parameterized.class) -public class QueryResultHashKeyTest { - - - private final String queryID; - private final long uniqueKey; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{"sparql1", 1}); - testData.add(new Object[]{"sparql2", 122323l}); - testData.add(new Object[]{"update", 122323l}); - testData.add(new Object[]{UUID.randomUUID().toString(), 122323l}); - testData.add(new Object[]{"", 0}); - return testData; - } - - public QueryResultHashKeyTest(String queryID, long uniqueKey){ - this.queryID=queryID; - this.uniqueKey=uniqueKey; - } - - @Test - public void checkEquals(){ - QueryResultHashKey key = new QueryResultHashKey(queryID, uniqueKey); - assertTrue(key.equals(key)); - assertFalse(key.equals(null)); - assertFalse(key.equals(queryID)); - assertFalse(key.equals(uniqueKey)); - QueryResultHashKey that = new QueryResultHashKey(queryID, uniqueKey); - assertEquals(key, that); - that = new QueryResultHashKey(queryID+"abc", uniqueKey); - assertNotEquals(key, that); - that = new QueryResultHashKey(queryID, uniqueKey+1); - assertNotEquals(key, that); - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandlerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandlerTest.java deleted file mode 100644 index b84d03af9..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/DelimInstancesQueryHandlerTest.java +++ /dev/null @@ -1,120 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.SPARQLWorker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; -import org.apache.commons.io.FileUtils; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.*; - -import static org.junit.Assert.assertEquals; - -@RunWith(Parameterized.class) -public class 
DelimInstancesQueryHandlerTest { - - private final boolean isUpdate; - private final String delim; - private String[] queryStr; - private String dir = UUID.randomUUID().toString(); - private File queriesFile; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{new String[]{"SELECT * \n{\n?s ?p ?o\n}", "doesn't matter", "as long as they are not empty", "the only thing which won't do is the triplestats"}, false, ""}); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}", "doesn't matter", "", "the only thing \nwhich won't do is the triplestats"}, false, ""}); - testData.add(new Object[]{new String[]{"UPDATE * \n{?s ?p ?o}", "UPDATE \ndoesn't matter", "", "UPDATE\n the only thing which won't do is the triplestats"}, true, ""}); - testData.add(new Object[]{new String[]{"SELECT * \n{\n?s ?p ?o\n}", "doesn't matter", "as long as they are not empty", "the only thing which won't do is the triplestats"}, false, "###"}); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}", "doesn't matter", "", "the only thing \n\nwhich won't do is the triplestats"}, false, "###"}); - testData.add(new Object[]{new String[]{"UPDATE * \n{?s ?p ?o}", "UPDATE \ndoesn't matter", "", "UPDATE\n\n the only thing which won't do is the triplestats"}, true, "###"}); - - return testData; - } - - public DelimInstancesQueryHandlerTest(String[] queryStr, boolean isUpdate, String delim){ - this.queryStr = queryStr; - this.isUpdate=isUpdate; - this.delim=delim; - } - - @Before - public void createFolder() throws IOException { - //File f = new File(this.dir); - //f.mkdir(); - String queryFile = UUID.randomUUID().toString(); - File f = new File(queryFile); - f.createNewFile(); - try(PrintWriter pw = new PrintWriter(f)){ - for(String query : queryStr) { - pw.println(query); - pw.println(delim); - } - } - //remove empty lines after printing them, so the expected asserts will correctly assume that the empty limes are ignored - List tmpList = Lists.newArrayList(queryStr); - Iterator it = tmpList.iterator(); - while(it.hasNext()){ - if(it.next().isEmpty()){ - it.remove(); - } - } - this.queryStr= tmpList.toArray(new String[]{}); - this.queriesFile = f; - f.deleteOnExit(); - } - - @After - public void removeFolder() throws IOException { - File f = new File(this.dir); - FileUtils.deleteDirectory(f); - } - - @Test - public void testQueryCreation() throws IOException { - //Get queries file - Connection con = new Connection(); - con.setName("a"); - con.setEndpoint("http://test.com"); - Worker worker = getWorker(con, 1, "1"); - DelimInstancesQueryHandler qh = new DelimInstancesQueryHandler(delim, Lists.newArrayList(worker)); - qh.setOutputFolder(this.dir); - Map map = qh.generate(); - List expected = new ArrayList(); - List actual = new ArrayList(); - - for(String qStr : queryStr){ - expected.add(qStr); - } - - for(QuerySet querySet : map.get(this.queriesFile.getAbsolutePath())){ - assertEquals(1, querySet.size()); - actual.add(querySet.getQueryAtPos(0)); - } - assertEquals(expected.size(), actual.size()); - actual.removeAll(expected); - assertEquals(0, actual.size()); - assertEquals(queryStr.length, map.get(this.queriesFile.getAbsolutePath()).length); - } - - - public Worker getWorker(Connection con, int id, String taskID){ - if(isUpdate){ - return new UPDATEWorker(taskID, con, this.queriesFile.getAbsolutePath(), null, null, null, null,null, id); - } - else { - return new SPARQLWorker(taskID, con, this.queriesFile.getAbsolutePath(), null, 
null, null, null, null, null, id); - } - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandlerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandlerTest.java deleted file mode 100644 index 33f812245..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/InstancesQueryHandlerTest.java +++ /dev/null @@ -1,118 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.FileBasedQuerySet; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.SPARQLWorker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; -import org.apache.commons.io.FileUtils; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.*; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@RunWith(Parameterized.class) -public class InstancesQueryHandlerTest { - - private final boolean isUpdate; - private String[] queryStr; - private String dir = UUID.randomUUID().toString(); - private File queriesFile; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}", "doesn't matter", "as long as they are not empty", "the only thing which won't do is the triplestats"}, false}); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}", "doesn't matter", "", "the only thing which won't do is the triplestats"}, false}); - testData.add(new Object[]{new String[]{"UPDATE * {?s ?p ?o}", "UPDATE doesn't matter", "", "UPDATE the only thing which won't do is the triplestats"}, true}); - - return testData; - } - - public InstancesQueryHandlerTest(String[] queryStr, boolean isUpdate){ - this.queryStr = queryStr; - this.isUpdate=isUpdate; - } - - @Before - public void createFolder() throws IOException { - //File f = new File(this.dir); - //f.mkdir(); - String queryFile = UUID.randomUUID().toString(); - File f = new File(queryFile); - f.createNewFile(); - try(PrintWriter pw = new PrintWriter(f)){ - for(String query : queryStr) { - pw.println(query); - } - } - //remove empty lines after printing them, so the expected asserts will correctly assume that the empty lines are ignored - List tmpList = Lists.newArrayList(queryStr); - Iterator it = tmpList.iterator(); - while(it.hasNext()){ - if(it.next().isEmpty()){ - it.remove(); - } - } - this.queryStr= tmpList.toArray(new String[]{}); - this.queriesFile = f; - f.deleteOnExit(); - } - - @After - public void removeFolder() throws IOException { - File f = new File(this.dir); - FileUtils.deleteDirectory(f); - } - - - @Test - public void testQueryCreation() throws IOException { - //Get queries file - Connection con = new Connection(); - con.setName("a"); - con.setEndpoint("http://test.com"); - Worker worker = getWorker(con, 1, "1"); - InstancesQueryHandler qh = new InstancesQueryHandler(Lists.newArrayList(worker)); - qh.setOutputFolder(this.dir); - Map map = qh.generate(); - List expected = new ArrayList(); - List actual = new ArrayList(); - - for(String qStr : queryStr){ - expected.add(qStr); - } - - for(QuerySet 
querySet : map.get(this.queriesFile.getAbsolutePath())){ - assertEquals(1, querySet.size()); - actual.add(querySet.getQueryAtPos(0)); - } - assertEquals(expected.size(), actual.size()); - actual.removeAll(expected); - assertEquals(0, actual.size()); - assertEquals(queryStr.length, map.get(this.queriesFile.getAbsolutePath()).length); - } - - - public Worker getWorker(Connection con, int id, String taskID){ - if(isUpdate){ - return new UPDATEWorker(taskID, con, this.queriesFile.getAbsolutePath(), null, null, null, null,null, id); - } - else { - return new SPARQLWorker(taskID, con, this.queriesFile.getAbsolutePath(), null, null, null, null, null, null, id); - } - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternBasedQueryHandlerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternBasedQueryHandlerTest.java deleted file mode 100644 index e7b1888bf..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternBasedQueryHandlerTest.java +++ /dev/null @@ -1,187 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.FileBasedQuerySet; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.SPARQLWorker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; -import org.apache.commons.io.FileUtils; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.io.File; -import java.io.IOException; -import java.io.PrintWriter; -import java.util.*; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -@RunWith(Parameterized.class) -public class PatternBasedQueryHandlerTest { - - private final boolean isUpdate; - private String[] queryStr; - private String dir = UUID.randomUUID().toString(); - private File queriesFile; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}"}, false}); - testData.add(new Object[]{new String[]{"SELECT * {?s ?p ?o}"}, false}); - testData.add(new Object[]{new String[]{"UPDATE * {?s ?p ?o}"}, true}); - - return testData; - } - - public PatternBasedQueryHandlerTest(String[] queryStr, boolean isUpdate){ - this.queryStr = queryStr; - this.isUpdate=isUpdate; - } - - @Before - public void createFolder() throws IOException { - //File f = new File(this.dir); - //f.mkdir(); - String queryFile = UUID.randomUUID().toString(); - File f = new File(queryFile); - f.createNewFile(); - try(PrintWriter pw = new PrintWriter(f)){ - for(String query : queryStr) { - pw.println(query); - } - } - //remove empty lines after printing them, so the expected asserts will correctly assume that the empty lines are ignored - List tmpList = Lists.newArrayList(queryStr); - Iterator it = tmpList.iterator(); - while(it.hasNext()){ - if(it.next().isEmpty()){ - it.remove(); - } - } - this.queryStr= tmpList.toArray(new String[]{}); - this.queriesFile = f; - f.deleteOnExit(); - } - - @After - public void removeFolder() throws IOException { - File f = new File(this.dir); - FileUtils.deleteDirectory(f); - } - - - - @Test - public void testQueryCreation() throws IOException { - //Get queries file - Connection con = new Connection(); - 
con.setName("a"); - con.setEndpoint("http://test.com"); - Worker worker = getWorker(con, 1, "1"); - PatternQueryHandler qh = new PatternQueryHandler(Lists.newArrayList(worker), con.getEndpoint()); - qh.setOutputFolder(this.dir); - Map map = qh.generate(); - //check if folder exist this.dir/hashCode/ with |queries| files - int hashcode = org.aksw.iguana.cc.utils.FileUtils.getHashcodeFromFileContent(this.queriesFile.getAbsolutePath()); - File f = new File(this.dir+File.separator+hashcode); - if(!isUpdate) { - assertTrue(f.isDirectory()); - int expectedNoOfFiles = queryStr.length; - assertEquals(expectedNoOfFiles, f.listFiles().length); - //iterate through all and check if correct - HashSet files = new HashSet(); - for(File queryFile : f.listFiles()){ - int id = Integer.parseInt(queryFile.getName().replace("sparql", "").replace("update", "")); - String actualQueryString =org.aksw.iguana.cc.utils.FileUtils.readLineAt(0, queryFile); - assertEquals(queryStr[id], actualQueryString); - files.add(queryFile.getAbsolutePath()); - } - for(QuerySet querySet : map.get(this.queriesFile.getAbsolutePath())){ - if(querySet instanceof FileBasedQuerySet) { - assertTrue(files.contains(((FileBasedQuerySet) querySet).getFile().getAbsolutePath())); - } } - assertEquals(files.size(), map.get(this.queriesFile.getAbsolutePath()).length); - FileUtils.deleteDirectory(f); - } - else{ - List expected = new ArrayList(); - List actual = new ArrayList(); - - for(String qStr : queryStr){ - expected.add(qStr); - } - - for(QuerySet querySet : map.get(this.queriesFile.getAbsolutePath())){ - assertEquals(1, querySet.size()); - actual.add(querySet.getQueryAtPos(0)); - } - assertEquals(expected.size(), actual.size()); - actual.removeAll(expected); - assertEquals(actual.size(),0); - assertEquals(queryStr.length, map.get(this.queriesFile.getAbsolutePath()).length); - } - - - } - - @Test - public void testCaching() throws IOException { - if(isUpdate){ - //nothing to check - return; - } - //Get queries file - Connection con = new Connection(); - con.setName("a"); - con.setEndpoint("http://test.com"); - Worker worker = getWorker(con, 1, "1"); - PatternQueryHandler qh = new PatternQueryHandler(Lists.newArrayList(worker), con.getEndpoint()); - qh.setOutputFolder(this.dir); - - Map queries1 = qh.generate(); - //check if folder exist this.dir/hashCode/ with |queries| files - int hashcode = org.aksw.iguana.cc.utils.FileUtils.getHashcodeFromFileContent(this.queriesFile.getAbsolutePath()); - File f = new File(this.dir+File.separator+hashcode); - - worker = getWorker(con, 12, "2"); - qh = new PatternQueryHandler(Lists.newArrayList(worker), con.getEndpoint()); - - qh.setOutputFolder(this.dir); - Map queries2 = qh.generate(); - - HashSet files = new HashSet(); - for(QuerySet querySet : queries1.get(this.queriesFile.getAbsolutePath())){ - if(querySet instanceof FileBasedQuerySet) { - files.add(((FileBasedQuerySet)querySet).getFile().getAbsolutePath()); - } - - } - for(QuerySet querySet : queries2.get(this.queriesFile.getAbsolutePath())){ - if(querySet instanceof FileBasedQuerySet) { - assertTrue(files.contains(((FileBasedQuerySet) querySet).getFile().getAbsolutePath())); - } - - } - - assertEquals(files.size(), queries2.get(this.queriesFile.getAbsolutePath()).length); - FileUtils.deleteDirectory(f); - } - - public Worker getWorker(Connection con, int id, String taskID){ - if(isUpdate){ - return new UPDATEWorker(taskID, con, this.queriesFile.getAbsolutePath(), null, null, null, null,null, id); - } - else { - return new SPARQLWorker(taskID, con, 
this.queriesFile.getAbsolutePath(), null, null, null, null, null, null, id); - } - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternQueryHandlerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternQueryHandlerTest.java deleted file mode 100644 index 41b4942e7..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/PatternQueryHandlerTest.java +++ /dev/null @@ -1,119 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.apache.jena.ext.com.google.common.collect.Sets; -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.utils.ServerMock; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.SPARQLWorker; -import org.apache.jena.query.ParameterizedSparqlString; -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryFactory; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.simpleframework.http.core.ContainerServer; -import org.simpleframework.transport.connect.SocketConnection; - -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.SocketAddress; -import java.util.*; - -import static org.junit.Assert.assertEquals; - -@RunWith(Parameterized.class) -public class PatternQueryHandlerTest { - - private static final int FAST_SERVER_PORT = 8024; - private final String service; - private static ServerMock fastServerContainer; - private static ContainerServer fastServer; - private static SocketConnection fastConnection; - - private final String queryStr; - private final Query expectedConversionQuery; - private final String[] vars; - private final String expectedReplacedQuery; - private final List expectedInstances; - private String dir = UUID.randomUUID().toString(); - - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{"SELECT * {?s ?p ?o}", "SELECT * {?s ?p ?o}", "SELECT * {?s ?p ?o}", new String[]{}, new String[]{"SELECT * {?s ?p ?o}"}}); - testData.add(new Object[]{"SELECT ?book {?book %%var0%% ?o}", "SELECT DISTINCT ?var0 {?book ?var0 ?o} LIMIT 2000", "SELECT ?book {?book ?var0 ?o}", new String[]{"var0"}, new String[]{"SELECT ?book {?book ?o}", "SELECT ?book {?book ?o}"}}); - testData.add(new Object[]{"SELECT ?book {?book %%var0%% %%var1%%}", "SELECT DISTINCT ?var1 ?var0 {?book ?var0 ?var1} LIMIT 2000", "SELECT ?book {?book ?var0 ?var1}", new String[]{"var0", "var1"}, new String[]{"SELECT ?book {?book \"Example Book 2\"}", "SELECT ?book {?book \"Example Book 1\"}"}}); - - return testData; - } - - @BeforeClass - public static void startServer() throws IOException { - fastServerContainer = new ServerMock(); - fastServer = new ContainerServer(fastServerContainer); - fastConnection = new SocketConnection(fastServer); - SocketAddress address1 = new InetSocketAddress(FAST_SERVER_PORT); - fastConnection.connect(address1); - } - - @AfterClass - public static void stopServer() throws IOException { - fastConnection.close(); - fastServer.stop(); - } - - public PatternQueryHandlerTest(String queryStr, String expectedConversionStr, String expectedReplacedQuery, String[] vars, String[] expectedInstances) throws IOException { - this.service = "http://localhost:8024"; - - this.queryStr = queryStr; - this.expectedConversionQuery = 
QueryFactory.create(expectedConversionStr); - this.vars = vars; - this.expectedReplacedQuery=expectedReplacedQuery; - this.expectedInstances = Lists.newArrayList(expectedInstances); - } - - @Test - public void testReplacement(){ - Set varNames = new HashSet(); - String replacedQuery = getHandler().replaceVars(this.queryStr, varNames); - assertEquals(expectedReplacedQuery, replacedQuery); - assertEquals(Sets.newHashSet(vars), varNames); - } - - - @Test - public void testPatternExchange(){ - List instances = getHandler().getInstances(queryStr); - assertEquals(expectedInstances, instances); - - } - - @Test - public void testConversion(){ - // convert query - // retrieve instances - PatternQueryHandler qh = getHandler(); - - ParameterizedSparqlString pss = new ParameterizedSparqlString(); - pss.setCommandText(qh.replaceVars(queryStr, Sets.newHashSet())); - - Query q = qh.convertToSelect(pss, Sets.newHashSet(vars)); - assertEquals(expectedConversionQuery, q); - } - - private PatternQueryHandler getHandler(){ - Connection con = new Connection(); - con.setName("a"); - con.setEndpoint("http://test.com"); - Worker worker = new SPARQLWorker("1", con, "empty.txt", null,null,null,null,null,null, 1); - - PatternQueryHandler qh = new PatternQueryHandler(Lists.newArrayList(worker), service); - return qh; - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/UpdatePathTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/UpdatePathTest.java deleted file mode 100644 index ced92dc0e..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/query/impl/UpdatePathTest.java +++ /dev/null @@ -1,46 +0,0 @@ -package org.aksw.iguana.cc.query.impl; - -import org.aksw.iguana.cc.query.set.QuerySet; -import org.aksw.iguana.cc.query.set.impl.FileBasedQuerySet; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; -import org.junit.Test; - -import java.io.File; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class UpdatePathTest { - - @Test - public void checkUpdatePath(){ - Connection con = new Connection(); - con.setName("a"); - con.setEndpoint("http://test.com"); - String updateDir = "src/test/resources/updates/"; - Worker worker = new UPDATEWorker("1", con, updateDir, null, null, null, null,null, 1); - - InstancesQueryHandler qh = new InstancesQueryHandler(Lists.newArrayList(worker)); - Map map = qh.generate(); - assertEquals(1, map.size()); - QuerySet[] updates = map.get(updateDir); - assertEquals(2, updates.length); - List paths = new ArrayList(); - for(File f: new File(updateDir).listFiles()){ - paths.add(f.getAbsolutePath()); - } - assertEquals(2, paths.size()); - for(QuerySet actual : updates){ - assertTrue(actual instanceof FileBasedQuerySet); - paths.remove(((FileBasedQuerySet)actual).getFile().getAbsolutePath()); - } - assertEquals(0, paths.size()); - } - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupStorage.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupStorage.java deleted file mode 100644 index 94c7694f7..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupStorage.java +++ /dev/null @@ -1,49 +0,0 @@ -package org.aksw.iguana.cc.tasks; - -import 
org.aksw.iguana.rp.storage.Storage; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; - -import java.util.HashSet; -import java.util.Properties; -import java.util.Set; - -public class MockupStorage implements Storage { - private Model m = ModelFactory.createDefaultModel(); - - private Set meta = new HashSet(); - - @Override - public void addData(Model data) { - - m.add(data); - } - - @Override - public void addMetaData(Properties p) { - //do nothing - meta.add(p); - } - - @Override - public void commit() { - //do nothing - } - - @Override - public void endTask(String taskID) { - //do nothing - } - - public Model getModel() { - return m; - } - - public void setModel(Model m) { - this.m = m; - } - - public Set getMeta() { - return meta; - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupTask.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupTask.java deleted file mode 100644 index a077916b8..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/MockupTask.java +++ /dev/null @@ -1,15 +0,0 @@ -package org.aksw.iguana.cc.tasks; - - -public class MockupTask extends AbstractTask{ - - public MockupTask(String empty){ - } - - - @Override - public void execute() { - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/impl/StresstestTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/impl/StresstestTest.java deleted file mode 100644 index 66d60e2e4..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/tasks/impl/StresstestTest.java +++ /dev/null @@ -1,162 +0,0 @@ -package org.aksw.iguana.cc.tasks.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.tasks.MockupStorage; -import org.aksw.iguana.cc.worker.MockupWorker; -import org.aksw.iguana.cc.worker.Worker; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.experiment.ExperimentManager; -import org.aksw.iguana.rp.metrics.MetricManager; -import org.aksw.iguana.rp.metrics.impl.EachQueryMetric; -import org.aksw.iguana.rp.storage.StorageManager; -import org.apache.commons.io.FileUtils; -import org.junit.Test; - -import java.io.File; -import java.io.IOException; -import java.time.Instant; -import java.util.*; - -import static org.junit.Assert.assertEquals; - -public class StresstestTest { - - // test correct # of worker creation, meta data and warmup - private String[] queries = new String[]{"a", "b"}; - private String[] queries2 = new String[]{"b", "c"}; - - private ArrayList getWorkers(int threads, String[] queries){ - ArrayList workers = new ArrayList(); - HashMap workerConfig = new HashMap(); - workerConfig.put("className", MockupWorker.class.getCanonicalName()); - workerConfig.put("queries", queries); - workerConfig.put("threads", threads); - workers.add(workerConfig); - return workers; - } - - private Connection getConnection(){ - Connection con = new Connection(); - con.setName("test"); - con.setEndpoint("test/sparql"); - return con; - } - - private LinkedHashMap getQueryHandler(){ - LinkedHashMap queryHandler = new LinkedHashMap(); - queryHandler.put("className", "InstancesQueryHandler"); - return queryHandler; - } - - private void init(){ - StorageManager storageManager = StorageManager.getInstance(); - MetricManager mmanger = MetricManager.getInstance(); - mmanger.addMetric(new EachQueryMetric()); - ExperimentManager rpController = ExperimentManager.getInstance(); - Properties p = new 
Properties(); - p.put(COMMON.RECEIVE_DATA_START_KEY, "true"); - p.put(COMMON.EXPERIMENT_TASK_ID_KEY, "1/1/1"); - MockupStorage storage = new MockupStorage(); - rpController.receiveData(p); - storageManager.addStorage(storage); - } - - @Test - public void checkStresstestNoQM() throws IOException { - - Stresstest task = new Stresstest( getWorkers(2, queries), getQueryHandler(), 10); - task.qhCacheFolder=UUID.randomUUID().toString(); - task.init(new String[]{"1", "1/1", "1/1/1"}, "test", getConnection()); - - init(); - - Instant start = Instant.now(); - task.execute(); - Instant end = Instant.now(); - - //2 queries in mix, 10 executions on 2 workers -> 40 queries - assertEquals(40, task.getExecutedQueries()); - FileUtils.deleteDirectory(new File(task.qhCacheFolder)); - } - - @Test - public void checkStresstestTL() throws IOException { - - Stresstest task = new Stresstest(5000, getWorkers(2, queries), getQueryHandler()); - task.qhCacheFolder=UUID.randomUUID().toString(); - - task.init(new String[]{"1", "1/1", "1/1/1"}, "test", getConnection()); - - init(); - - Instant start = Instant.now(); - task.execute(); - Instant end = Instant.now(); - //allow about 200 milliseconds for init and teardown overhead - assertEquals(5000.0, end.toEpochMilli()-start.toEpochMilli(), 300.0); - FileUtils.deleteDirectory(new File(task.qhCacheFolder)); - - } - - @Test - public void warmupTest() throws IOException { - //check if not executing - Stresstest task = new Stresstest(5000, getWorkers(2, queries), getQueryHandler()); - task.qhCacheFolder=UUID.randomUUID().toString(); - - task.init(new String[]{"1", "1/1", "1/1/1"}, "test", getConnection()); - Instant start = Instant.now(); - assertEquals(0, task.warmup()); - Instant end = Instant.now(); - assertEquals(0.0, end.toEpochMilli()-start.toEpochMilli(), 5.0); - //check if executing - - LinkedHashMap warmup = new LinkedHashMap(); - warmup.put("workers", getWorkers(2, queries)); - warmup.put("timeLimit", 350); - FileUtils.deleteDirectory(new File(task.qhCacheFolder)); - - task = new Stresstest(5000, getWorkers(2, queries), getQueryHandler(), warmup); - task.qhCacheFolder=UUID.randomUUID().toString(); - - task.init(new String[]{"1", "1/1", "1/1/1"}, "test", getConnection()); - start = Instant.now(); - long queriesExecuted = task.warmup(); - end = Instant.now(); - // might sadly be 400 or 500 as the warmup works in steps of 100, plus overhead; as long as 6 queries were executed it's fine - assertEquals(350.0, end.toEpochMilli()-start.toEpochMilli(), 250.0); - //each worker could execute 3 queries - assertEquals(6, queriesExecuted); - FileUtils.deleteDirectory(new File(task.qhCacheFolder)); - - } - - @Test - public void workerCreationTest() throws IOException { - ArrayList worker = getWorkers(2, queries); - worker.addAll(getWorkers(1, queries2)); - Stresstest task = new Stresstest(5000, worker, getQueryHandler()); - task.qhCacheFolder=UUID.randomUUID().toString(); - - task.init(new String[]{"1", "1/1", "1/1/1"}, "test", getConnection()); - List workers = task.workers; - assertEquals(3, workers.size()); - int q1=0; - int q2=0; - // a little bit hacky but should be sufficient - for(Worker w : workers){ - MockupWorker mockupWorker = (MockupWorker)w; - String[] queries = mockupWorker.getStringQueries(); - if(queries.hashCode()==this.queries.hashCode()){ - q1++; - } - else if(queries.hashCode()==this.queries2.hashCode()){ - q2++; - } - } - assertEquals(2, q1); - assertEquals(1, q2); - FileUtils.deleteDirectory(new File(task.qhCacheFolder)); - - } -} diff --git 
a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/CLIProcessManagerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/CLIProcessManagerTest.java deleted file mode 100644 index e6a232935..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/CLIProcessManagerTest.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.junit.Test; - -import java.io.IOException; - -import static org.junit.Assert.*; - -public class CLIProcessManagerTest { - - @Test - public void execTest() throws InterruptedException { - //create process - Process p = CLIProcessManager.createProcess("echo \"abc\"; wait 1m"); - //destroy process - assertTrue(p.isAlive()); - CLIProcessManager.destroyProcess(p); - //give OS a little bit of time to destroy process - Thread.sleep(50); - assertFalse(p.isAlive()); - - } - - @Test - public void countLinesSuccessfulTest() throws IOException, InterruptedException { - //create - Process p = CLIProcessManager.createProcess("echo \"abc\"; wait 100; echo \"t\nt\nabc: test ended suffix\"; wait 1m;"); - //count lines until "test ended" occurred - Thread.sleep(100); - assertTrue(CLIProcessManager.isReaderReady(p)); - - assertEquals(3, CLIProcessManager.countLinesUntilStringOccurs(p, "test ended", "failed")); - //destroy - CLIProcessManager.destroyProcess(p); - //give OS a little bit of time to destroy process - Thread.sleep(50); - assertFalse(p.isAlive()); - - } - - @Test - public void countLinesFailTest() throws IOException, InterruptedException { - //create - Process p = CLIProcessManager.createProcess("echo \"abc\"; wait 100; echo \"abc: failed suffix\"; wait 1m;"); - Thread.sleep(100); - assertTrue(CLIProcessManager.isReaderReady(p)); - //count lines until "test ended" occurred - try{ - CLIProcessManager.countLinesUntilStringOccurs(p, "test ended", "failed"); - assertTrue("Test did not end in IOException", false); - }catch (IOException e){ - assertTrue(true); - } - //destroy - CLIProcessManager.destroyProcess(p); - //give OS a little bit of time to destroy process - Thread.sleep(50); - assertFalse(p.isAlive()); - - } - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java deleted file mode 100644 index bb86c3ba8..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java +++ /dev/null @@ -1,47 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.junit.Test; - -import java.io.File; - -import java.io.IOException; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class FileUtilsTest { - - @Test - public void countLinesTest() throws IOException { - //get test file - File f = new File("src/test/resources/fileUtils.txt"); - //count lines - assertEquals(6, FileUtils.countLines(f)); - } - - @Test - public void readLineAtTest() throws IOException { - //get test file - File f = new File("src/test/resources/fileUtils.txt"); - //read lines at positions 0, 2 and 4 - assertEquals("a", FileUtils.readLineAt(0, f)); - assertEquals("abc", FileUtils.readLineAt(2, f)); - //is at actual line 16, but as all the lines between lines 4-10 and 12-15 are empty this should be the 4th - assertEquals("dfe", FileUtils.readLineAt(4, f)); - //read line at -1 - assertEquals("", FileUtils.readLineAt(-1, f)); - } - - @Test - public void readTest() throws IOException { - //read whole content - String data = 
FileUtils.readFile("src/test/resources/fileUtils.txt"); - String expected = "a\nab\nabc\n\n\n\n\n\n\n\n\\n\n\n\n\n\ndfe\n\ntest"; - assertEquals(expected, data); - } - - @Test - public void getHashTest(){ - //check if hash abs works - assertTrue(FileUtils.getHashcodeFromFileContent("src/test/resources/fileUtils.txt")>0); - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/SPARQLQueryStatisticsTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/SPARQLQueryStatisticsTest.java deleted file mode 100644 index be05c8757..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/SPARQLQueryStatisticsTest.java +++ /dev/null @@ -1,62 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.apache.jena.query.Query; -import org.apache.jena.query.QueryFactory; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; - -import java.util.ArrayList; -import java.util.Collection; - -import static org.junit.Assert.assertEquals; - -@RunWith(Parameterized.class) -public class SPARQLQueryStatisticsTest { - - - private final String query; - private final double size; - private final int[] stats; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{"SELECT * {?s ?p ?o}", 1, new int[]{0, 0, 0, 0, 0, 0, 0, 0, 1}}); - testData.add(new Object[]{"SELECT * {?s ?p ?o. ?o ?p1 ?t}", 1, new int[]{0, 0, 0, 0, 0, 0, 0, 0, 2}}); - testData.add(new Object[]{"SELECT * {?s ?p ?o. ?o ?p1 ?t. FILTER (?t = \"test\")}", 1, new int[]{0, 1, 0, 0, 0, 0, 0, 0, 2}}); - //implicit groupBY as aggr - testData.add(new Object[]{"SELECT (COUNT(?s) AS ?co) {?s ?p ?o. ?o ?p1 ?t. FILTER (?t = \"test\")}", 1, new int[]{1, 1, 0, 0, 0, 1, 0, 0, 2}}); - testData.add(new Object[]{"SELECT * {?s ?p ?o. ?o ?p1 ?t. FILTER (?t = \"test\")} ORDER BY ?s", 1, new int[]{0, 1, 0, 0, 0, 0, 0, 1, 2}}); - testData.add(new Object[]{"SELECT ?s {?s ?p ?o. ?o ?p1 ?t. 
FILTER (?t = \"test\")} GROUP BY ?s", 1, new int[]{0, 1, 0, 0, 0, 1, 0, 0, 2}}); - testData.add(new Object[]{"SELECT ?o {{?s ?p ?o OPTIONAL {?o ?u ?s} } UNION { ?o ?p1 ?t}} OFFSET 10", 1, new int[]{0, 0, 1, 1, 0, 0, 1, 0, 3}}); - //implicit groupBY as aggr - testData.add(new Object[]{"SELECT * {?s ?p ?o} HAVING(COUNT(?s) > 1)", 1, new int[]{1, 0, 0, 0, 1, 1, 0, 0, 1}}); - - return testData; - } - - public SPARQLQueryStatisticsTest(String query, double size, int[] stats){ - this.query=query; - this.size=size; - this.stats=stats; - } - - @Test - public void checkCorrectStats(){ - SPARQLQueryStatistics qs = new SPARQLQueryStatistics(); - Query q = QueryFactory.create(this.query); - qs.getStatistics(q); - assertEquals(stats[0], qs.aggr); - assertEquals(stats[1], qs.filter); - assertEquals(stats[2], qs.optional); - assertEquals(stats[3], qs.union); - assertEquals(stats[4], qs.having); - assertEquals(stats[5], qs.groupBy); - assertEquals(stats[6], qs.offset); - assertEquals(size, qs.size, 0); - assertEquals(stats[7], qs.orderBy); - assertEquals(stats[8], qs.triples); - } - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/ServerMock.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/ServerMock.java deleted file mode 100644 index b4485ae3c..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/utils/ServerMock.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.aksw.iguana.cc.utils; - -import org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor; -import org.apache.commons.io.FileUtils; -import org.simpleframework.http.Request; -import org.simpleframework.http.Response; -import org.simpleframework.http.Status; -import org.simpleframework.http.core.Container; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.io.IOException; - -/** - * Server Mock representing a TS - * - * @author f.conrads - * - */ -public class ServerMock implements Container { - - private static final Logger LOGGER = LoggerFactory.getLogger(ServerMock.class); - private String actualContent; - - - @Override - public void handle(Request request, Response resp) { - String content=null; - try { - content = request.getContent(); - } catch (IOException e) { - LOGGER.error("Got exception.", e); - } - resp.setCode(Status.OK.code); - resp.setContentType(SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON); - try { - //write answer - String resultStr = FileUtils.readFileToString(new File("src/test/resources/sparql-json-response.json"), "UTF-8"); - resp.getOutputStream().write(resultStr.getBytes()); - resp.getOutputStream().close(); - } catch (IOException e) { - LOGGER.error("Could not close Response Output Stream"); - } - } - - -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/HTTPWorkerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/HTTPWorkerTest.java deleted file mode 100644 index 2e89ad378..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/HTTPWorkerTest.java +++ /dev/null @@ -1,233 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.lang.impl.SPARQLLanguageProcessor; -import org.aksw.iguana.cc.query.impl.InstancesQueryHandler; -import org.aksw.iguana.cc.utils.FileUtils; -import org.aksw.iguana.cc.worker.impl.HttpGetWorker; -import org.aksw.iguana.cc.worker.impl.HttpPostWorker; -import org.aksw.iguana.cc.worker.impl.HttpWorker; -import org.aksw.iguana.commons.constants.COMMON; 
-import org.apache.jena.ext.com.google.common.collect.Lists; -import org.junit.*; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.simpleframework.http.core.ContainerServer; -import org.simpleframework.transport.connect.SocketConnection; - -import java.io.File; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.SocketAddress; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -import static org.junit.Assert.*; - -@RunWith(Parameterized.class) -public class HTTPWorkerTest { - - private static final int FAST_SERVER_PORT = 8025; - private final String service; - private static WorkerServerMock fastServerContainer; - private static ContainerServer fastServer; - private static SocketConnection fastConnection; - private final Boolean isPost; - - private String queriesFile="src/test/resources/workers/single-query.txt"; - private String responseType; - private String parameter; - private String query; - private String queryID; - private boolean isFail; - private String outputDir; - private Integer fixedLatency; - private Integer gaussianLatency; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - //get tests - testData.add(new Object[]{"Random Text", "doc1", "text/plain", "text", 100,50, false, false}); - testData.add(new Object[]{UUID.randomUUID().toString(), UUID.randomUUID().toString(), "text/plain", "text", 100,50, false, false}); - - testData.add(new Object[]{"Random Text", "doc1", "text/plain", "test", 100,50, true, false}); - testData.add(new Object[]{"Random Text", "doc1", null, "text", 100,50, false, false}); - - //post tests - testData.add(new Object[]{"Random Text", "doc1", "text/plain", "text", 100,50, false, true}); - testData.add(new Object[]{UUID.randomUUID().toString(), UUID.randomUUID().toString(), "text/plain", "text", 100,50, false, true}); - - testData.add(new Object[]{"Random Text", "doc1", "text/plain", "test", 100,50, true, true}); - testData.add(new Object[]{"Random Text", "doc1", "text/plain", null, 100,50, true, true}); - testData.add(new Object[]{"Random Text", "doc1", null, "text", 100,50, false, true}); - - return testData; - } - - @BeforeClass - public static void startServer() throws IOException { - fastServerContainer = new WorkerServerMock(); - fastServer = new ContainerServer(fastServerContainer); - fastConnection = new SocketConnection(fastServer); - SocketAddress address1 = new InetSocketAddress(FAST_SERVER_PORT); - fastConnection.connect(address1); - - } - - @AfterClass - public static void stopServer() throws IOException { - fastConnection.close(); - fastServer.stop(); - } - - - public HTTPWorkerTest(String query, String queryID, String responseType, String parameter, Integer fixedLatency, Integer gaussianLatency, Boolean isFail, Boolean isPost){ - this.query=query; - this.queryID=queryID; - this.responseType=responseType; - this.parameter=parameter; - this.isFail=isFail; - this.isPost=isPost; - this.fixedLatency=fixedLatency; - this.gaussianLatency=gaussianLatency; - this.service = "http://localhost:8025"; - //warmup - getWorker("1").executeQuery("test", "test"); - } - - @Before - public void setOutputDir(){ - this.outputDir = UUID.randomUUID().toString(); - } - - @After - public void deleteFolder() throws IOException { - org.apache.commons.io.FileUtils.deleteDirectory(new 
File(outputDir)); - } - - @Test - public void testExecution() throws InterruptedException, IOException { - // check if correct param name was set - String taskID="123/1/1/"; - - HttpWorker getWorker = getWorker(taskID); - - getWorker.executeQuery(query, queryID); - //as the result processing is in the background we have to wait for it. - Thread.sleep(1000); - Collection results = getWorker.popQueryResults(); - assertEquals(1, results.size()); - Properties p = results.iterator().next(); - - assertEquals(taskID, p.get(COMMON.EXPERIMENT_TASK_ID_KEY)); - - assertEquals(queryID, p.get(COMMON.QUERY_ID_KEY)); - assertEquals(180000.0, p.get(COMMON.PENALTY)); - assertTrue(((Properties)p.get(COMMON.EXTRA_META_KEY)).isEmpty()); - if(isPost){ - assertEquals(200.0, (double) p.get(COMMON.RECEIVE_DATA_TIME), 20.0); - } - else { - assertEquals(100.0, (double) p.get(COMMON.RECEIVE_DATA_TIME), 20.0); - } - if(isFail){ - assertEquals(-2l, p.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(0l, p.get(COMMON.RECEIVE_DATA_SIZE)); - } - else{ - assertEquals(1l, p.get(COMMON.RECEIVE_DATA_SUCCESS)); - if(responseType!= null && responseType.equals("text/plain")) { - assertEquals(4l, p.get(COMMON.RECEIVE_DATA_SIZE)); - } - if(responseType==null || responseType.equals(SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON)){ - assertEquals(2l, p.get(COMMON.RECEIVE_DATA_SIZE)); - } - } - assertEquals(1, getWorker.getExecutedQueries()); - } - - private HttpWorker getWorker(String taskID) { - return getWorker(taskID, null, null); - } - - private HttpWorker getWorker(String taskID, Integer latencyFixed, Integer gaussianFixed) { - if(isPost){ - return new HttpPostWorker(taskID, getConnection(), this.queriesFile, "application/json", this.responseType,this.parameter, null, null, null, latencyFixed, gaussianFixed, null, 1); - } - return new HttpGetWorker(taskID, getConnection(), this.queriesFile, this.responseType,this.parameter, null, null, null, latencyFixed, gaussianFixed, null, 1); - - } - - private Connection getConnection() { - Connection con = new Connection(); - con.setName("test"); - con.setPassword("test"); - con.setUser("abc"); - con.setEndpoint(service); - con.setUpdateEndpoint(service); - return con; - } - - @Test - public void testWait() throws InterruptedException { - String taskID="123/1/1/"; - HttpWorker getWorker = getWorker(taskID, this.fixedLatency, this.gaussianLatency); - InstancesQueryHandler qh = new InstancesQueryHandler(Lists.newArrayList(getWorker)); - qh.setOutputFolder(outputDir); - qh.generate(); - ExecutorService executorService = Executors.newFixedThreadPool(1); - executorService.submit(getWorker); - long waitMS=850; - Thread.sleep(waitMS); - getWorker.stopSending(); - executorService.shutdownNow(); - //get expected delay - int expectedDelay = 100+this.fixedLatency+this.gaussianLatency; - if(isPost){ - expectedDelay+=100; - } - double expectedQueries = waitMS*1.0/expectedDelay; - double deltaUp = waitMS*1.0/(expectedDelay+gaussianLatency); - double deltaDown = waitMS*1.0/(expectedDelay-gaussianLatency); - double delta = Math.ceil((deltaDown-deltaUp)/2); - assertEquals(expectedQueries, 1.0*getWorker.getExecutedQueries(), delta); - } - - @Test - public void testWorkflow() throws InterruptedException, IOException { - // check as long as not endsignal - String taskID="123/1/1/"; - int queryHash = FileUtils.getHashcodeFromFileContent(this.queriesFile); - - HttpWorker getWorker = getWorker(taskID); - InstancesQueryHandler qh = new InstancesQueryHandler(Lists.newArrayList(getWorker)); - 
qh.setOutputFolder(outputDir); - qh.generate(); - ExecutorService executorService = Executors.newFixedThreadPool(1); - executorService.submit(getWorker); - Thread.sleep(450); - getWorker.stopSending(); - executorService.shutdownNow(); - // check correct executedQueries - long expectedSize=4; - if(isPost){ - expectedSize=2; - } - assertEquals(expectedSize, getWorker.getExecutedQueries()); - // check pop query results - Collection results = getWorker.popQueryResults(); - for(Properties p : results){ - assertEquals(queryHash, p.get(COMMON.QUERY_HASH)); - } - assertEquals(expectedSize, results.size()); - for(long i=1;i=queries.length){ - counter=0; - } - queryStr.append(queries[counter]); - queryID.append("query").append(counter); - counter++; - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/UPDATEWorkerTest.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/UPDATEWorkerTest.java deleted file mode 100644 index 659a6953f..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/UPDATEWorkerTest.java +++ /dev/null @@ -1,158 +0,0 @@ -package org.aksw.iguana.cc.worker; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.query.impl.InstancesQueryHandler; -import org.aksw.iguana.cc.worker.impl.SPARQLWorker; -import org.aksw.iguana.cc.worker.impl.UPDATEWorker; -import org.aksw.iguana.cc.worker.impl.update.UpdateTimer; -import org.aksw.iguana.commons.time.TimeUtils; -import org.apache.commons.io.FileUtils; -import org.apache.jena.ext.com.google.common.collect.Lists; -import org.junit.*; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.simpleframework.http.core.ContainerServer; -import org.simpleframework.transport.connect.SocketConnection; - -import java.io.File; -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.SocketAddress; -import java.time.Instant; -import java.util.*; - -import static org.junit.Assert.assertEquals; - - -@RunWith(Parameterized.class) -public class UPDATEWorkerTest { - - private static final int FAST_SERVER_PORT = 8025; - private final String service; - private static WorkerServerMock fastServerContainer; - private static ContainerServer fastServer; - private static SocketConnection fastConnection; - private final String timerStrategy; - private String queriesFile; - private String outputDir; - private int expectedExec; - - @Parameterized.Parameters - public static Collection data(){ - Collection testData = new ArrayList(); - testData.add(new Object[]{"none", "src/test/resources/workers/updates", 4}); - testData.add(new Object[]{"fixed", "src/test/resources/workers/updates", 4}); - testData.add(new Object[]{"distributed", "src/test/resources/workers/updates", 4}); - testData.add(new Object[]{"none", "src/test/resources/workers/updates.txt", 3}); - testData.add(new Object[]{"fixed", "src/test/resources/workers/updates.txt", 3}); - testData.add(new Object[]{"distributed", "src/test/resources/workers/updates.txt", 3}); - return testData; - } - - @BeforeClass - public static void startServer() throws IOException { - fastServerContainer = new WorkerServerMock(true); - fastServer = new ContainerServer(fastServerContainer); - fastConnection = new SocketConnection(fastServer); - SocketAddress address1 = new InetSocketAddress(FAST_SERVER_PORT); - fastConnection.connect(address1); - - } - - @AfterClass - public static void stopServer() throws IOException { - fastConnection.close(); - fastServer.stop(); - } - - public 
UPDATEWorkerTest(String timerStrategy, String queriesFile, int expectedExec){ - this.service="http://localhost:8025/test"; - this.timerStrategy=timerStrategy; - this.queriesFile=queriesFile; - this.expectedExec=expectedExec; - //warmup - SPARQLWorker worker = new SPARQLWorker("", getConnection(), this.queriesFile, null, null, null, null, null, null, 1); - worker.executeQuery("INSERT DATA {", "1"); - fastServerContainer.getTimes().clear(); - fastServerContainer.getEncodedAuth().clear(); - } - - @Before - public void createDir(){ - this.outputDir= UUID.randomUUID().toString(); - } - - @After - public void cleanup() throws IOException { - FileUtils.deleteDirectory(new File(outputDir)); - fastServerContainer.getTimes().clear(); - fastServerContainer.getEncodedAuth().clear(); - } - - // creds correct - // stop sending after iteration - // correct timer strategy - // correct waiting in sum - @Test - public void testWorkflow() throws InterruptedException { - String taskID="124/1/1"; - Integer timeLimit=2000; - Connection con = getConnection(); - UPDATEWorker worker = new UPDATEWorker(taskID, con, this.queriesFile, this.timerStrategy, null, timeLimit, null, null, 1); - InstancesQueryHandler qh = new InstancesQueryHandler(Lists.newArrayList(worker)); - qh.setOutputFolder(this.outputDir); - qh.generate(); - worker.run(); - Instant now = worker.startTime; - - Thread.sleep(2000); - assertEquals(this.expectedExec, worker.getExecutedQueries()); - - Set creds = fastServerContainer.getEncodedAuth(); - assertEquals(1, creds.size()); - assertEquals(con.getUser()+":"+con.getPassword(), creds.iterator().next()); - List requestTimes = fastServerContainer.getTimes(); - long noOfQueries = worker.getNoOfQueries(); - Double fixedValue = timeLimit/noOfQueries*1.0; - Instant pastInstant = requestTimes.get(0); - - long remainingQueries = noOfQueries-1; - long remainingTime=timeLimit-Double.valueOf(TimeUtils.durationInMilliseconds(now, pastInstant)).longValue(); - for(int i=1;i requestTimes = new ArrayList(); - private Set encodedAuth = new HashSet(); - - public WorkerServerMock() { - this(false); - } - - public WorkerServerMock(Boolean ignore){ - super(); - this.ignore =ignore; - } - - @Override - public void handle(Request request, Response resp) { - String content=null; - requestTimes.add(Instant.now()); - if(ignore){ - String authValue = request.getValue("Authorization").replace("Basic ", ""); - this.encodedAuth.add(new String(Base64.getDecoder().decode(authValue))); - waitForMS(95); - try { - content = request.getContent(); - }catch (IOException e){ - LOGGER.error("", e); - } - } - else if(request.getMethod().equals("GET")) { - waitForMS(95); - content=request.getParameter("text"); - } - else if(request.getMethod().equals("POST")){ - waitForMS(195); - try { - String postContent = request.getContent(); - if(postContent.startsWith("{ \"text\":")){ - content=postContent; - } - } catch (IOException e) { - LOGGER.error("", e); - } - } - - if(content!=null){ - handleOK(resp, request.getValue("accept")); - } - else{ - handleFail(resp, request.getValue("accept")); - } - - } - - private void waitForMS(long ms){ - try { - Thread.sleep(ms); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - public void handleFail(Response resp, String acceptType){ - resp.setCode(Status.BAD_REQUEST.code); - String cType = acceptType; - if(acceptType==null){ - cType = SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON; - } - resp.setContentType(cType); - try { - //write answer - resp.getOutputStream().write("".getBytes()); - 
resp.getOutputStream().close(); - } catch (IOException e) { - LOGGER.error("Could not close Response Output Stream"); - } - } - - public void handleUnAuthorized(Response resp){ - resp.setCode(Status.UNAUTHORIZED.code); - try { - //write answer - resp.getOutputStream().write("".getBytes()); - resp.getOutputStream().close(); - } catch (IOException e) { - LOGGER.error("Could not close Response Output Stream"); - } - } - - public void handleOK(Response resp, String acceptType){ - resp.setCode(Status.OK.code); - String cType = acceptType; - if(acceptType==null){ - cType = SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON; - } - resp.setContentType(cType); - - try { - //write answer - String resultStr=""; - if(cType.equals("text/plain")){ - resultStr="a\nb\nc\nd"; - } - else if(cType.equals(SPARQLLanguageProcessor.QUERY_RESULT_TYPE_JSON)) { - resultStr = FileUtils.readFileToString(new File("src/test/resources/sparql-json-response.json"), "UTF-8"); - } - resp.getOutputStream().write(resultStr.getBytes()); - resp.getOutputStream().close(); - } catch (IOException e) { - LOGGER.error("Could not close Response Output Stream"); - } - } - - public List getTimes(){ - return this.requestTimes; - } - - public Set getEncodedAuth() { - return encodedAuth; - } -} diff --git a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/impl/CLIWorkersTests.java b/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/impl/CLIWorkersTests.java deleted file mode 100644 index 81c02433d..000000000 --- a/iguana.corecontroller/src/test/java/org/aksw/iguana/cc/worker/impl/CLIWorkersTests.java +++ /dev/null @@ -1,166 +0,0 @@ -package org.aksw.iguana.cc.worker.impl; - -import org.aksw.iguana.cc.config.elements.Connection; -import org.aksw.iguana.cc.utils.FileUtils; -import org.aksw.iguana.commons.constants.COMMON; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; - -import java.io.File; -import java.io.IOException; -import java.util.Collection; -import java.util.Properties; -import java.util.UUID; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -public class CLIWorkersTests { - - private File f; - - @Before - public void createFile(){ - String file = UUID.randomUUID().toString(); - this.f = new File(file); - ; - } - - @After - public void deleteFile(){ - f.delete(); - } - - @Test - public void checkMultipleProcesses(){ - Connection con = new Connection(); - con.setEndpoint("src/test/resources/cli/echoinput.sh "+f.getAbsolutePath()); - MultipleCLIInputWorker worker = new MultipleCLIInputWorker("123/1/1", con, "src/test/resources/update/empty.nt", "init finished", "rows", "query fail", 2, null, null, null,null, 1); - assertEquals(2, worker.processList.size()); - for(Process p : worker.processList){ - assertTrue(p.isAlive()); - } - //should run normally - assertEquals(0, worker.currentProcessId); - worker.executeQuery("test", "1"); - assertEquals(0, worker.currentProcessId); - worker.executeQuery("quit", "2"); - worker.executeQuery("test", "1"); - assertEquals(1, worker.currentProcessId); - assertEquals(2, worker.processList.size()); - - for(Process p : worker.processList){ - assertTrue(p.isAlive()); - } - worker.executeQuery("quit", "2"); - worker.executeQuery("test", "1"); - assertEquals(0, worker.currentProcessId); - } - - @Test - public void checkFileInput() throws IOException { - //check if file is created and used - Connection con = new Connection(); - String dir = UUID.randomUUID().toString(); - 
con.setEndpoint("src/test/resources/cli/echoinput.sh "+f.getAbsolutePath()); - CLIInputFileWorker worker = new CLIInputFileWorker("123/1/1", con, "src/test/resources/update/empty.nt", "init finished", "rows", "query fail", 1, dir, null, null, null, null, 1); - worker.executeQuery("test", "1"); - assertEquals("test", FileUtils.readFile(dir+File.separator+"tmpquery.sparql")); - worker.executeQuery("SELECT whatever", "1"); - assertEquals("SELECT whatever", FileUtils.readFile(dir+File.separator+"tmpquery.sparql")); - assertEquals("tmpquery.sparql\ntmpquery.sparql\n", FileUtils.readFile(f.getAbsolutePath())); - - org.apache.commons.io.FileUtils.deleteDirectory(new File(dir)); - worker.stopSending(); - - } - - @Test - public void checkInput() throws IOException { - // check if connection stays - Connection con = new Connection(); - - con.setEndpoint("src/test/resources/cli/echoinput.sh "+f.getAbsolutePath()); - CLIInputWorker worker = new CLIInputWorker("123/1/1", con, "src/test/resources/update/empty.nt", "init finished", "rows", "query fail", null, null, null, null, 1); - worker.executeQuery("test", "1"); - worker.executeQuery("SELECT whatever", "1"); - assertEquals("test\nSELECT whatever\n", FileUtils.readFile(f.getAbsolutePath())); - Collection succeededResults = worker.popQueryResults(); - assertEquals(2, succeededResults.size()); - Properties succ = succeededResults.iterator().next(); - assertEquals(COMMON.QUERY_SUCCESS, succ.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(3l, succ.get(COMMON.RECEIVE_DATA_SIZE)); - succ = succeededResults.iterator().next(); - assertEquals(COMMON.QUERY_SUCCESS, succ.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(3l, succ.get(COMMON.RECEIVE_DATA_SIZE)); - - // check fail - worker.executeQuery("fail", "2"); - assertEquals("test\nSELECT whatever\nfail\n", FileUtils.readFile(f.getAbsolutePath())); - Collection failedResults = worker.popQueryResults(); - assertEquals(1, failedResults.size()); - Properties fail = failedResults.iterator().next(); - assertEquals(COMMON.QUERY_UNKNOWN_EXCEPTION, fail.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(0l, fail.get(COMMON.RECEIVE_DATA_SIZE)); - worker.stopSending(); - - - } - - @Test - public void checkPrefix() throws IOException { - // check if connection stays - Connection con = new Connection(); - - con.setEndpoint("src/test/resources/cli/echoinput.sh "+f.getAbsolutePath()); - CLIInputPrefixWorker worker = new CLIInputPrefixWorker("123/1/1", con, "src/test/resources/update/empty.nt", "init finished", "rows", "query fail", 1, "prefix", "suffix", null, null, null, null, 1); - worker.executeQuery("test", "1"); - worker.executeQuery("SELECT whatever", "1"); - assertEquals("prefix test suffix\nprefix SELECT whatever suffix\n", FileUtils.readFile(f.getAbsolutePath())); - Collection succeededResults = worker.popQueryResults(); - assertEquals(2, succeededResults.size()); - Properties succ = succeededResults.iterator().next(); - assertEquals(COMMON.QUERY_SUCCESS, succ.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(3l, succ.get(COMMON.RECEIVE_DATA_SIZE)); - succ = succeededResults.iterator().next(); - assertEquals(COMMON.QUERY_SUCCESS, succ.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(3l, succ.get(COMMON.RECEIVE_DATA_SIZE)); - - // check fail - worker.executeQuery("fail", "2"); - assertEquals("prefix test suffix\nprefix SELECT whatever suffix\nprefix fail suffix\n", FileUtils.readFile(f.getAbsolutePath())); - Collection failedResults = worker.popQueryResults(); - assertEquals(1, failedResults.size()); - Properties 
fail = failedResults.iterator().next(); - assertEquals(COMMON.QUERY_UNKNOWN_EXCEPTION, fail.get(COMMON.RECEIVE_DATA_SUCCESS)); - assertEquals(0l, fail.get(COMMON.RECEIVE_DATA_SIZE)); - worker.stopSending(); - } - - @Test - public void checkCLI() throws IOException { - //check if simple cli works - // public CLIWorker(String taskID, Connection connection, String queriesFile, @Nullable Integer timeOut, @Nullable Integer timeLimit, @Nullable Integer fixedLatency, @Nullable Integer gaussianLatency, Integer workerID) { - Connection con = new Connection(); - con.setUser("user1"); - con.setPassword("pwd"); - - con.setEndpoint("/bin/echo \"$QUERY$ $USER$:$PASSWORD$ $ENCODEDQUERY$\" > "+f.getAbsolutePath()); - CLIWorker worker = new CLIWorker("123/1/1", con, "src/test/resources/update/empty.nt", null, null, null, null, 1); - worker.executeQuery("test ()", "1"); - String content = FileUtils.readFile(f.getAbsolutePath()); - assertEquals("test () user1:pwd test+%28%29\n", content); - - con = new Connection(); - con.setEndpoint("/bin/echo \"$QUERY$ $USER$:$PASSWORD$ $ENCODEDQUERY$\" > "+f.getAbsolutePath()+" | /bin/printf \"HeaderDoesNotCount\na\na\""); - worker = new CLIWorker("123/1/1", con, "src/test/resources/update/empty.nt", null, null, null, null, 1); - worker.executeQuery("test ()", "1"); - content = FileUtils.readFile(f.getAbsolutePath()); - assertEquals("test () : test+%28%29\n", content); - Collection results = worker.popQueryResults(); - assertEquals(1, results.size()); - Properties p = results.iterator().next(); - assertEquals(2l, p.get(COMMON.RECEIVE_DATA_SIZE)); - } -} diff --git a/iguana.corecontroller/src/test/resources/querystats.nt b/iguana.corecontroller/src/test/resources/querystats.nt deleted file mode 100644 index b0ee226e5..000000000 --- a/iguana.corecontroller/src/test/resources/querystats.nt +++ /dev/null @@ -1,13 +0,0 @@ - "false"^^. - "true"^^ . - "true"^^ . - "false"^^. - "2"^^. - "false"^^. - "false"^^. - "false"^^. - "false"^^. - . - "abc". - "SELECT *\nWHERE\n { ?s ?p ?o .\n ?o ?q ?t\n FILTER ( ?t = \"abc\" )\n }\nGROUP BY ?s\n" . - . \ No newline at end of file diff --git a/iguana.resultprocessor/README b/iguana.resultprocessor/README deleted file mode 100644 index 476f91357..000000000 --- a/iguana.resultprocessor/README +++ /dev/null @@ -1,85 +0,0 @@ -INTRODUCTION ------------- - -The Result Processing module of Iguana calculates metrics and saves them into several Storage Solutions. - -The provided Metrics are - -* Queries Per Second (QPS) -* Query Mixes Per Hour (QMPH) -* Number of Queries Per Hour (NoQPH) -* Each Query Execution Time (EQE) - -The provided Storage Solutions are - -* Triple Store -* NTriple File -* File/Directory Structure - -For a full description, please visit the wiki - -CONFIGURATION ------------- - - -To define which metrics should be used, simply add the following line -to your properties - - iguana.rp.metrics=metric1, metric2, ... - -To define metric1, metric2 and so on simply add the following line - - metric1.class=org.aksw.iguana.rp.metrics.impl.QMPHMetric - - -The following classes refer to the following Metrics - -* QPS: org.aksw.iguana.rp.metrics.impl.QPSMetric - -* QMPH: org.aksw.iguana.rp.metrics.impl.QMPHMetric - -* NoQPH: org.aksw.iguana.rp.metrics.impl.NoQPHMetric - -* EQE: org.aksw.iguana.rp.metrics.impl.EachQueryMetric - - -To define the storages which should be used, add the following line -to your properties - - iguana.rp.storages=storage1, storage2,...
- -To define the Storage please add the following - - storage1.class=org.aksw.iguana.rp.storage.impl.TriplestoreStorage - storage1.constructorArgs=http://localhost:9999/blazegraph/sparql, http://localhost:9999/blazegraph/sparql - - -The following Classes refer to the following Storages - -* TriplestoreStorage: org.aksw.iguana.rp.storage.impl.TriplestoreStorage - (You have to at least specify the endpoint and updateEndpoint of the triple store in the constructor arguments) - -* FileStorage: org.aksw.iguana.rp.storage.impl.FileStorage - (optional: you can define the root directory of the stored CSV files) - -* NTFileStorage: org.aksw.iguana.rp.storage.impl.NTFileStorage - (optional: you can specify the Ntriple file name) - -For Further Information to the constructor Arguments, -visit the JavaDoc: http://iguana-benchmark.eu/javadoc/index.html - - - -All three (iguana.rp.consumer, iguana.rp.metrics, iguana.rp.storages) -have to be stated in the properties file somehow. - - - -LINKS ------ - -* Project Site: http://iguana-benchmark.eu - -* Github Site: http://github.com/AKSW/IGUANA - -* Bug Tracker: http://github.com/AKSW/IGUANA/issues diff --git a/iguana.resultprocessor/pom.xml b/iguana.resultprocessor/pom.xml deleted file mode 100644 index c66b91ae3..000000000 --- a/iguana.resultprocessor/pom.xml +++ /dev/null @@ -1,188 +0,0 @@ - - 4.0.0 - - org.aksw - iguana-parent - ${revision} - - iguana.resultprocessor - - Iguana ResultProcessor - Processing, aggregating and store results from Iguanas core. - - - AGPLv3 or later - https://www.gnu.org/licenses/agpl-3.0.html - - - - - Lixi Conrads - lixiconrads@gmail.com - - Former Developer - - Dice Research Group - https://dice-research.org - - - - Dice Research Group - https://dice-research.org - - - GitHub Issue Management - https://github.com/dice-group/iguana/issues - - https://dice-research.org/IGUANA - - - 11 - 4.2.0 - UTF-8 - 11 - 11 - - - - - org.apache.jena - jena-iri - ${jena.version} - - - org.apache.jena - jena-arq - ${jena.version} - - - org.apache.jena - jena-core - ${jena.version} - - - - junit - junit - 4.13.1 - test - - - org.aksw - iguana.commons - ${revision} - - - - - - - org.jacoco - jacoco-maven-plugin - 0.8.6 - - - prepare-agent - - prepare-agent - - - - report - prepare-package - - report - - - - post-unit-test - test - - report - - - - - target/jacoco.exec - - target/jacoco-ut - - - - - - org.apache.maven.plugins - maven-compiler-plugin - 3.8.1 - - 11 - 11 - UTF-8 - - -parameters - - - - - org.codehaus.mojo - exec-maven-plugin - - - maven-dependency-plugin - - - install - - copy-dependencies - - - ${project.build.directory}/lib - - - - - - - - - org.codehaus.mojo - exec-maven-plugin - 1.5.0 - - java - org.aksw.iguana.rp.controller.MainController - - - - - - - - - - - - Apache Repo Central - Apache Repository - https://repo.maven.apache.org/maven2 - - - maven.aksw.internal - University Leipzig, AKSW Maven2 Repository - https://maven.aksw.org/archiva/repository/internal - - - maven.aksw.snapshots - University Leipzig, AKSW Maven2 Repository - https://maven.aksw.org/archiva/repository/snapshots - - - - - github - GitHub dice-group Apache Maven Packages - https://maven.pkg.github.com/dice-group/IGUANA - - - diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/controller/RPController.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/controller/RPController.java deleted file mode 100644 index a589c6ee2..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/controller/RPController.java +++ 
/dev/null @@ -1,51 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.controller; - - -import org.aksw.iguana.rp.experiment.ExperimentManager; -import org.aksw.iguana.rp.metrics.Metric; -import org.aksw.iguana.rp.metrics.MetricManager; -import org.aksw.iguana.rp.storage.Storage; -import org.aksw.iguana.rp.storage.StorageManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.List; - -/** - * This is the Main Controller. - * It will start the ResultProcessor, initialize the {@link org.aksw.iguana.rp.storage.StorageManager} and the {@link org.aksw.iguana.rp.metrics.MetricManager} - * - * @author f.conrads - * - */ -public class RPController { - - - private static final Logger LOGGER = LoggerFactory - .getLogger(RPController.class); - private StorageManager storageManager; - - - /** - * This will initialize the MainController. - */ - public void init(List storages, List metrics){ - //add storages to StoragesManager - storageManager = StorageManager.getInstance(); - storageManager.addStorages(storages); - LOGGER.info("Storages : {{}}", storageManager); - //Add default metrics to MetricsManager - MetricManager globalMetricsManager = MetricManager.getInstance(); - globalMetricsManager.addMetrics(metrics); - LOGGER.info("GlobalMetrics : {{}}", globalMetricsManager); - ExperimentManager emanager = new ExperimentManager(globalMetricsManager, storageManager); - - } - - public void close() { - storageManager.close(); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/experiment/ExperimentManager.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/experiment/ExperimentManager.java deleted file mode 100644 index 49228c28d..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/experiment/ExperimentManager.java +++ /dev/null @@ -1,124 +0,0 @@ -package org.aksw.iguana.rp.experiment; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.MetricManager; -import org.aksw.iguana.rp.storage.StorageManager; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * The ExperimentManager manages the incoming properties from the - * tasks and sort them to the correct experiments - * One Experiment is simply a {@link org.aksw.iguana.rp.metrics.MetricManager} - * - * @author f.conrads - */ -public class ExperimentManager { - - private static final Logger LOGGER = LoggerFactory - .getLogger(ExperimentManager.class); - - private Map experiments = new HashMap(); - private MetricManager globalMetricManager; - - private StorageManager storageManager; - - private static ExperimentManager instance; - - public synchronized static ExperimentManager getInstance(){ - if (instance == null) { - instance = new ExperimentManager(MetricManager.getInstance(), StorageManager.getInstance()); - } - return instance; - } - - - /** - * Initialize the ExperimentManager with the global {@link org.aksw.iguana.rp.metrics.MetricManager} - * @param globalMetricManager - */ - public ExperimentManager(MetricManager globalMetricManager, StorageManager storageManager){ - this.globalMetricManager = globalMetricManager; - this.storageManager = storageManager; - } - - /** - * - * @param p - */ - public void receiveData(Properties p){ - //check if start, content, end - if(p.containsKey(COMMON.RECEIVE_DATA_START_KEY)){ - startExperimentTask(p); - } - else if(p.containsKey(COMMON.RECEIVE_DATA_END_KEY)){ - endExperimentTask(p); - } - else{ - 
content(p); - } - } - - /** - * This will start an experiment. This will initialize the following things - * Queries, Metrics, Resultsizes, Workers, Tasks, Suite(?), metricsManager - * - * @param p - */ - private void startExperimentTask(Properties p){ - //Check if properties contains an experiment ID, if not do nothing. - if(!p.containsKey(COMMON.EXPERIMENT_TASK_ID_KEY)){ - LOGGER.error("Could not find experiment task ID in properties."); - LOGGER.error("Will ignore this properties object {}", p.toString()); - return; - } - //Get the Experiment task ID - String taskID = p.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - LOGGER.info("Got start flag for experiment task ID {}", taskID); - - - //Add metricManager to experiments - experiments.put(taskID, globalMetricManager); - - globalMetricManager.addMetaData(p); - //check all the properties. (Queries, Results, Workers) and add them to the Storages - storageManager.addMetaData(p); - LOGGER.info("Will start experiment task with ID {} now.", taskID); - } - - /** - * Will sort the properties to the correct experiment according to their IDs - * It will simply add the properties to the {@link org.aksw.iguana.rp.metrics.MetricManager} - * @param p - */ - private void content(Properties p){ - String taskID = p.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - LOGGER.debug("Got content for experiment task ID: {} ", taskID); - if(experiments.containsKey(taskID)) - experiments.get(taskID).receiveData(p); - else - LOGGER.warn("Got content for experiment task ID: {} but task never start", taskID); - } - - /** - * This will end the experiment and start the close method of the associated metrics - * @param p - */ - private void endExperimentTask(Properties p){ - String taskID = p.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - storageManager.endTask(taskID); - storageManager.commit(); - LOGGER.info("Got end Flag for experiment task ID {}", taskID); - if(experiments.containsKey(taskID)){ - experiments.get(taskID).close(); - experiments.remove(taskID); - } - else{ - LOGGER.warn("Could not find Experiment Task with ID: {}.", taskID); - } - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/AbstractMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/AbstractMetric.java deleted file mode 100644 index 61e0e2b26..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/AbstractMetric.java +++ /dev/null @@ -1,252 +0,0 @@ -package org.aksw.iguana.rp.metrics; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.storage.StorageManager; -import org.aksw.iguana.rp.vocab.Vocab; -import org.apache.jena.rdf.model.*; -import org.apache.jena.vocabulary.RDF; -import org.apache.jena.vocabulary.RDFS; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Abstract Metric class which implements the method sendData - * so the final Metric class can send their final data via this command to the storages - * - * @author f.conrads - * - */ -public abstract class AbstractMetric implements Metric{ - - protected StorageManager storageManager = StorageManager.getInstance(); - - protected Properties metaData = new Properties(); - - protected Map dataContainer = new HashMap(); - - protected String name; - protected String shortName; - protected String description; - - /** - * This constructor will not set name, Short name and description - * Thus the final Metric class has to set them itself. 
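- *
- * A hypothetical subclass could then assign the protected fields name, shortName
- * and description in its own constructor (ExampleMetric is a placeholder name,
- * purely for illustration):
- *
- *   public ExampleMetric() {
- *       super();
- *       this.name = "Example Metric";
- *       this.shortName = "EM";
- *       this.description = "An invented metric used only to illustrate the contract.";
- *   }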
- */ - public AbstractMetric(){ - } - - /** - * Will create an Metric class with the name, short name and description - * - * @param name - * @param shortName - * @param description - */ - public AbstractMetric(String name, String shortName, String description){ - this.name=name; - this.shortName=shortName; - this.description=description; - } - - @Override - public void setStorageManager(StorageManager smanager){ - this.storageManager = smanager; - } - - @Override - public StorageManager getStorageManager(){ - return this.storageManager; - } - - @Override - public String getDescription(){ - return this.description; - } - - @Override - public String getName(){ - return this.name; - } - - @Override - public String getShortName(){ - return this.shortName; - } - - - /** - * Will add the Meta Data to the Metric - */ - @Override - public void setMetaData(Properties metaData){ - this.metaData = metaData; - } - - - /** - * Will return the Properties Object with the associated key: EXTRA_META_KEY
- * if this key does not exists: recv will be returned - * - * @param recv - * @return - */ - protected Properties getExtraMeta(Properties recv){ - if(recv.containsKey(COMMON.EXTRA_META_KEY)) - return (Properties) recv.get(COMMON.EXTRA_META_KEY); - return recv; - } - - - - /** - * Will create a subject node string from the recv object (ExperimentTaskID and extraMeta Hash) - * @param recv - * @return - */ - protected String getSubjectFromExtraMeta(Properties recv){ - String subject = metaData.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - Properties extraMeta = getExtraMeta(recv); - if (!extraMeta.isEmpty()) { - subject += "/" + recv.get(COMMON.WORKER_ID); - } - return subject; - } - - /** - * Will add the data to a in memory container which can be assessed by extra - * - * @param extra - * @param data - */ - protected void addDataToContainer(Properties extra, Properties data){ - this.dataContainer.put(extra, data); - } - - /** - * Getting the data Properties from the data container associated to extra - * - * @param extra - * @return - */ - protected Properties getDataFromContainer(Properties extra){ - return this.dataContainer.get(extra); - } - - /** - * Assuming that the results are Integer objects, this will - * 1. if no data for extra exists, create the data from the results object - * 2. if the data exists, sum the corresponding - * - * for example: - * container has data object e1:(a:10, b:12) - * new results for e1 are (a:2, b:5) - * The new container data will be (a:12, b:17) - * - * @param extra - * @param results - */ - protected void processData(Properties extra, Properties results){ - Properties tmp = getDataFromContainer(extra); - if(tmp!=null){ - for(Object obj : results.keySet()){ - if(tmp.get(obj.toString()) instanceof Long) { - Long res = (long) tmp.get(obj.toString()); - tmp.put(obj.toString(),res+(long)results.get(obj)); - } - else if(tmp.get(obj.toString()) instanceof Integer) { - int res = (int) tmp.get(obj.toString()); - tmp.put(obj.toString(),res+(int)results.get(obj)); - } - else if(tmp.get(obj.toString()) instanceof Double) { - double res = (double) tmp.get(obj.toString()); - tmp.put(obj.toString(),res+(double)results.get(obj)); - } - } - } - else{ - tmp = new Properties(); - for(Object obj : results.keySet()){ - if(results.get(obj) instanceof Long) - tmp.put(obj.toString(),(long)results.get(obj)); - if(results.get(obj) instanceof Double) - tmp.put(obj.toString(),(double)results.get(obj)); - if(results.get(obj) instanceof Integer) - tmp.put(obj.toString(),(int)results.get(obj)); - } - } - addDataToContainer(extra, tmp); - } - - - /** - * Creates a Statement connecting a the subject to the Task Resource using the iprop:workerResult property as follows - * ires:Task1 iprop:workerResult subject - * @param subject - * @return - */ - protected Statement getConnectingStatement(Resource subject) { - return ResourceFactory.createStatement(getTaskResource(), Vocab.workerResult, subject); - } - - public Resource getTaskResource(){ - String subject = metaData.getProperty(COMMON.EXPERIMENT_TASK_ID_KEY); - return ResourceFactory.createResource(COMMON.RES_BASE_URI+subject); - } - - public Resource getSubject(Properties recv){ - String id = this.getSubjectFromExtraMeta(recv); - return ResourceFactory.createResource(COMMON.RES_BASE_URI+id); - } - - public Property getMetricProperty(){ - return ResourceFactory.createProperty(COMMON.PROP_BASE_URI+shortName); - } - - public void sendData(Model m){ - this.storageManager.addData(m); - } - - @Override - public void close() { - //Add 
metric description and worker class - Model m = ModelFactory.createDefaultModel(); - String label = this.getClass().getCanonicalName(); - if(this.getClass().isAnnotationPresent(Shorthand.class)){ - label = getClass().getAnnotation(Shorthand.class).value(); - } - Literal labelRes = ResourceFactory.createPlainLiteral(label); - Literal commentRes = ResourceFactory.createPlainLiteral(this.description); - Resource classRes = ResourceFactory.createResource(COMMON.CLASS_BASE_URI+"metric/"+label); - Resource metricRes = ResourceFactory.createResource(COMMON.RES_BASE_URI+this.getShortName()); - //Resource metricClass = ResourceFactory.createResource(COMMON.CLASS_BASE_URI+this.getShortName()); - - m.add(metricRes, RDFS.label, this.getName()); - m.add(metricRes, RDFS.comment, commentRes); - //adding type iguana:metric - m.add(metricRes, RDF.type, Vocab.metricClass); - //adding type iguana:metric/SPECIFIC_METRIC_CLASS - m.add(metricRes, RDF.type, classRes); - m.add(metricRes, RDFS.label, labelRes); - - for(Properties key : dataContainer.keySet()) { - - Resource subject = ResourceFactory.createResource(COMMON.RES_BASE_URI+getSubjectFromExtraMeta(key)); - m.add(subject, - RDF.type, - Vocab.workerClass); - for(Object k : key.keySet()) { - m.add(subject, ResourceFactory.createProperty(COMMON.PROP_BASE_URI+k), ResourceFactory.createTypedLiteral(key.get(k))); - } - m.add(subject, Vocab.worker2metric, metricRes); - } - m.add(getTaskResource(), Vocab.worker2metric, metricRes); - - this.storageManager.addData(m); - this.storageManager.commit(); - - this.dataContainer.clear(); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/Metric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/Metric.java deleted file mode 100644 index dcc5b0136..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/Metric.java +++ /dev/null @@ -1,89 +0,0 @@ -package org.aksw.iguana.rp.metrics; - -import org.aksw.iguana.rp.storage.StorageManager; - -import java.util.Properties; - -/** - * This is the Interface for all Metrics - * - * @author f.conrads - * - */ -public interface Metric { - - /** - * This method should implement what to do with one result.
- *
- * For example: No Of Queries Per Hour will get the query time,
- * add the time to a variable which keeps track of the total time of all executed queries,
- * and increase the number of executed queries if the query was successfully executed.
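- *
- * A minimal sketch of such a receiveData implementation (the totaltime and
- * queries fields are invented here for illustration):
- *
- *   public void receiveData(Properties p) {
- *       this.totaltime += Double.parseDouble(p.get(COMMON.RECEIVE_DATA_TIME).toString());
- *       if ((long) p.get(COMMON.RECEIVE_DATA_SUCCESS) > 0) {
- *           this.queries++;
- *       }
- *   }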
- *
- * Be aware that in this example the Metric could be stopped as soon as one hour is reached,
- * or it could be calculated in the close method.
- *
- * Assuming the totaltime is in minutes (it should be calculated in ms though),
- * the latter will result in the following formula:
- * m = 60 * queries / totaltime
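- * For instance, 1200 successfully executed queries with a totaltime of 30 minutes
- * would yield m = 60 * 1200 / 30 = 2400 queries per hour (numbers chosen purely
- * for illustration).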
- * - * The actual keys of the properties will depend on the core.
- * The stress test will send different keys than a completeness test.
- * Thus not all metrics are available for each test.
- * Hence it should be implemented if the Metric cannot calculate the test results - * that it will just close itself without adding results. - * - * - * @param p - */ - public void receiveData(Properties p); - - public void setStorageManager(StorageManager sManager); - - public StorageManager getStorageManager(); - /** - * This method will be called, as soon as the associated Experiment Task is finished. - * - * Not all metrics are available for each test. - * Hence it should be implemented if the Metric cannot calculate the test results - * that it will just close itself without adding results. - * The {@link org.aksw.iguana.rp.metrics.MetricManager} will try to close the Metric still, - * thus it should be checked if that was the case. - * - */ - public void close(); - - - /** - * This method should return a short description of what the Metric will calculate - * - * For example (No. of Queries Per Hour): "Will sum up all successful executed Queries in one hour." - * - * @return - */ - public String getDescription(); - - /** - * This method should return the Metric Name - * - * For example: "Query Mixes Per Hour" - * - * @return - */ - public String getName(); - - /** - * This method should return an abbreviated version of the Metric name. - * - * For example (Query Mixes Per Hour): "QMPH" - * @return - */ - public String getShortName(); - - /** - * This method will be called by the {@link org.aksw.iguana.rp.experiment.ExperimentManager} to - * provide meta data such as the number of query mixes. - * - * @param metaData - */ - public void setMetaData(Properties metaData); -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/MetricManager.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/MetricManager.java deleted file mode 100644 index 2a1aff10c..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/MetricManager.java +++ /dev/null @@ -1,110 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.metrics; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.*; - -/** - * - * The MetricManager will manage all {@link org.aksw.iguana.rp.metrics.Metric} - * - * @author f.conrads - * - */ -public class MetricManager { - - private static final Logger LOGGER = LoggerFactory - .getLogger(MetricManager.class); - - private Set metrics = new HashSet(); - - private static MetricManager instance; - - public synchronized static MetricManager getInstance() { - if (instance == null) { - instance = new MetricManager(); - } - return instance; - } - - /** - * WIll add a metric to the manager - * @param metric - */ - public void addMetric(Metric metric){ - if(metric==null){ - return; - } - metrics.add(metric); - } - - public Set getMetrics(){ - return metrics; - } - - /** - * Will add the meta Data to all metrics - * @param metaData - */ - public void addMetaData(Properties metaData){ - for(Metric m : metrics){ - m.setMetaData(metaData); - } - } - /** - * This will message the received properties to all defined metrics. 
- * - * @param p - */ - public void receiveData(Properties p){ - Set remove = new HashSet(); - for(Metric m : metrics){ - try{ - m.receiveData(p); - }catch(Exception e){ - LOGGER.warn("Could not use metric {}, Cause: {}",m.getShortName(),e); - remove.add(m); - } - } - metrics.removeAll(remove); - } - - @Override - public String toString(){ - StringBuilder ret =new StringBuilder(); - - Iterator it = metrics.iterator(); - for(int i=0;i remove = new HashSet(); - for(Metric m : metrics){ - try{ - m.close(); - m.getStorageManager().commit(); - - }catch(Exception e){ - LOGGER.error("Could not use metric "+m.getShortName()+". Cause: {}",e); - - } - } - metrics.removeAll(remove); - } - - public void addMetrics(List metrics) { - this.metrics.addAll(metrics); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/AvgQPSMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/AvgQPSMetric.java deleted file mode 100644 index 3c468a9e4..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/AvgQPSMetric.java +++ /dev/null @@ -1,87 +0,0 @@ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.jena.rdf.model.*; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Calculates the average queries per second - */ -@Shorthand("AvgQPS") -public class AvgQPSMetric extends QPSMetric { - public AvgQPSMetric() { - super( - "Average Queries Per Second", - "AvgQPS", - "Will calculate the overall average queries Per second. Further on it will save the totaltime of each query, the failure and the success"); - } - - public AvgQPSMetric(Integer penalty) { - super( - "Average Queries Per Second", - "AvgQPS", - "Will calculate the overall average queries Per second. 
Further on it will save the totaltime of each query, the failure and the success"); - this.penalty=penalty; - } - - - - @Override - public void close() { - super.close(); - } - - @Override - protected void qpsClose(){ - Model m = ModelFactory.createDefaultModel(); - Map map = new HashMap(); - Property property = getMetricProperty(); - Property penalziedProp = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"penalized"+shortName); - for(Properties key : dataContainer.keySet()){ - Properties value = dataContainer.get(key); - Double avgQps=0.0; - Double penalizedAvgQps=0.0; - for(Object queryID : value.keySet()){ - Object[] resArr = (Object[]) value.get(queryID); - Double qps = (long) resArr[1]/*success*/ / (double) resArr[0]/*time*/ / 1000.0/*ms to s*/; - Double penalizedQPS = ((long) resArr[1]/*success*/ + (long) resArr[2]/*failure*/) / (double) resArr[7]/*penalizedTime*/ / 1000.0/*ms to s*/; - map.putIfAbsent(queryID, new Number[]{Double.valueOf(0), Long.valueOf(0), Long.valueOf(0), Double.valueOf(0)}); - - Number[] current =map.get(queryID); - Long succ = (long)resArr[1]+(Long)current[1]; - Long fail = (long)resArr[2]+(Long)current[2]; - Double time = (double)resArr[0]+(Double)current[0]; - Double penTime = (double)resArr[7]+(Double)current[3]; - map.put(queryID, new Number[]{time, succ, fail, penTime}); - avgQps+=qps; - penalizedAvgQps+=penalizedQPS; - } - avgQps = avgQps/value.size(); - penalizedAvgQps = penalizedAvgQps/value.size(); - Resource subject = getSubject(key); - m.add(getConnectingStatement(subject)); - m.add(subject, property, ResourceFactory.createTypedLiteral(avgQps)); - m.add(subject, penalziedProp, ResourceFactory.createTypedLiteral(penalizedAvgQps)); - - } - Double avgQps=0.0; - Double penalizedAvgQps=0.0; - for(Object queryID : map.keySet()) { - Double qps = (Long)map.get(queryID)[1]*1.0/((Double)map.get(queryID)[0]/1000.0); - Double penalizedQPS = ((long)map.get(queryID)[1] + (long)map.get(queryID)[2]) *1.0/((double)map.get(queryID)[3]/1000.0); - avgQps+=qps; - penalizedAvgQps+=penalizedQPS; - } - avgQps = avgQps/map.size(); - penalizedAvgQps= penalizedAvgQps/map.size(); - m.add(getTaskResource(), property, ResourceFactory.createTypedLiteral(avgQps)); - m.add(getTaskResource(), penalziedProp, ResourceFactory.createTypedLiteral(penalizedAvgQps)); - this.sendData(m); - this.storageManager.commit(); - } - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/EachQueryMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/EachQueryMetric.java deleted file mode 100644 index 74cb5f231..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/EachQueryMetric.java +++ /dev/null @@ -1,120 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.AbstractMetric; -import org.apache.jena.rdf.model.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * - * This metric will send every query execution time to the storages. Also it - * will provide if the query succeeded or failed. 
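- *
- * As an illustrative sketch, a single successful execution could end up in the
- * storages roughly as follows (prefixes abbreviated, all IDs are placeholders):
- *
- *   ires:123/1/1/1/q1 iprop:queryExecution ires:123/1/1/1/q1/1 .
- *   ires:123/1/1/1/q1/1 iprop:time "42.0"^^xsd:double ;
- *                       iprop:success "true"^^xsd:boolean ;
- *                       iprop:run "1"^^xsd:long .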
- * - * @author f.conrads - * - */ -@Shorthand("EachQuery") -public class EachQueryMetric extends AbstractMetric { - - private static Property queryProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"query"); - private static Property execProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"queryExecution"); - private static Property resultSize = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"resultSize"); - private static Property timeProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"time"); - private static Property successProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"success"); - private static Property runProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"run"); - private static Property queryIDProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"queryID"); - private static Property errorCodeProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"code"); - - - private Map queryRunMap = new HashMap(); - - protected static Logger LOGGER = LoggerFactory - .getLogger(EachQueryMetric.class); - - /** - * - */ - public EachQueryMetric() { - super("Each Query Execution", "EachQuery", - "Will save every query execution time."); - } - - /* - * (non-Javadoc) - * - * @see org.aksw.iguana.rp.metrics.Metric#receiveData(java.util.Properties) - */ - @Override - public void receiveData(Properties p) { - // set Subject Node, hash out of task ID and if not empty the extra - // properties - Model m = ModelFactory.createDefaultModel(); - - String worker = getSubjectFromExtraMeta((Properties) p.get(COMMON.EXTRA_META_KEY)); - - - LOGGER.debug(this.getShortName() + " has received " + p); - - double time = (double) p.get(COMMON.RECEIVE_DATA_TIME); - Boolean success = (Boolean) (((long) p.get(COMMON.RECEIVE_DATA_SUCCESS))>0?true:false); - String queryID = p.getProperty(COMMON.QUERY_ID_KEY); - long err = (long) p.get(COMMON.RECEIVE_DATA_SUCCESS); - String subject = worker+"/"+queryID; - - long run=1; - if(queryRunMap.containsKey(subject)){ - run = queryRunMap.get(subject)+1; - } - //set subject2 node subject/noOfRun - String subject2 = subject+"/"+run; - - //as triples - Resource workerRes = ResourceFactory.createResource(COMMON.RES_BASE_URI+worker); - - Resource queryRes = ResourceFactory.createResource(COMMON.RES_BASE_URI+subject); - - Resource subRes = ResourceFactory.createResource(COMMON.RES_BASE_URI+subject2); - m.add(getConnectingStatement(workerRes)); - m.add(workerRes, queryProperty , queryRes); - m.add(queryRes, execProperty , subRes); - m.add(subRes, timeProperty, ResourceFactory.createTypedLiteral(time)); - m.add(subRes, successProperty, ResourceFactory.createTypedLiteral(success)); - if(p.containsKey(COMMON.QUERY_HASH)) { - int queryHash = Integer.parseInt(p.get(COMMON.QUERY_HASH).toString()); - m.add(subRes, queryIDProperty, ResourceFactory.createResource(COMMON.RES_BASE_URI+queryHash+"/"+queryID)); - } - else{ - m.add(subRes, queryIDProperty, ResourceFactory.createTypedLiteral(queryID)); - } - m.add(subRes, runProperty, ResourceFactory.createTypedLiteral(run)); - m.add(subRes, errorCodeProperty, ResourceFactory.createTypedLiteral(err)); - if(p.containsKey(COMMON.RECEIVE_DATA_SIZE)) { - long resSize = Long.parseLong(p.get(COMMON.RECEIVE_DATA_SIZE).toString()); - m.add(subRes, resultSize, ResourceFactory.createTypedLiteral(resSize)); - } - - sendData(m); - queryRunMap.put(subject, run); - } - - /* - * (non-Javadoc) - * - * @see org.aksw.iguana.rp.metrics.Metric#close() - */ - @Override - public void close() 
{ - // Nothing to do here, as each query was sent to the Storages yet. - super.close(); - } - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/F1MeasureMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/F1MeasureMetric.java deleted file mode 100644 index b01949455..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/F1MeasureMetric.java +++ /dev/null @@ -1,132 +0,0 @@ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.AbstractMetric; -import org.apache.jena.rdf.model.*; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * provides a metric to measure F1, recall and precision if provided tp,fp,fn. - * Calculates micro and macro f1, recall and precision as well. - */ -@Shorthand("F1Measure") -public class F1MeasureMetric extends AbstractMetric { - - private static Property queryProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"query"); - private static Property queryIDProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"queryID"); - private static Property queryStringProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"queryString"); - private static Property tpProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"tp"); - private static Property fpProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"fp"); - private static Property fnProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"fn"); - private static Property precisionProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"precision"); - private static Property recallProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"recall"); - private static Property f1Property = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"f1"); - private static Property microPrecisionProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"microPrecision"); - private static Property microRecallProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"microRecall"); - private static Property microF1Property = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"microF1"); - private static Property macroPrecisionProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"macroPrecision"); - private static Property macroRecallProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"macroRecall"); - private static Property macroF1Property = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"macroF1"); - - - private static final int TP_RESULTS = 0; - - private static final int FP_RESULTS = 1; - - private static final int FN_RESULTS = 2; - - private static final int QUERY_ID = 0; - - private static final int QUERY_STRING = 1; - - private static final int DOUBLE_RESULTS = 2; - - private Map rawResults = new HashMap(); - - public F1MeasureMetric() { - super("F1 Measure", "F1Measure", "Will calculate Micro and Macro F1 measure"); - } - - @Override - public void receiveData(Properties p) { - String queryID = p.get(COMMON.QUERY_ID_KEY).toString(); - String queryString = p.get(COMMON.QUERY_STRING).toString(); - double[] doubleResults = (double[])p.get(COMMON.DOUBLE_RAW_RESULTS); - Object[] rawResult = new Object[3]; - rawResult[QUERY_ID] = queryID; - rawResult[QUERY_STRING] = queryString; - rawResult[DOUBLE_RESULTS] = doubleResults; - rawResults.put(queryID, rawResult); - } - - @Override 
- public void close() { - String subject = getSubjectFromExtraMeta(new Properties()); - - Model m = ModelFactory.createDefaultModel(); - Resource subRes= ResourceFactory.createResource(COMMON.RES_BASE_URI+subject); - - double[] globalMeasure = new double[] {0,0,0}; - double[] globalRaw = new double[] {0,0,0}; - int i=0; - for(String key : rawResults.keySet()) { - Object[] rawResult = rawResults.get(key); - String queryURI = COMMON.RES_BASE_URI+subject+"/"+rawResult[QUERY_ID].toString(); - Resource queryURIRes = ResourceFactory.createResource(queryURI); - m.add(subRes, queryProperty, queryURIRes); - m.add(queryURIRes, queryIDProperty, ResourceFactory.createTypedLiteral(rawResult[QUERY_ID])); - m.add(queryURIRes, queryStringProperty, ResourceFactory.createTypedLiteral(rawResult[QUERY_STRING].toString().replaceAll("(<|>)", ""))); - - double[] rawDoubleResults = (double[])rawResult[DOUBLE_RESULTS]; - m.add(queryURIRes, tpProperty, ResourceFactory.createTypedLiteral(rawDoubleResults[TP_RESULTS])); - m.add(queryURIRes, fpProperty, ResourceFactory.createTypedLiteral(rawDoubleResults[FP_RESULTS])); - m.add(queryURIRes, fnProperty, ResourceFactory.createTypedLiteral(rawDoubleResults[FN_RESULTS])); - - globalRaw[TP_RESULTS]+=rawDoubleResults[TP_RESULTS]; - globalRaw[FP_RESULTS]+=rawDoubleResults[FP_RESULTS]; - globalRaw[FN_RESULTS]+=rawDoubleResults[FN_RESULTS]; - double[] measure = calculateMeasure(rawDoubleResults); - m.add(queryURIRes, precisionProperty, ResourceFactory.createTypedLiteral(measure[0])); - m.add(queryURIRes, recallProperty, ResourceFactory.createTypedLiteral(measure[1])); - m.add(queryURIRes, f1Property, ResourceFactory.createTypedLiteral(measure[2])); - - globalMeasure[0] += measure[0]; - globalMeasure[1] += measure[1]; - globalMeasure[2] += measure[2]; - } - Properties results = new Properties(); - double[] microMeasure = calculateMeasure(globalRaw); - m.add(subRes, microPrecisionProperty, ResourceFactory.createTypedLiteral(microMeasure[0])); - m.add(subRes, microRecallProperty, ResourceFactory.createTypedLiteral(microMeasure[1])); - m.add(subRes, microF1Property, ResourceFactory.createTypedLiteral(microMeasure[2])); - m.add(subRes, macroPrecisionProperty, ResourceFactory.createTypedLiteral(globalMeasure[0]/rawResults.size())); - m.add(subRes, macroRecallProperty, ResourceFactory.createTypedLiteral(globalMeasure[1]/rawResults.size())); - m.add(subRes, macroF1Property, ResourceFactory.createTypedLiteral(globalMeasure[2]/rawResults.size())); - sendData(m); - super.close(); - } - - private double[] calculateMeasure(double[] rawDoubleResults) { - double[] measure = new double[] {0,0,0}; - double tp = rawDoubleResults[TP_RESULTS]; - double fp = rawDoubleResults[FP_RESULTS]; - double fn = rawDoubleResults[FN_RESULTS]; - if(tp==0&&fp==0&&fn==0) { - return new double[]{1,1,1}; - } - if(fp!=0||tp!=0) { - measure[0] = tp/(tp+fp); - } - if(fp!=0||tp!=0) { - measure[1] = tp/(tp+fn); - } - if(measure[0]!=0 || measure[1]!=0) - measure[2] = 2*measure[0]*measure[1]/(measure[0]+measure[1]); - return measure; - } - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQMetric.java deleted file mode 100644 index 61e1f0a27..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQMetric.java +++ /dev/null @@ -1,82 +0,0 @@ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import 
org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.AbstractMetric; -import org.apache.jena.rdf.model.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -/** - * Counts the number of all successfully executed queries - */ -@Shorthand("NoQ") -public class NoQMetric extends AbstractMetric { - - protected static final Object TOTAL_TIME = "totalTime"; - protected static final Object TOTAL_SUCCESS = "totalSuccess"; - - - protected static Logger LOGGER = LoggerFactory.getLogger(NoQPHMetric.class); - - - protected long hourInMS = 3600000; - - - public NoQMetric(){ - super("Number Of Queries", "NoQ", "Will calculate the number of queries which could be executed successfully."); - } - - protected NoQMetric(String name, String shortName, String description){ - super(name, shortName, description); - } - - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.metrics.Metric#receiveData(java.util.Properties) - */ - @Override - public void receiveData(Properties p) { - LOGGER.debug(this.getShortName()+" has received "+p); - double time = Double.parseDouble(p.get(COMMON.RECEIVE_DATA_TIME).toString()); - Integer success = (long)p.get(COMMON.RECEIVE_DATA_SUCCESS)>0?1:0; - - Properties results = new Properties(); - results.put(TOTAL_TIME, time); - results.put(TOTAL_SUCCESS, success); - - Properties extra = getExtraMeta(p); - processData(extra, results); - } - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.metrics.Metric#close() - */ - @Override - public void close() { - callbackClose(); - super.close(); - - } - - protected void callbackClose() { - Model m = ModelFactory.createDefaultModel(); - Property property = getMetricProperty(); - long sum = 0; - for(Properties key : dataContainer.keySet()){ - Double totalTime = (Double) dataContainer.get(key).get(TOTAL_TIME); - Integer success = (Integer) dataContainer.get(key).get(TOTAL_SUCCESS); - sum+=success; - Resource subject = getSubject(key); - m.add(getConnectingStatement(subject)); - m.add(subject, property, ResourceFactory.createTypedLiteral(success)); - } - m.add(getTaskResource(), property, ResourceFactory.createTypedLiteral(sum)); - sendData(m); - } - - - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQPHMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQPHMetric.java deleted file mode 100644 index ad92b92c1..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/NoQPHMetric.java +++ /dev/null @@ -1,91 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.AbstractMetric; -import org.apache.jena.rdf.model.*; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -/** - * - * The Number Of Queries Per Hour Metric - * - * @author f.conrads - * - */ -@Shorthand("NoQPH") -public class NoQPHMetric extends AbstractMetric { - - protected static final Object TOTAL_TIME = "totalTime"; - protected static final Object TOTAL_SUCCESS = "totalSuccess"; - - - protected static Logger LOGGER = LoggerFactory.getLogger(NoQPHMetric.class); - - - protected long hourInMS = 3600000; - - - public NoQPHMetric(){ - super("Number Of Queries Per Hour", "NoQPH", "Will calculate the number of queries which could be executed successfully per Hour."); - } - - protected NoQPHMetric(String name, String shortName, String description){ - 
super(name, shortName, description); - } - - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.metrics.Metric#receiveData(java.util.Properties) - */ - @Override - public void receiveData(Properties p) { - LOGGER.debug(this.getShortName()+" has received "+p); - double time = Double.parseDouble(p.get(COMMON.RECEIVE_DATA_TIME).toString()); - Integer success = (long)p.get(COMMON.RECEIVE_DATA_SUCCESS)>0?1:0; - - Properties results = new Properties(); - results.put(TOTAL_TIME, time); - results.put(TOTAL_SUCCESS, success); - - Properties extra = getExtraMeta(p); - processData(extra, results); - } - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.metrics.Metric#close() - */ - @Override - public void close() { - callbackClose(); - super.close(); - - } - - protected void callbackClose() { - Model m = ModelFactory.createDefaultModel(); - Property property = getMetricProperty(); - Double sum = 0.0; - for(Properties key : dataContainer.keySet()){ - Double totalTime = (Double) dataContainer.get(key).get(TOTAL_TIME); - Integer success = (Integer) dataContainer.get(key).get(TOTAL_SUCCESS); - Double noOfQueriesPerHour = hourInMS*success*1.0/totalTime; - sum+=noOfQueriesPerHour; - Resource subject = getSubject(key); - m.add(getConnectingStatement(subject)); - m.add(subject, property, ResourceFactory.createTypedLiteral(noOfQueriesPerHour)); - } - - m.add(getTaskResource(), property, ResourceFactory.createTypedLiteral(sum)); - sendData(m); - } - - - - } diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QMPHMetric.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QMPHMetric.java deleted file mode 100644 index 0c0b5a352..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QMPHMetric.java +++ /dev/null @@ -1,64 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.jena.rdf.model.*; -import org.slf4j.LoggerFactory; - -import java.util.Properties; - -/** - * - * The Query Mixes Per Hour Metric - * - * @author f.conrads - * - */ -@Shorthand("QMPH") -public class QMPHMetric extends NoQPHMetric { - - public QMPHMetric(){ - super("Query Mixes Per Hour", "QMPH", "Will calculate the query mixes which could be executed successfully per Hour."); - LOGGER = LoggerFactory.getLogger(QMPHMetric.class); - } - - - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.metrics.Metric#close() - */ - @Override - public void close() { - callbackClose(); - super.close(); - } - - /** - * callback which will be called in close - */ - @Override - protected void callbackClose(){ - Model m = ModelFactory.createDefaultModel(); - Property property = getMetricProperty(); - Double sum = 0.0; - for(Properties key : dataContainer.keySet()){ - Double totalTime = (double) dataContainer.get(key).get(TOTAL_TIME); - Integer success = (Integer) dataContainer.get(key).get(TOTAL_SUCCESS); - - double noOfQueriesPerHour = hourInMS*success*1.0/totalTime; - - int noOfQueryMixes = (int) key.get(COMMON.NO_OF_QUERIES); - Double qmph=noOfQueriesPerHour*1.0/noOfQueryMixes; - - sum+=qmph; - Resource subject = getSubject(key); - m.add(getConnectingStatement(subject)); - m.add(subject, property, ResourceFactory.createTypedLiteral(qmph)); - } - m.add(getTaskResource(), property, ResourceFactory.createTypedLiteral(sum)); - sendData(m); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QPSMetric.java 
b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QPSMetric.java deleted file mode 100644 index 68e34bbf1..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/metrics/impl/QPSMetric.java +++ /dev/null @@ -1,232 +0,0 @@ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.AbstractMetric; -import org.apache.jena.rdf.model.*; -import org.apache.jena.vocabulary.RDF; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.util.HashMap; -import java.util.Map; -import java.util.Properties; - -/** - * Queries Per Second Metric implementation - * - * @author f.conrads - * - */ -@Shorthand("QPS") -public class QPSMetric extends AbstractMetric { - - protected static Logger LOGGER = LoggerFactory.getLogger(QPSMetric.class); - - private static Property queryProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"query"); - private static Property failProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"failed"); - private static Property succeededProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"succeeded"); - private static Property ttProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"totalTime"); - private static Property resultSize = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"resultSize"); - private static Property timeOuts = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"timeOuts"); - private static Property unknownException = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"unknownException"); - private static Property wrongCodes = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"wrongCodes"); - private static Property penalizedQPSProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"penalizedQPS"); - private static Property queryID = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"queryID"); - - protected long hourInMS = 3600000; - protected Integer penalty = null; - private boolean noPenalty= false; - - public QPSMetric() { - super( - "Queries Per Second", - "QPS", - "Will calculate for each query the amount of how many times the query could be executed succesfully in one second." - + " Further on it will save the totaltime of each query, the failure and the success"); - } - - public QPSMetric(Integer penalty) { - super( - "Queries Per Second", - "QPS", - "Will calculate for each query the amount of how many times the query could be executed succesfully in one second." 
- + " Further on it will save the totaltime of each query, the failure and the success"); - this.penalty = penalty; - } - - public QPSMetric(String name, String shortName, String description) { - super(name, shortName, description); - } - - @Override - public void receiveData(Properties p) { - //Save success and time of each query - LOGGER.debug(this.getShortName() + " has received " + p); - double time = Double.parseDouble(p.get(COMMON.RECEIVE_DATA_TIME).toString()); - long tmpSuccess = Long.parseLong(p.get(COMMON.RECEIVE_DATA_SUCCESS).toString()); - long success = tmpSuccess>0?1:0; - long failure = success==1?0:1; - long timeout = tmpSuccess==COMMON.QUERY_SOCKET_TIMEOUT?1:0; - long unknown = tmpSuccess==COMMON.QUERY_UNKNOWN_EXCEPTION?1:0; - long wrongCode = tmpSuccess==COMMON.QUERY_HTTP_FAILURE?1:0; - Double penalty=getPenalty(p); - - long size=-1; - double penalizedTime=getPenalizedTime(penalty, failure, time); - if(p.containsKey(COMMON.RECEIVE_DATA_SIZE)) { - size = Long.parseLong(p.get(COMMON.RECEIVE_DATA_SIZE).toString()); - } - String queryID = p.getProperty(COMMON.QUERY_ID_KEY); - int queryHash = Integer.parseInt(p.get(COMMON.QUERY_HASH).toString()); - Properties extra = getExtraMeta(p); - - Properties tmp = putResults(extra, time, success, failure, timeout, unknown, wrongCode, penalizedTime, size, queryHash, queryID); - addDataToContainer(extra, tmp); - - } - - private Properties putResults(Properties extra, double time, long success, long failure, long timeout, long unknown, long wrongCode, double penalizedTime, long size, int queryHash, String queryID) { - Properties tmp = getDataFromContainer(extra); - if(tmp!=null && tmp.containsKey(queryID)){ - Object[] oldArr = (Object[]) tmp.get(queryID); - if (success > 0) - oldArr[0] = (double) oldArr[0] + time; - oldArr[1] = (long) oldArr[1] + success; - oldArr[2] = (long) oldArr[2] + failure; - if((long)oldArr[3] map = new HashMap(); - Model m = ModelFactory.createDefaultModel(); - - for(Properties key : dataContainer.keySet()){ - Properties value = dataContainer.get(key); - Resource subjectParent = getSubject(key); - m.add(getConnectingStatement(subjectParent)); - addToModel(value, subjectParent, m, map); - } - Resource subjectParent = getTaskResource(); - addToModel( map, subjectParent, m, null); - sendData(m); - } - - private void addToModel(Map value, Resource subjectParent, Model m, Map map){ - Property qpsProperty = getMetricProperty(); - - for(Object queryID : value.keySet()){ - Object[] resArr = (Object[]) value.get(queryID); - if(map!=null) - mergeResults(map, queryID, resArr); - Double qps = (long) resArr[1]/*success*/ / (double) resArr[0]/*time*/ / 1000.0/*ms to s*/; - Double pqps = ((long)resArr[1]/*success*/ + (long)resArr[2]/*failure*/) / ((double)resArr[7]/*penalizedTime*//1000.0/*ms to s*/); - - Resource query = ResourceFactory.createResource(subjectParent.getURI()+"/"+queryID); - m.add(subjectParent, queryProperty, query); - m.add(query, qpsProperty, ResourceFactory.createTypedLiteral(qps)); - m.add(query, ttProperty, ResourceFactory.createTypedLiteral((double)resArr[0])); - m.add(query, succeededProperty, ResourceFactory.createTypedLiteral((long)resArr[1])); - m.add(query, failProperty, ResourceFactory.createTypedLiteral((long)resArr[2])); - if((long)resArr[3]!=-1L) { - m.add(query, resultSize, ResourceFactory.createTypedLiteral((long)resArr[3])); - } - else{ - m.add(query, resultSize, ResourceFactory.createTypedLiteral("?")); - } - m.add(query, timeOuts, ResourceFactory.createTypedLiteral((long)resArr[4])); - 
m.add(query, unknownException, ResourceFactory.createTypedLiteral((long)resArr[5])); - m.add(query, wrongCodes, ResourceFactory.createTypedLiteral((long)resArr[6])); - if(!noPenalty) { - m.add(query, penalizedQPSProperty, ResourceFactory.createTypedLiteral(pqps)); - } - m.add(query, QPSMetric.queryID, ResourceFactory.createResource(COMMON.RES_BASE_URI+(int)resArr[8]+ "/" + queryID.toString())); - m.add(query, RDF.type, ResourceFactory.createResource(COMMON.CLASS_BASE_URI+"ExecutedQuery")); - } - } - - private void mergeResults(Map map, Object queryID, Object[] resArr) { - if(map.containsKey(queryID)){ - Object[] currentResults = (Object[])map.get(queryID); - Object[] newResults = new Object[currentResults.length]; - for(int i=0;i storages = new HashSet(); - - private static StorageManager instance; - - public static synchronized StorageManager getInstance() { - if (instance == null) { - instance = new StorageManager(); - } - return instance; - } - - /** - * Will add the Storage - * - * @param storage - */ - public void addStorage(Storage storage){ - if(storage==null){ - return; - } - storages.add(storage); - } - - /** - * Will return each Storage - * - * @return - */ - public Set getStorages(){ - return storages; - } - - /** - * Simply adds a Model - * @param m - */ - public void addData(Model m){ - for(Storage s : storages){ - s.addData(m); - } - } - - - /** - * Will add the MetaData to each Storage - * @param p - */ - public void addMetaData(Properties p){ - for(Storage s : storages){ - try{ - s.addMetaData(p); - }catch(Exception e){ - LOGGER.error("Could not store meta data in "+s.getClass().getSimpleName()+" for Properties "+p, e); - } - } - } - - - @Override - public String toString(){ - StringBuilder ret = new StringBuilder(); - Iterator it = storages.iterator(); - for(int i=0;i storages) { - this.storages.addAll(storages); - } - - public void close() { - for(Storage storage : storages){ - storage.close(); - } - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/TripleBasedStorage.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/TripleBasedStorage.java deleted file mode 100644 index eb4c15ded..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/TripleBasedStorage.java +++ /dev/null @@ -1,192 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage; - -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.ResourceFactory; -import org.apache.jena.rdf.model.Statement; -import org.apache.jena.vocabulary.RDF; -import org.apache.jena.vocabulary.RDFS; - -import java.io.UnsupportedEncodingException; -import java.net.URLEncoder; -import java.nio.charset.StandardCharsets; -import java.util.Calendar; -import java.util.GregorianCalendar; -import java.util.Properties; -import java.util.Set; - -/** - * This Storage will save all the metric results as triples - * - * @author f.conrads - * - */ -public abstract class TripleBasedStorage implements Storage { - - protected String baseUri = COMMON.BASE_URI; - private String resource = COMMON.RES_BASE_URI; - private String properties = COMMON.PROP_BASE_URI; - - - protected Model metricResults = createPrefixModel(); - - private String suiteClassUri = baseUri + "/class/Suite"; - private String expClassUri = baseUri + "/class/Experiment"; - private String taskClassUri = baseUri + "/class/Task"; - private String conClassUri = baseUri + "/class/Connection"; - 
private String datasetClassUri = baseUri + "/class/Dataset"; - - - private String classUri = RDF.type.getURI(); - private String rdfsUri = "http://www.w3.org/2000/01/rdf-schema#"; - private String xsdUri = "http://www.w3.org/2001/XMLSchema#"; - - - protected Model createPrefixModel() { - Model metricResults = ModelFactory.createDefaultModel(); - metricResults.setNsPrefix("iprop", COMMON.PROP_BASE_URI); - metricResults.setNsPrefix("iont", COMMON.CLASS_BASE_URI); - metricResults.setNsPrefix("ires", COMMON.RES_BASE_URI); - metricResults.setNsPrefix("lsqr", "http://lsq.aksw.org/res/"); - return metricResults; - } - - - /* - * (non-Javadoc) - * - * @see org.aksw.iguana.rp.storage.Storage#addMetaData(java.util.Properties) - */ - @Override - public void addMetaData(Properties p) { - - String suiteUrl = getUrlWithResourcePrefix(p, COMMON.SUITE_ID_KEY); - String expUrl = getUrlWithResourcePrefix(p, COMMON.EXPERIMENT_ID_KEY); - String taskUrl = getUrlWithResourcePrefix(p, COMMON.EXPERIMENT_TASK_ID_KEY); - - String datasetUrl = getUrlWithResourcePrefix(p, COMMON.DATASET_ID_KEY); - String conName = p.getProperty(COMMON.CONNECTION_ID_KEY); - if(p.containsKey(COMMON.CONNECTION_VERSION_KEY)){ - conName+="-"+p.getProperty(COMMON.CONNECTION_VERSION_KEY); - } - String connUrl = getUrlWithResourcePrefix(conName); - - String actualTaskID = getUrlWithResourcePrefix(p, COMMON.EXPERIMENT_TASK_CLASS_ID_KEY); - - - metricResults.add(createStatement(suiteUrl, getUrlWithPropertyPrefix("experiment"), expUrl, true)); - metricResults.add(createStatement(suiteUrl, classUri, suiteClassUri, true)); - metricResults.add(createStatement(expUrl, getUrlWithPropertyPrefix("task"), taskUrl, true)); - metricResults.add(createStatement(expUrl, getUrlWithPropertyPrefix("dataset"), datasetUrl, true)); - metricResults.add(createStatement(expUrl, classUri, expClassUri, true)); - metricResults.add(createStatement(taskUrl, getUrlWithPropertyPrefix("connection"), connUrl, true)); - if(p.containsKey(COMMON.EXPERIMENT_TASK_NAME_KEY)){ - metricResults.add(metricResults.createResource(taskUrl), RDFS.label, p.getProperty(COMMON.EXPERIMENT_TASK_NAME_KEY)); - } - - metricResults.add(createStatement(connUrl, classUri, conClassUri, true)); - metricResults.add(createStatement(datasetUrl, classUri, datasetClassUri, true)); - metricResults.add(createStatement(taskUrl, classUri, taskClassUri, true)); - metricResults.add(createStatement(taskUrl, classUri, actualTaskID, true)); - - addExtraMetadata(p, taskUrl); - metricResults.add(metricResults.createResource(datasetUrl), RDFS.label, p.getProperty(COMMON.DATASET_ID_KEY)); - metricResults.add(metricResults.createResource(connUrl), RDFS.label, p.getProperty(COMMON.CONNECTION_ID_KEY)); - if(p.containsKey(COMMON.CONNECTION_VERSION_KEY)) { - metricResults.add(metricResults.createResource(connUrl), ResourceFactory.createProperty(getUrlWithPropertyPrefix("version")), p.getProperty(COMMON.CONNECTION_VERSION_KEY)); - } - - if(p.containsKey(COMMON.QUERY_STATS)) { - Model queryStats = (Model) p.get(COMMON.QUERY_STATS); - metricResults.add(queryStats); - } - - Calendar cal = GregorianCalendar.getInstance(); - metricResults.add(metricResults.createResource(taskUrl), - ResourceFactory.createProperty(rdfsUri + "startDate"), metricResults.createTypedLiteral(cal)); - } - - private String getUrlWithResourcePrefix(Properties p, String key) { - return getUrlWithResourcePrefix(p.getProperty(key)); - } - - private String getUrlWithResourcePrefix(String suffix) { - try { - String[] suffixParts = suffix.split("/"); - for (int 
i = 0; i < suffixParts.length; i++) - suffixParts[i] = URLEncoder.encode(suffixParts[i], StandardCharsets.UTF_8.toString()); - return resource + String.join("/", suffixParts); - } catch (UnsupportedEncodingException e) { - return resource + suffix.hashCode(); - } - } - - private String getUrlWithPropertyPrefix(String suffix) { - try { - String[] suffixParts = suffix.split("/"); - for (int i = 0; i < suffixParts.length; i++) - suffixParts[i] = URLEncoder.encode(suffixParts[i], StandardCharsets.UTF_8.toString()); - return properties + String.join("/", suffixParts); - } catch (UnsupportedEncodingException e) { - return properties + suffix.hashCode(); - } - } - - private Statement createStatement(String subject, String predicate, Object object) - { - return metricResults.createStatement(metricResults.createResource(subject), ResourceFactory.createProperty(predicate), metricResults.createTypedLiteral(object)); - } - - private Statement createStatement(String subject, String predicate, String object, boolean isObjectUri) - { - if(isObjectUri) - return metricResults.createStatement(metricResults.createResource(subject), ResourceFactory.createProperty(predicate), metricResults.createResource(object)); - else - return metricResults.createStatement(metricResults.createResource(subject), ResourceFactory.createProperty(predicate), object); - } - - private void addExtraMetadata(Properties p, String taskUrl) { - Properties extra = (Properties) p.get(COMMON.EXTRA_META_KEY); - for (Object obj : extra.keySet()) { - if (p.containsKey(COMMON.EXTRA_IS_RESOURCE_KEY) && ((Set) p.get(COMMON.EXTRA_IS_RESOURCE_KEY)).contains(obj)) { - metricResults.add(createStatement( - taskUrl, - getUrlWithResourcePrefix(obj.toString()), - getUrlWithResourcePrefix(extra.get(obj).toString()), - true)); - } else { - metricResults.add(createStatement( - taskUrl, - getUrlWithPropertyPrefix(obj.toString()), - extra.get(obj))); - } - } - } - - @Override - public String toString() { - return this.getClass().getSimpleName(); - } - - /** - * Ends the task and adds a rdfs:endDate triple with the current time - * @param taskID - */ - public void endTask(String taskID) { - Calendar cal = GregorianCalendar.getInstance(); - String taskUrl = getUrlWithResourcePrefix(taskID); - metricResults.add(metricResults.add(metricResults.createResource(taskUrl), - ResourceFactory.createProperty(rdfsUri + "endDate"), metricResults.createTypedLiteral(cal))); - } - - - public void addData(Model data){ - metricResults.add(data); - } - - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/NTFileStorage.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/NTFileStorage.java deleted file mode 100644 index 12b7f78ce..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/NTFileStorage.java +++ /dev/null @@ -1,86 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.rp.storage.TripleBasedStorage; -import org.apache.jena.riot.RDFDataMgr; -import org.apache.jena.riot.RDFFormat; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Calendar; - -/** - * - * Will save results as NTriple File either using the provided name or the a generated one. 
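 *
 * A minimal usage sketch (the file name is illustrative; addData and commit are the
 * Storage methods exercised by the tests further below):
 *
 *   Storage store = new NTFileStorage("results.nt");
 *   store.addData(model);   // buffer an Apache Jena Model of metric results
 *   store.commit();         // append the buffered triples to results.nt as N-Triples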
- * - * @author f.conrads - * - */ -@Shorthand("NTFileStorage") -public class NTFileStorage extends TripleBasedStorage { - - private static final Logger LOGGER = LoggerFactory - .getLogger(NTFileStorage.class); - - private StringBuilder file; - - /** - * Uses a generated file called results_{DD}-{MM}-{YYYY}_{HH}-{mm}.nt - */ - public NTFileStorage() { - Calendar now = Calendar.getInstance(); - - this.file = new StringBuilder(); - file.append("results_") - .append( - String.format("%d-%02d-%02d_%02d-%02d.%03d", - now.get(Calendar.YEAR), - now.get(Calendar.MONTH) + 1, - now.get(Calendar.DAY_OF_MONTH), - now.get(Calendar.HOUR_OF_DAY), - now.get(Calendar.MINUTE), - now.get(Calendar.MILLISECOND) - ) - ) - .append(".nt"); - } - - /** - * Uses the provided filename - * @param fileName - */ - public NTFileStorage(String fileName){ - this.file = new StringBuilder(fileName); - } - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.storage.Storage#commit() - */ - @Override - public void commit() { - try (OutputStream os = new FileOutputStream(file.toString(), true)) { - RDFDataMgr.write(os, metricResults, RDFFormat.NTRIPLES); - metricResults.removeAll(); - } catch (IOException e) { - LOGGER.error("Could not commit to NTFileStorage.", e); - } - } - - - - @Override - public String toString(){ - return this.getClass().getSimpleName(); - } - - public String getFileName(){ - return this.file.toString(); - } - -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/RDFFileStorage.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/RDFFileStorage.java deleted file mode 100644 index d797e0fdd..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/RDFFileStorage.java +++ /dev/null @@ -1,82 +0,0 @@ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.rp.storage.TripleBasedStorage; -import org.apache.jena.riot.Lang; -import org.apache.jena.riot.RDFDataMgr; -import org.apache.jena.riot.RDFLanguages; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.FileOutputStream; -import java.io.IOException; -import java.io.OutputStream; -import java.util.Calendar; - -@Shorthand("RDFFileStorage") -public class RDFFileStorage extends TripleBasedStorage { - - private static final Logger LOGGER = LoggerFactory.getLogger(RDFFileStorage.class.getName()); - - private Lang lang = Lang.TTL; - private StringBuilder file; - - /** - * Uses a generated file called results_{DD}-{MM}-{YYYY}_{HH}-{mm}.ttl - */ - public RDFFileStorage() { - Calendar now = Calendar.getInstance(); - - this.file = new StringBuilder(); - file.append("results_") - .append( - String.format("%d-%02d-%02d_%02d-%02d.%03d", - now.get(Calendar.YEAR), - now.get(Calendar.MONTH) + 1, - now.get(Calendar.DAY_OF_MONTH), - now.get(Calendar.HOUR_OF_DAY), - now.get(Calendar.MINUTE), - now.get(Calendar.MILLISECOND) - ) - ) - .append(".ttl"); - } - - /** - * Uses the provided filename - * @param fileName - */ - public RDFFileStorage(String fileName){ - this.file = new StringBuilder(fileName); - this.lang= RDFLanguages.filenameToLang(fileName, Lang.TTL); - - } - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.storage.Storage#commit() - */ - @Override - public void commit() { - - } - - @Override - public void close(){ - try (OutputStream os = new FileOutputStream(file.toString(), true)) { - RDFDataMgr.write(os, metricResults, this.lang); - metricResults.removeAll(); - } catch (IOException e) { - 
LOGGER.error("Could not commit to RDFFileStorage using lang: "+lang, e); - } - } - - - @Override - public String toString(){ - return this.getClass().getSimpleName(); - } - - public String getFileName(){ - return this.file.toString(); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorage.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorage.java deleted file mode 100644 index a832efd21..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorage.java +++ /dev/null @@ -1,105 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.annotation.Shorthand; -import org.aksw.iguana.rp.storage.TripleBasedStorage; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.Credentials; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.HttpClient; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.client.HttpClients; -import org.apache.jena.riot.Lang; -import org.apache.jena.riot.RDFDataMgr; -import org.apache.jena.update.UpdateExecutionFactory; -import org.apache.jena.update.UpdateFactory; -import org.apache.jena.update.UpdateProcessor; -import org.apache.jena.update.UpdateRequest; - -import java.io.StringWriter; - - -/** - * This Storage will save all the metric results into a specified triple store - * - * @author f.conrads - * - */ -@Shorthand("TriplestoreStorage") -public class TriplestoreStorage extends TripleBasedStorage { - - private UpdateRequest blockRequest = UpdateFactory.create(); - - - private String updateEndpoint; - private String endpoint; - private String user; - private String pwd; - - - public TriplestoreStorage(String endpoint, String updateEndpoint, String user, String pwd, String baseUri){ - this.endpoint=endpoint; - this.updateEndpoint=updateEndpoint; - this.user=user; - this.pwd=pwd; - if(baseUri!=null && !baseUri.isEmpty()){ - this.baseUri=baseUri; - } - } - - public TriplestoreStorage(String endpoint, String updateEndpoint, String baseUri){ - this.endpoint=endpoint; - this.updateEndpoint=updateEndpoint; - if(baseUri!=null && !baseUri.isEmpty()){ - this.baseUri=baseUri; - } - } - - public TriplestoreStorage(String endpoint, String updateEndpoint){ - this.endpoint=endpoint; - this.updateEndpoint=updateEndpoint; - } - - /* (non-Javadoc) - * @see org.aksw.iguana.rp.storage.Storage#commit() - */ - @Override - public void commit() { - if (metricResults.size() == 0) - return; - - StringWriter results = new StringWriter(); - RDFDataMgr.write(results, metricResults, Lang.NT); - String update = "INSERT DATA {" + results.toString() + "}"; - //Create Update Request from block - blockRequest.add(update); - - //submit Block to Triple Store - UpdateProcessor processor = UpdateExecutionFactory - .createRemote(blockRequest, updateEndpoint, createHttpClient()); - processor.execute(); - blockRequest = new UpdateRequest(); - } - - - - private HttpClient createHttpClient(){ - CredentialsProvider credsProvider = new BasicCredentialsProvider(); - if(user !=null && pwd !=null){ - Credentials credentials = new UsernamePasswordCredentials(user, pwd); - credsProvider.setCredentials(AuthScope.ANY, credentials); - } - HttpClient httpclient = HttpClients.custom() - .setDefaultCredentialsProvider(credsProvider) - .build(); - return httpclient; - } - - @Override - public String toString(){ 
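 // commit() (above) wraps the buffered model into a single SPARQL Update of the form
 // INSERT DATA { ...N-Triples... } and submits it to updateEndpoint through Jena's
 // UpdateExecutionFactory.createRemote, attaching the optional user/pwd credentials.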
- return this.getClass().getSimpleName(); - } -} diff --git a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/vocab/Vocab.java b/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/vocab/Vocab.java deleted file mode 100644 index 5dd8989f8..000000000 --- a/iguana.resultprocessor/src/main/java/org/aksw/iguana/rp/vocab/Vocab.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.aksw.iguana.rp.vocab; - -import org.aksw.iguana.commons.constants.COMMON; -import org.apache.jena.rdf.model.Property; -import org.apache.jena.rdf.model.Resource; -import org.apache.jena.rdf.model.ResourceFactory; - -/** - * RDF Vocabulary Classes and Properties - */ -public class Vocab { - - private static String rdfs = "http://www.w3.org/2000/01/rdf-schema#"; - public static Property aggrProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "aggregations"); - public static Property rdfsID = ResourceFactory.createProperty(rdfs + "ID"); - public static Property filterProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "filter"); - public static Property groupByProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "groupBy"); - public static Property havingProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "having"); - public static Property triplesProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "triples"); - public static Property offsetProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "offset"); - public static Property optionalProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "optional"); - public static Property orderByProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "orderBy"); - public static Property unionProperty = ResourceFactory.createProperty(COMMON.PROP_BASE_URI + "union"); - public static Property worker2metric = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"metric"); - public static Property workerResult = ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"workerResult"); - - public static Resource workerClass = ResourceFactory.createResource(COMMON.CLASS_BASE_URI+"Worker"); - public static Resource queryClass = ResourceFactory.createResource(COMMON.CLASS_BASE_URI+"Query"); - public static Resource metricClass = ResourceFactory.createResource( COMMON.CLASS_BASE_URI+"Metric"); - - -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/metrics/impl/MetricTest.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/metrics/impl/MetricTest.java deleted file mode 100644 index 950a40c2b..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/metrics/impl/MetricTest.java +++ /dev/null @@ -1,140 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.metrics.impl; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.metrics.Metric; -import org.aksw.iguana.rp.storage.StorageManager; -import org.aksw.iguana.rp.utils.EqualityStorage; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import org.junit.runners.Parameterized.Parameters; - -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; - -/** - * This will do a small test with every implemented Metric - * - * @author f.conrads - * - */ -@RunWith(Parameterized.class) -public 
class MetricTest { - - private final Model goldenModel; - private Properties extra = new Properties(); - private Metric m; - private boolean sendPenalty; - - /** - * @return Configurations to test - */ - @Parameters - public static Collection data() { - List testConfigs = new ArrayList(); - - testConfigs.add(new Object[] { new NoQPHMetric(),"src/test/resources/nt/noqphtest.nt", false}); - testConfigs.add(new Object[] { new QPSMetric(), "src/test/resources/nt/qpstest.nt", false}); - //check if penalty will be used if send. - testConfigs.add(new Object[] { new QPSMetric(), "src/test/resources/nt/qpspenaltytest.nt", true}); - testConfigs.add(new Object[] { new QPSMetric(1000), "src/test/resources/nt/qpspenaltytest.nt", false}); - //Test if 2000 will be used instead of provided 1000 - testConfigs.add(new Object[] { new QPSMetric(2000), "src/test/resources/nt/qpspenaltytest2.nt", true}); - testConfigs.add(new Object[] { new AvgQPSMetric(), "src/test/resources/nt/avgqpstest.nt", false}); - testConfigs.add(new Object[] { new AvgQPSMetric(2000), "src/test/resources/nt/penaltyavgqpstest.nt", true}); - - testConfigs.add(new Object[] { new NoQMetric(), "src/test/resources/nt/noqtest.nt", false}); - testConfigs.add(new Object[] { new QMPHMetric(), "src/test/resources/nt/qmphtest.nt", false}); - testConfigs.add(new Object[] { new EachQueryMetric(), "src/test/resources/nt/eqtest.nt", false}); - testConfigs.add(new Object[] { new F1MeasureMetric(), "src/test/resources/nt/f1test.nt", false}); - - return testConfigs; - } - - - - public MetricTest(Metric m, String golden, boolean sendPenalty) throws FileNotFoundException { - - //meta = new Triple("1/1/1/"+extra.hashCode(), "a", "b"); - this.m = m; - this.goldenModel = ModelFactory.createDefaultModel(); - this.goldenModel.read(new FileReader(golden), null, "N-TRIPLE"); - this.sendPenalty=sendPenalty; - } - - @Test - public void modelTest(){ - Model[] data = test(m, goldenModel); - //assert equals all triples in one are the same as the other - assertEquals(data[0].size(), data[1].size()); - data[0].remove(data[1]); - //if size was the same, and after EXPECTED <- EXPECTED/ACTUAL is either 0 if EXPECTED=ACTUAL or not zero, and the size of expected is bigger than 0 - assertEquals(0, data[0].size()); - } - - - public Model[] test(Metric metric, Model golden){ - - StorageManager smanager = new StorageManager(); - EqualityStorage storage = new EqualityStorage(golden); - smanager.addStorage(storage); - metric.setStorageManager(smanager); - metric.setMetaData(createMetaData()); - Properties extraMeta = new Properties(); - extraMeta.put(COMMON.WORKER_ID, "0"); - extraMeta.put(COMMON.NO_OF_QUERIES, 2); - metric.receiveData(createData(200, "sparql1", "1123",120, 1, extraMeta)); - metric.receiveData(createData(250, "sparql2", "1125",100,1, extraMeta)); - extraMeta = new Properties(); - extraMeta.put(COMMON.WORKER_ID, "1"); - extraMeta.put(COMMON.NO_OF_QUERIES, 2); - metric.receiveData(createData(150, "sparql1", "1123", null, 1, extraMeta)); - metric.receiveData(createData(100, "sparql2", "1125",null,-2L, extraMeta)); - - metric.close(); - return new Model[]{storage.getExpectedModel(), storage.getActualModel()}; - - } - - private Properties createData(double time, String queryID, String queryHash, Integer resultSize, long success, Properties extraMeta) { - Properties p = new Properties(); - p.setProperty(COMMON.EXPERIMENT_TASK_ID_KEY, "1/1/1"); - p.put(COMMON.RECEIVE_DATA_SUCCESS, success); - p.put(COMMON.RECEIVE_DATA_TIME, time); - p.put(COMMON.QUERY_ID_KEY, queryID); - 
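 // The success argument is a long code: a positive value marks a successful run, while
 // negative values (such as the -2L used in test() above) simulate failed executions,
 // which the penalty-based metric variants re-rate with the configured penalty time.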
p.put(COMMON.QUERY_HASH, queryHash); - p.put(COMMON.QUERY_STRING, "SELECT * {?s ?p ?o}"); - //tp=time/5, fp=time/10, fn=8 - p.put(COMMON.DOUBLE_RAW_RESULTS, new double[]{time/5.0, time/10.0, 8}); - if(this.sendPenalty) - p.put(COMMON.PENALTY, 1000); - if(resultSize!=null) - p.put(COMMON.RECEIVE_DATA_SIZE, resultSize); - p.put(COMMON.EXTRA_META_KEY, extraMeta); - return p; - } - - private Properties createMetaData() { - Properties p = new Properties(); - p.put(COMMON.EXPERIMENT_TASK_ID_KEY, "1/1/1"); - p.setProperty(COMMON.EXPERIMENT_ID_KEY, "1/1"); - p.setProperty(COMMON.CONNECTION_ID_KEY, "virtuoso"); - p.setProperty(COMMON.SUITE_ID_KEY, "1"); - p.setProperty(COMMON.DATASET_ID_KEY, "dbpedia"); - p.put(COMMON.RECEIVE_DATA_START_KEY, "true"); - p.put(COMMON.EXTRA_META_KEY, extra); - p.put(COMMON.NO_OF_QUERIES, 2); - return p; - } -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/NTFileStorageTest.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/NTFileStorageTest.java deleted file mode 100644 index 224ffba83..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/NTFileStorageTest.java +++ /dev/null @@ -1,109 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.storage.Storage; -import org.apache.jena.rdf.model.*; -import org.apache.jena.vocabulary.RDFS; -import org.junit.Test; - -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * - * This will test the NTFileStorage in short. - * - * - * @author f.conrads - * - */ -public class NTFileStorageTest { - - - @Test - public void dataTest() throws IOException{ - Storage store = new NTFileStorage("results_test2.nt"); - - new File("results_test2.nt").delete(); - - Model m = ModelFactory.createDefaultModel(); - m.read(new FileReader("src/test/resources/nt/results_test1.nt"), null, "N-TRIPLE"); - - store.addData(m); - store.commit(); - assertEqual("results_test2.nt","src/test/resources/nt/results_test1.nt", true); - new File("results_test2.nt").delete(); - - } - - @Test - public void metaTest() throws IOException{ - Storage store = new NTFileStorage("results_test.nt"); - new File("results_test.nt").delete(); - - Properties extraMeta = new Properties(); - extraMeta.setProperty("a", "b"); - - Properties p = new Properties(); - p.put(COMMON.EXPERIMENT_TASK_ID_KEY, "1/1/1"); - p.setProperty(COMMON.EXPERIMENT_ID_KEY, "1/1"); - p.setProperty(COMMON.CONNECTION_ID_KEY, "virtuoso"); - p.setProperty(COMMON.SUITE_ID_KEY, "1"); - p.setProperty(COMMON.DATASET_ID_KEY, "dbpedia"); - p.put(COMMON.RECEIVE_DATA_START_KEY, "true"); - p.put(COMMON.EXPERIMENT_TASK_CLASS_ID_KEY, "ClassName"); - p.put(COMMON.EXTRA_META_KEY, new Properties()); - p.put(COMMON.NO_OF_QUERIES, 2); - - store.addMetaData(p); - store.commit(); - assertEqual("results_test.nt", "src/test/resources/nt/nt_results_woMeta.nt", false); - new File("results_test.nt").delete(); - store = new NTFileStorage("results_test2.nt"); - - p.put(COMMON.EXTRA_META_KEY, extraMeta); - store.addMetaData(p); - store.commit(); - assertEqual("results_test2.nt", "src/test/resources/nt/nt_results_wMeta.nt", false); - - new File("results_test2.nt").delete(); - - - } - - /** - * Checks if two ntriple files are equal by loading them into a model and check if they have 
the same size - * and by removing the actual model from the expected, if the new size after removal equals 0 they are the same - * - * @param actualFile - * @param expectedFile - * @throws IOException - */ - public void assertEqual(String actualFile, String expectedFile, boolean ignoreDate) throws IOException{ - Model expected = ModelFactory.createDefaultModel(); - expected.read(new FileReader(expectedFile), null, "N-TRIPLE"); - Model actual = ModelFactory.createDefaultModel(); - actual.read(new FileReader(actualFile), null, "N-TRIPLE"); - assertEquals(expected.size(), actual.size()); - expected.remove(actual); - if(!ignoreDate){ - //Remove startDate as they are different, just check if actual contains a start date - Property startDate =ResourceFactory.createProperty(RDFS.getURI()+"startDate"); - assertTrue(actual.contains(null, startDate, (RDFNode)null)); - List stmts = expected.listStatements(null, startDate, (RDFNode)null).toList(); - assertEquals(1, stmts.size()); - expected.remove(stmts); - } - - assertEquals(0, expected.size()); - } -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/RDFFileStorageTest.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/RDFFileStorageTest.java deleted file mode 100644 index c44ec3e95..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/RDFFileStorageTest.java +++ /dev/null @@ -1,78 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.storage.Storage; -import org.apache.jena.rdf.model.*; -import org.apache.jena.riot.RDFLanguages; -import org.apache.jena.vocabulary.RDFS; -import org.junit.Test; - -import java.io.File; -import java.io.FileReader; -import java.io.IOException; -import java.util.List; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -/** - * - * This will test the RDFFileStorage in short. 
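 *
 * The assertEqual check below treats both files as Jena Models, i.e. as sets of triples:
 * if the two models have the same size and removing the actual model from the expected
 * one leaves an empty model, both files contain exactly the same ground triples.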
- * - * - * @author l.conrads - * - */ -public class RDFFileStorageTest { - - - @Test - public void dataTest() throws IOException{ - Storage store = new RDFFileStorage("results_test2.ttl"); - - new File("results_test2.ttl").delete(); - - Model m = ModelFactory.createDefaultModel(); - m.read(new FileReader("src/test/resources/nt/results_test1.nt"), null, "N-TRIPLE"); - - store.addData(m); - store.commit(); - store.close(); - - assertEqual("results_test2.ttl","src/test/resources/nt/results_test1.nt", true); - new File("results_test2.ttl").delete(); - - } - - - /** - * Checks if two ntriple files are equal by loading them into a model and check if they have the same size - * and by removing the actual model from the expected, if the new size after removal equals 0 they are the same - * - * @param actualFile - * @param expectedFile - * @throws IOException - */ - public void assertEqual(String actualFile, String expectedFile, boolean ignoreDate) throws IOException{ - Model expected = ModelFactory.createDefaultModel(); - expected.read(new FileReader(expectedFile), null, "N-TRIPLE"); - Model actual = ModelFactory.createDefaultModel(); - actual.read(new FileReader(actualFile), null, RDFLanguages.filenameToLang(actualFile).getName()); - assertEquals(expected.size(), actual.size()); - expected.remove(actual); - if(!ignoreDate){ - //Remove startDate as they are different, just check if actual contains a start date - Property startDate =ResourceFactory.createProperty(RDFS.getURI()+"startDate"); - assertTrue(actual.contains(null, startDate, (RDFNode)null)); - List stmts = expected.listStatements(null, startDate, (RDFNode)null).toList(); - assertEquals(1, stmts.size()); - expected.remove(stmts); - } - - assertEquals(0, expected.size()); - } -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorageTest.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorageTest.java deleted file mode 100644 index c9251fdd9..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/storage/impl/TriplestoreStorageTest.java +++ /dev/null @@ -1,114 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.storage.impl; - -import org.aksw.iguana.commons.constants.COMMON; -import org.aksw.iguana.rp.utils.ServerMock; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; -import org.apache.jena.rdf.model.ResourceFactory; -import org.junit.After; -import org.junit.Test; -import org.simpleframework.http.core.ContainerServer; -import org.simpleframework.transport.connect.SocketConnection; - -import java.io.IOException; -import java.net.InetSocketAddress; -import java.net.SocketAddress; -import java.util.Properties; - -import static org.junit.Assert.assertEquals; - -/** - * Will test if the TriplestoreStorage sends the correct INSERT command to a Mock Server - * - * @author f.conrads - * - */ -public class TriplestoreStorageTest { - - private static final int FAST_SERVER_PORT = 8023; - private ServerMock fastServerContainer; - private ContainerServer fastServer; - private SocketConnection fastConnection; - - private String metaExp = "INSERT DATA {\n" + - " .\n" + - " .\n" + - " .\n" + - " \"dbpedia\" .\n" + - " .\n" + - " \"virtuoso\" .\n" + - " .\n" + - " \"???\"^^ .\n" + - " .\n" + - " .\n" + - " .\n" + - " .\n" + - " .\n" + - "}"; - - private String dataExp = "INSERT DATA {\n"+ -" \"c\" .\n"+ -"}"; - - /** - * @throws IOException - */ - @Test - public void metaTest() throws IOException{ - 
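 // Boot a minimal HTTP container on FAST_SERVER_PORT; ServerMock records each request
 // body, so the INSERT DATA payload produced by TriplestoreStorage.commit() can be
 // asserted verbatim (the timestamp literal is masked as "???" by the regex below).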
fastServerContainer = new ServerMock(); - fastServer = new ContainerServer(fastServerContainer); - fastConnection = new SocketConnection(fastServer); - SocketAddress address1 = new InetSocketAddress(FAST_SERVER_PORT); - fastConnection.connect(address1); - - String host = "http://localhost:8023"; - TriplestoreStorage store = new TriplestoreStorage(host, host); - Properties p = new Properties(); - p.put(COMMON.EXPERIMENT_TASK_ID_KEY, "1/1/1"); - p.setProperty(COMMON.EXPERIMENT_ID_KEY, "1/1"); - p.setProperty(COMMON.CONNECTION_ID_KEY, "virtuoso"); - p.setProperty(COMMON.SUITE_ID_KEY, "1"); - p.setProperty(COMMON.DATASET_ID_KEY, "dbpedia"); - p.put(COMMON.EXPERIMENT_TASK_CLASS_ID_KEY, "ClassName"); - p.put(COMMON.RECEIVE_DATA_START_KEY, "true"); - p.put(COMMON.EXTRA_META_KEY, new Properties()); - p.put(COMMON.NO_OF_QUERIES, 2); - store.addMetaData(p); - store.commit(); - assertEquals(metaExp.trim(), fastServerContainer.getActualContent().trim().replaceAll("[0-9][0-9][0-9][0-9]\\-[0-9][0-9]\\-[0-9][0-9]T[0-9][0-9]\\:[0-9][0-9]\\:[0-9][0-9]\\.[0-9]+Z", "???"));//2020-09-21T22:06:45.109Z - } - - /** - * @throws IOException - */ - @After - public void close() throws IOException { - fastConnection.close(); - } - - - /** - * @throws IOException - */ - @Test - public void dataTest() throws IOException{ - fastServerContainer = new ServerMock(); - fastServer = new ContainerServer(fastServerContainer); - fastConnection = new SocketConnection(fastServer); - SocketAddress address1 = new InetSocketAddress(FAST_SERVER_PORT); - fastConnection.connect(address1); - - String host = "http://localhost:8023"; - TriplestoreStorage store = new TriplestoreStorage(host, host); - - Model m = ModelFactory.createDefaultModel(); - m.add(ResourceFactory.createResource(COMMON.RES_BASE_URI+"a"), ResourceFactory.createProperty(COMMON.PROP_BASE_URI+"b") , "c"); - store.addData(m); - store.commit(); - assertEquals(dataExp.trim(),fastServerContainer.getActualContent().trim()); - } - -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/EqualityStorage.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/EqualityStorage.java deleted file mode 100644 index 46b35b69d..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/EqualityStorage.java +++ /dev/null @@ -1,68 +0,0 @@ -/** - * - */ -package org.aksw.iguana.rp.utils; - -import org.aksw.iguana.rp.storage.Storage; -import org.apache.jena.rdf.model.Model; -import org.apache.jena.rdf.model.ModelFactory; - -import java.util.Properties; - -/** - * Class to help the Unit Metric Tests.
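 * (Concretely, it is constructed with the expected Jena Model, gathers everything passed
 * to addData(Model) into an actual Model, and exposes both through getExpectedModel()
 * and getActualModel() for the comparison done in MetricTest.)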
- * - * Will be initialized with an Array of Triple[]. - * It will be checked if the first received Data is equal to the first Array Object - * the second recv Data will be checked against the second Object and so on. - * - * @author f.conrads - * - */ -public class EqualityStorage implements Storage{ - - - private Model expectedModel; - private Model actualModel = ModelFactory.createDefaultModel(); - - - - public EqualityStorage( Model expectedModel) { - this.expectedModel = expectedModel; - } - - - - @Override - public void addData(Model data) { - this.actualModel.add(data); - } - - public Model getExpectedModel(){ - return this.expectedModel; - } - - public Model getActualModel(){ - return this.actualModel; - } - - - // NOTHING TO DO IN THE FOLLOWING METHODS - @Override - public void addMetaData(Properties p) { - //explicity empty - } - - @Override - public void commit() { - //explicity empty - } - - - @Override - public void endTask(String taskID) { - // TODO Auto-generated method stub - - } - -} diff --git a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/ServerMock.java b/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/ServerMock.java deleted file mode 100644 index 1db72d07d..000000000 --- a/iguana.resultprocessor/src/test/java/org/aksw/iguana/rp/utils/ServerMock.java +++ /dev/null @@ -1,55 +0,0 @@ -package org.aksw.iguana.rp.utils; - -import org.simpleframework.http.Request; -import org.simpleframework.http.Response; -import org.simpleframework.http.Status; -import org.simpleframework.http.core.Container; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.IOException; - -/** - * Server Mock representing a TS - * - * @author f.conrads - * - */ -public class ServerMock implements Container { - - private static final Logger LOGGER = LoggerFactory.getLogger(ServerMock.class); - private String actualContent; - - - @Override - public void handle(Request request, Response resp) { - String content=null; - try { - content = request.getContent(); - } catch (IOException e) { - LOGGER.error("Got exception.", e); - } - this.actualContent=content; - resp.setCode(Status.OK.code); - try { - resp.getOutputStream().close(); - } catch (IOException e) { - LOGGER.error("Could not close Response Output Stream"); - } - } - - /** - * @return the actualContent - */ - public String getActualContent() { - return actualContent; - } - - /** - * @param actualContent the actualContent to set - */ - public void setActualContent(String actualContent) { - this.actualContent = actualContent; - } - -} diff --git a/images/iguana3-logo.png b/images/iguana3-logo.png deleted file mode 100644 index 988f32f4c..000000000 Binary files a/images/iguana3-logo.png and /dev/null differ diff --git a/customs/images/logo_white.png b/images/logo_white.png similarity index 100% rename from customs/images/logo_white.png rename to images/logo_white.png diff --git a/mkdocs.yml b/mkdocs.yml index ac6a5ee09..85d6812be 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,24 +1,20 @@ site_name: Iguana $VERSION Documentation -#repo_url: https://github.com/dice-group/Iguana/ - -#google_analytics: ['UA-179945559-1', 'iguana-benchmark.eu'] extra: - version: $VERSION + version: $RELEASE_VERSION release_version: $RELEASE_VERSION social: - icon: fontawesome/brands/github - link: https://github.com/dice-group/Iguana + link: https://github.com/dice-group/IGUANA -repo_url: https://github.com/dice-group/Iguana -repo_name: dice-group/Iguana +repo_url: https://github.com/dice-group/IGUANA +repo_name: 
dice-group/IGUANA edit_uri: "" - theme: #name: 'readthedocs' - custom_dir: 'customs/' + custom_dir: 'images/' name: material features: - navigation.tabs @@ -32,11 +28,11 @@ theme: font: text: Roboto code: Roboto Mono - logo: images/logo_white.png - favicon: images/Iguana_new_logo6.png + logo: logo_white.png + favicon: IGUANA_logo.png icon: logo: logo - repo: fontawesome/brands/git-alt + repo: fontawesome/brands/github palette: - media: "(prefers-color-scheme: light)" scheme: default @@ -62,33 +58,17 @@ color: "#82BDC3" nav: - "General": - - "About": "about.md" - - "Architecture": "architecture.md" - - - "Quick Start Guide": - - "Download": "download.md" - - "Quick Configuration": "quick-config.md" - - "Run Iguana": "run-iguana.md" + - "About": "README.md" - - "Usage": - - "Workflow": "usage/workflow.md" - - "Configuration": "usage/configuration.md" - - "Stresstest" : "usage/stresstest.md" - - "Supported Workers": "usage/workers.md" - - "Supported Queries": "usage/queries.md" - - "Supported Languages": "usage/languages.md" - - "Metrics": "usage/metrics.md" - - "Benchmark Results": "usage/results.md" - - "Tutorial": "usage/tutorial.md" - - "Development": - - "Overview": "develop/overview.md" - - "Extend": - - "Tasks": "develop/extend-task.md" - - "Workers": "develop/extend-workers.md" - - "Query Handling": "develop/extend-queryhandling.md" - - "Languages": "develop/extend-lang.md" - - "Result storage": "develop/extend-result-storages.md" - - "Metrics": "develop/extend-metrics.md" - - "Use Iguana": - - "Maven": "develop/maven.md" - - "Javadoc": "http://iguana-benchmark.eu/javadoc/$VERSION/index.html" + - "Configuration": + - "Overview": "configuration/overview.md" + - "Tasks": "configuration/tasks.md" + - "Workers": "configuration/workers.md" + - "Queries": "configuration/queries.md" + - "Response Body Processing": "configuration/response_body_processor.md" + - "Language Processor": "configuration/language_processor.md" + - "Result Storage": "configuration/storages.md" + - "Metrics": "configuration/metrics.md" + - "RDF Results": "configuration/rdf_results.md" + - "Javadocs": + - "Javadocs": "http://iguana-benchmark.eu/javadoc/latest/" diff --git a/pom.xml b/pom.xml index 34cf99d60..0d15dbecd 100644 --- a/pom.xml +++ b/pom.xml @@ -1,9 +1,10 @@ - - 4.0.0 - org.aksw - iguana-parent + + 4.0.0 + org.aksw + iguana ${revision} - Iguana Parent + Iguana AGPLv3 or later @@ -30,19 +31,24 @@ https://github.com/dice-group/iguana/issues https://dice-research.org/IGUANA - pom - - iguana.commons - iguana.resultprocessor - iguana.corecontroller - + + ${major.minor.version}.${build.version} ${major.version}.${minor.version} - 3 - 3 - 3 + 4 + 0 + 0 + + 17 + 4.2.0 + UTF-8 + 17 + 17 + + 2.19.0 + github @@ -50,8 +56,142 @@ https://maven.pkg.github.com/dice-group/iguana + + + + org.apache.jena + jena-arq + ${jena.version} + + + org.apache.jena + jena-core + ${jena.version} + + + org.apache.jena + jena-querybuilder + ${jena.version} + + + ch.qos.logback + logback-classic + 1.4.14 + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + 2.12.5 + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + 2.12.5 + + + com.networknt + json-schema-validator + 1.0.78 + + + com.googlecode.json-simple + json-simple + 1.1.1 + + + org.junit.jupiter + junit-jupiter + 5.9.2 + test + + + com.opencsv + opencsv + 5.7.1 + + + org.lz4 + lz4-pure-java + 1.8.0 + + + org.apache.hbase + hbase-common + 2.5.5 + + + com.beust + jcommander + 1.82 + + + com.github.tomakehurst + wiremock-jre8-standalone + 2.35.0 + test + + + 
org.apache.httpcomponents.client5 + httpclient5 + 5.3 + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + 17 + 17 + UTF-8 + + -parameters + + + **/log4j2.yml + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + -Xmx16384M + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.4.1 + + false + iguana + + + + package + + shade + + + + + org.aksw.iguana.cc.controller.MainController + + + + + + + org.apache.maven.plugins maven-javadoc-plugin @@ -62,19 +202,19 @@ + maven-resources-plugin 3.1.0 copy-resources - package copy-resources - ${project.basedir}/target + ${project.basedir}/target/ ${project.basedir}/src/main/resources/ @@ -83,12 +223,6 @@ true - - ${project.basedir}/iguana.corecontroller/target/ - - iguana-${revision}-shaded.jar - - @@ -96,4 +230,100 @@ + + + + native + + + + org.junit.platform + junit-platform-launcher + 1.9.2 + test + + + + + + org.codehaus.mojo + exec-maven-plugin + 1.6.0 + + + run-script + generate-resources + + exec + + + ${project.basedir}/graalvm/generate-config.sh + + -t + ${project.build.directory} + + + + + cleanup-files + test + + exec + + + bash + + -c + if [ -f ${project.build.directory}/native/agent-output/test/*/resource-config.json ]; then sed "s/\\\\\\\\E//g" ${project.build.directory}/native/agent-output/test/*/resource-config.json | sed "s/\\\\\\\\Q//g" > ${project.build.directory}/resource-config.json.tmp && cp ${project.build.directory}/resource-config.json.tmp ${project.build.directory}/native/agent-output/test/*/resource-config.json; fi + + + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.1.2 + + + org.graalvm.buildtools + native-maven-plugin + 0.10.1 + true + + + build-native + + compile-no-fork + + package + + + test-native + + test + + test + + + + iguana + + --gc=G1 + -march=x86-64-v3 + --no-fallback + -O3 + -H:-UseCompressedReferences + -H:+UnlockExperimentalVMOptions + + + true + + + + + + + + diff --git a/schema/iguana-schema.json b/schema/iguana-schema.json index 842e4b363..e0a821c39 100644 --- a/schema/iguana-schema.json +++ b/schema/iguana-schema.json @@ -1,409 +1,391 @@ { - "$schema": "http://json-schema.org/draft-07/schema#", - + "$schema": "http://json-schema.org/draft-06/schema#", + "$ref": "#/definitions/root", "definitions": { - "connection": { + "root": { + "title": "root", "type": "object", + "additionalProperties": false, "properties": { - "endpoint": { "type": "string" }, - "updateEndpoint": { "type": "string" }, - "user": { "type": "string" }, - "password": { "type": "string" }, - "version": { - "type": "string" - } - }, - "required": ["endpoint"] - }, - "warmup" : { - "type": "object", - "properties": { - "timeLimit": { - "type": "integer" + "datasets": { + "type": "array", + "items": { + "$ref": "#/definitions/Dataset" + }, + "minItems": 1 }, - "queryHandler": { - "$ref": "#/definitions/genericClassObject" + "connections": { + "type": "array", + "items": { + "$ref": "#/definitions/Connection" + }, + "minItems": 1 }, - "workers": { + "tasks": { "type": "array", "items": { - "oneOf": [ - { - "$ref": "#/definitions/AbstractWorker" - } - ] + "$ref": "#/definitions/Task" + }, + "minItems": 1 + }, + "storages": { + "type": "array", + "items": { + "$ref": "#/definitions/Storage" + }, + "minItems": 1 + }, + "responseBodyProcessors": { + "type": "array", + "items": { + "$ref": "#/definitions/ResponseBodyProcessor" } - } - }, - "required": ["workers","timeLimit"] - }, - "stresstest": { - "type": "object", - "properties": { - "timeLimit": { "type": "integer" }, - 
"noOfQueryMixes": {"type": "integer"}, - "queryHandler": {"$ref" : "#/definitions/genericClassObject" }, - "warmup" : {"$ref" : "#/definitions/warmup"}, - "workers": { + }, + "metrics": { "type": "array", "items": { - "oneOf": [ - { - "$ref": "#/definitions/AbstractWorker" - } - ] + "$ref": "#/definitions/Metric" } } }, - "required": ["queryHandler", "workers"] + "required": [ + "connections", + "datasets", + "storages", + "tasks" + ] }, - "AbstractWorker": { + + "Connection": { "type": "object", + "additionalProperties": false, "properties": { - "className": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "endpoint": { + "type": "string", + "format": "uri" + }, + "updateEndpoint": { + "type": "string", + "format": "uri" + }, + "authentication": { + "$ref": "#/definitions/Authentication" + }, + "updateAuthentication": { + "$ref": "#/definitions/Authentication" + }, + "dataset": { "type": "string" } - }, - "allOf": [{ - "if": { - "properties": { - "className" : { - "oneOf": [ {"const": "SPARQLWorker"},{"const": "org.aksw.iguana.cc.worker.impl.SPARQLWorker"}] - } + "required": [ + "endpoint", + "name" + ], + "title": "Connection" + }, + "Authentication": { + "type": "object", + "additionalProperties": false, + "properties": { + "user": { + "type": "string" + }, + "password": { + "type": "string" } }, - "then": - { - "additionalProperties": {"type": "undefined"}, - - "required": [ - "className", - "threads", - "queriesFile" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - "queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "responseType": { - "type": "string" - }, - "parameterName": { - "type": "string" - } - } + "required": [ + "password", + "user" + ], + "title": "Authentication" + }, + "Dataset": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "file": { + "type": "string" } - }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "UPDATEWorker"},{"const": "org.aksw.iguana.cc.worker.impl.UPDATEWorker"}] - } + "required": [ + "name" + ], + "title": "Dataset" + }, + "Metric": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ "AES", "AvgQPS", "EachQuery", "NoQ", "NoQPH", "PAvgQPS", "PQPS", "QMPH", "QPS" ] + }, + "penalty": { + "type": "integer", + "minimum": 0 } }, - "then": - {"required": ["className", "threads", "queriesFile"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "timerStrategy" : {"type": "string"} - }, - "additionalProperties": {"type": "undefined"} - } - - }, - {"if": {"properties": { - "className" : { - "oneOf": [{"const": "MultipleCLIInputWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.MultipleCLIInputWorker"}] - } - }}, - "then": - {"required": ["className", "threads", "queriesFile", "queryError", "queryFinished", "initFinished"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - 
"queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"}, - "numberOfProcesses" : {"type": "integer"} - }, "additionalProperties": {"type": "undefined"} - } - }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "CLIInputWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIInputWorker"}] - } + "required": [ + "type" + ], + "title": "Metric" + }, + "ResponseBodyProcessor": { + "type": "object", + "additionalProperties": false, + "properties": { + "contentType": { + "type": "string" + }, + "threads": { + "type": "integer", + "minimum": 1 + }, + "timeout" : { + "type": "string" } }, - "then": - {"required": ["className", "threads", "queriesFile", "queryError", "queryFinished", "initFinished"], - "properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - "queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"} - }, "additionalProperties": {"type": "undefined"} - } + "required": [ + "contentType" + ], + "title": "ResponseBodyProcessor" + }, + "Storage": { + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/CSVStorage" }, + { "$ref": "#/definitions/RDFFileStorage" }, + { "$ref": "#/definitions/TriplestoreStorage" } + ], + "title": "Storage" + }, + "CSVStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "csv file" + }, + "directory": { + "type": "string" + } }, - { - "if": { - "properties": { - "className" : { - "oneOf": [{"const": "CLIPrefixWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIPrefixWorker"}] - } + "required": [ + "type", + "directory" + ], + "title": "CSVStorage" + }, + "RDFFileStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "rdf file" + }, + "path": { + "type": "string" } }, - "then": { - "required": [ - "className", - "threads", - "queriesFile", - "queryError", - "queryFinished", - "initFinished", - "queryPrefix", - "querySuffix" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - "queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "numberOfProcesses": { - "type": "integer" - }, - "queryError": { - "type": "string" - }, - "queryFinished": { - "type": "string" - }, - "initFinished": { - "type": "string" - }, - "querySuffix": { - "type": "string" - }, - "queryPrefix": { - "type": "string" - } + "required": [ + "type", + "path" + ], + "title": "RDFFileStorage" + }, + "TriplestoreStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "triplestore" }, - "additionalProperties": {"type": "undefined"} - } - + "endpoint": { + "type": "string", + "format": "uri" + }, + "user": { + "type": "string" + }, + "password": { + "type": "string" + }, + "baseUri": { + "type": "string", + "format": "uri" + } }, - {"if": { - "properties": { - "className" : { - "oneOf": [{"const": "MultipleCLIInputFileWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.MultipleCLIInputFileWorker"}] - } - } + "required": [ + "type", + "endpoint" + ], + "title": "TriplestoreStorage" + }, + "Task": { + "type": "object", + 
"oneOf": [ { "$ref": "#/definitions/Stresstest" } ], + "title": "Task" + }, + "Stresstest": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "stresstest" }, - "then": { - "required": [ - "className", - "threads", - "queriesFile", - "directory", - "queryError", - "queryFinished", - "initFinished" - ], - "properties": { - "className": { - "type": "string" - }, - "threads": { - "type": "integer" - }, - "queriesFile": { - "type": "string" - }, - "timeOut": { - "type": "integer" - }, - "fixedLatency": { - "type": "integer" - }, - "gaussianLatency": { - "type": "integer" - }, - "queryError": { - "type": "string" - }, - "queryFinished": { - "type": "string" - }, - "initFinished": { - "type": "string" - }, - "directory": { - "type": "string" - }, - "numberOfProcesses": { - "type": "integer" - } - }, - "additionalProperties": {"type": "undefined"} + "warmupWorkers": { + "type": "array", + "items": { + "$ref": "#/definitions/Worker" } }, - { - "if": { - "properties": { - "className": { - "oneOf": [{"const": "CLIInputFileWorker"}, {"const": "org.aksw.iguana.cc.worker.impl.CLIInputFileWorker"}] - } - } + "workers": { + "type": "array", + "items": { + "$ref": "#/definitions/Worker" }, - "then": { - "allOf": [{ - "required": [ - "className", - "threads", - "queriesFile", - "directory", - "queryError", - "queryFinished", - "initFinished" - ]}, - {"properties": { - "className": { - "type": "string" - }, - "threads" : {"type": "integer"}, - "queriesFile" : {"type": "string"}, - "timeOut" : {"type": "integer"}, - "fixedLatency" : {"type": "integer"}, - "gaussianLatency" : {"type": "integer"}, - "queryError" : {"type": "string"}, - "queryFinished" : {"type": "string"}, - "initFinished" : {"type": "string"}, - "directory" : {"type" : "string"} - }, "additionalProperties": {"type": "undefined"} - }] - } + "minItems": 1 } - ] + }, + "required": [ + "type", + "workers" + ], + "title": "Stresstest" + }, + "Worker": { + "type": "object", + "oneOf": [ { "$ref": "#/definitions/SPARQLWorker" } ], + "title": "Worker" }, - "task": { + "SPARQLWorker" : { "type": "object", + "unevaluatedProperties": false, "properties": { - "className": { "type": "string" }, - "configuration": { - "oneOf": [{"$ref": "#/definitions/stresstest"}] + "type": { + "type": "string", + "const": "SPARQLProtocolWorker" + }, + "number": { + "type": "integer", + "minimum": 1 + }, + "requestType": { + "type": "string", + "enum": [ "post query", "get query", "post url-enc query", "post url-enc update", "post update" ] + }, + "queries": { + "$ref": "#/definitions/Queries" + }, + "timeout": { + "type": "string" + }, + "connection": { + "type": "string" + }, + "completionTarget": { + "$ref": "#/definitions/CompletionTarget" + }, + "parseResults": { + "type": "boolean" + }, + "acceptHeader": { + "type": "string" } }, - "required": ["className", "configuration"] + "required": [ + "type", + "completionTarget", + "connection", + "queries", + "timeout" + ], + "title": "SPARQLWorker" }, - "genericClassObject": { + "CompletionTarget": { "type": "object", + "oneOf": [ + { "$ref": "#/definitions/TimeLimit" }, + { "$ref": "#/definitions/QueryMixes" } + ], + "title": "CompletionTarget" + }, + "TimeLimit": { "properties": { - "className": { "type": "string" }, - "configuration": { - "type": "object" + "duration": { + "type": "string" } }, - "required": ["className"] - - } - - }, - - "type": "object", - - "properties": { - "connections": { - "type": "array", - "items": { - "$ref": 
"#/definitions/connection" - } - }, - "datasets": { - "type": "array", - "items": { - "type": "object", - "properties": { - "name" : {"type": "string"} - }, - "required": ["name"] - } - }, - "tasks": { - "type": "array", - "items": { - "$ref":"#/definitions/task" - } - }, - "preScriptHook": { - "type": "string" - }, - "postScriptHook": { - "type": "string" + "title": "TimeLimit", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "duration" + ] }, - "metrics": { - "type": "array", - "items": { - "$ref": "#/definitions/genericClassObject" - } + "QueryMixes": { + "properties": { + "number": { + "type": "integer", + "minimum": 1 + } + }, + "title": "QueryMixes", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "number" + ] }, - "storages": { - "type": "array", - "items": { - "$ref": "#/definitions/genericClassObject" - } + "Queries": { + "type": "object", + "additionalProperties": false, + "properties": { + "path": { + "type": "string" + }, + "format": { + "type": "string", + "enum": [ "one-per-line", "separator", "folder" ] + }, + "separator": { + "type": "string" + }, + "caching": { + "type": "boolean" + }, + "order": { + "type": "string", + "enum": [ "random", "linear" ] + }, + "seed": { + "type": "integer" + }, + "lang": { + "type": "string", + "enum": [ "", "SPARQL" ] + } + }, + "required": [ + "path" + + ], + "title": "Queries" } } } diff --git a/schema/iguana.owl b/schema/iguana.owl deleted file mode 100644 index ef83ff1a7..000000000 --- a/schema/iguana.owl +++ /dev/null @@ -1,435 +0,0 @@ - - - - - - - - - - - - -]> - - - - - Iguana results ontology - 3.3.2 - 2020/09/18 - 2022/09/23 - Iguana results ontology - The Iguana results ontology explains the rdf results of an Iguana benchmark. - - - - - - - Experiment - An experiment is a collection of Connections executed against one dataset. - - - - Suite - A suite is a collection of Experiments. - - - - Worker - A worker is one thread executing a set of queries against a Connection, thus simulating one user. - - - - An ExecutedQuery is a query which was executed one or more times against a Connection using either one Worker or the aggregation of several ExecutedQueries which is assigned to a Task. It provides several Metric results. The ExecutedQuery is assigned to a worker. - ExecutedQuery - - - - A Query is the query string of a given query (most likely a sparql query) together with a collection of statistics. The query is Suite independent. - Query - - - - Metric - A Metric is the abstract Class providing a result metric. - - - - Task - A Task is an abstract Class providing results for one Connection using one Dataset. - - - - Stresstest - The Stresstest is the Task which executes a stresstest. - - - - - Connection - A Connection is a connection used in a Task, basically providing just a label and ID. - - - Dataset - A Dataset is a dataset used in a Task, basically providing just a label and ID. - - - QPS Metric - Queries Per Second Metric. Annotates a Task or Worker if they use this metric. - - - - - - QMPH Metric - Query Mixes Per Hour. Annotates a Task or Worker if they use this metric. - - - - - - NoQPH Metric - Number of Queries Per Hour. Annotates a Task or Worker if they use this metric. - - - - - - Average QPS Metric - Average Queries Per Second Metric. Annotates a Task or Worker if they use this metric. - - - - - NoQ Metric - Number of Queries successfully executed Metric. Annotates a Task or Worker if they use this metric. - - - - - - - - connection - Assigns a Connection to a Task. 
diff --git a/schema/iguana.owl b/schema/iguana.owl deleted file mode 100644 index ef83ff1a7..000000000 --- a/schema/iguana.owl +++ /dev/null @@ -1,435 +0,0 @@
- Ontology: Iguana results ontology, version 3.3.2 (created 2020/09/18, modified 2022/09/23).
-   The Iguana results ontology explains the rdf results of an Iguana benchmark.
- Classes:
-   Experiment: An experiment is a collection of Connections executed against one dataset.
-   Suite: A suite is a collection of Experiments.
-   Worker: A worker is one thread executing a set of queries against a Connection, thus simulating one user.
-   ExecutedQuery: An ExecutedQuery is a query which was executed one or more times against a Connection, using either one Worker or the aggregation of several ExecutedQueries assigned to a Task. It provides several Metric results. The ExecutedQuery is assigned to a worker.
-   Query: A Query is the query string of a given query (most likely a SPARQL query) together with a collection of statistics. The query is Suite independent.
-   Metric: A Metric is the abstract class providing a result metric.
-   Task: A Task is an abstract class providing results for one Connection using one Dataset.
-   Stresstest: The Stresstest is the Task which executes a stresstest.
-   Connection: A Connection is a connection used in a Task, basically providing just a label and ID.
-   Dataset: A Dataset is a dataset used in a Task, basically providing just a label and ID.
-   QPS Metric: Queries Per Second Metric. Annotates a Task or Worker if they use this metric.
-   QMPH Metric: Query Mixes Per Hour. Annotates a Task or Worker if they use this metric.
-   NoQPH Metric: Number of Queries Per Hour. Annotates a Task or Worker if they use this metric.
-   Average QPS Metric: Average Queries Per Second Metric. Annotates a Task or Worker if they use this metric.
-   NoQ Metric: Number of Queries successfully executed Metric. Annotates a Task or Worker if they use this metric.
- Properties:
-   connection: Assigns a Connection to a Task.
-   dataset: Assigns a Dataset to a Task.
-   experiment: Assigns an Experiment to a Suite.
-   task: Assigns a Task to an Experiment.
-   workerResult: Assigns a Worker to a Task (mostly a Stresstest).
-   metric: Annotates a Task or Worker with a Metric. The Metric itself is provided using the Property; this just annotates the task/worker to provide these results.
-   query: Assigns an ExecutedQuery to a Worker or Task. The ExecutedQuery provides further metrics, for example.
-   queryID: Assigns a Query and its statistics, as well as the query string, to an ExecutedQuery.
-   version: Version of the triple store tested.
-   timeLimit: Time limit after which the Stresstest ends, in milliseconds.
-   noOfQueryMixes: The number of query mixes after which the Stresstest ends.
-   noOfWorkers: Number of total Workers the Stresstest simulated.
-   startDate: The date and time the Task was started.
-   endDate: The date and time the Task was ended.
-   workerID: The worker ID assigned to the worker.
-   workerType: The worker class name.
-   noOfQueries: The number of Queries in the benchmark query set assigned to the worker.
-   timeOutMS: The timeout in ms set for this worker.
-   optional: Tells if the query contains an OPTIONAL element.
-   union: Tells if the query contains a UNION element.
-   orderBy: Tells if the query contains an ORDER BY element.
-   offset: Tells if the query contains an OFFSET element.
-   triples: The number of triples in a Query.
-   having: Tells if the query contains a HAVING element.
-   filter: Tells if the query contains a FILTER element.
-   aggregations: Tells if the query contains an AGGREGATION element.
-   groupBy: Tells if the query contains a GROUP BY element.
-   ID: The query ID.
-   totalTime: The summed-up execution time of all executions of the ExecutedQuery, in milliseconds.
-   QPS: The queries per second value.
-   penalizedQPS: The queries per second value where failed queries are rated using a penalty (default is the timeOut of a Task).
-   failed: The number of failed executions of the ExecutedQuery.
-   succeeded: The number of succeeded executions of the ExecutedQuery.
-   unknownException: The number of failed executions of the ExecutedQuery where the reason was unknown.
-   resultSize: The result size of an ExecutedQuery.
-   wrongCodes: The number of failed executions of the ExecutedQuery where the reason was a wrong result code (e.g. 400).
-   timeOuts: The number of failed executions of the ExecutedQuery where the reason was a timeout.
-   QMPH: The query mixes per hour value.
-   NoQPH: The number of queries per hour value.
-   AvgQPS: The average number of queries answered successfully per second.
-   penalizedAvgQPS: The average number of queries answered successfully per second, using the penalizedQPS.
-   NoQ: The number of successfully executed queries.
diff --git a/schema/iguana.owx b/schema/iguana.owx new file mode 100644 index 000000000..9ae016f28 --- /dev/null +++ b/schema/iguana.owx @@ -0,0 +1,1393 @@
+ Ontology: Iguana results ontology, version 4.0.0 (created 2020/09/18, modified 2024/03/20, license http://creativecommons.org/licenses/by/3.0/).
+   The Iguana results ontology explains the rdf results of an Iguana benchmark.
+   Prefixes: http://purl.org/dc/elements/1.1/, http://purl.org/dc/terms/, http://www.w3.org/1999/02/22-rdf-syntax-ns#, http://www.w3.org/2000/01/rdf-schema#, http://www.w3.org/2002/07/owl
+ Classes:
+   iont:Connection (Connection): A Connection represents a benchmarked endpoint.
+   iont:Dataset (Dataset): A Dataset represents the dataset used for a benchmarked endpoint.
+   iont:ExecutedQuery (ExecutedQuery): An ExecutedQuery is a query which was executed one or more times against a Connection, using either one Worker or the aggregation of several ExecutedQueries which are assigned to a Task. It provides several Metric results. The ExecutedQuery is assigned to a worker or a Task.
+   iont:Metric (Metric): A Metric is the abstract class providing a result metric.
+   iont:Query (Query): A Query is the query string of a given query (most likely a sparql query) together with a collection of statistics. The query is Suite independent.
+   iont:QueryExecution (QueryExecution): A QueryExecution is the single execution of a query against a Connection.
+   iont:ResponseBody (ResponseBody): The ResponseBody represents the response body of an executed query.
+   iont:Stresstest (Stresstest): The Stresstest Task benchmarks a system by stresstesting it.
+   iont:Suite (Suite): A suite is a collection of benchmarks.
+   iont:Task (Task): Abstract class for various tasks.
+ + + + iont:Task + Task + + + + iont:Worker + A Worker is a thread that executes a set of queries against a Connection. It simulates a user. + + + + iont:Worker + Worker + + + + http://iguana-benchmark.eu/class/metric/AES + Aggregated Execution Statistics. + + + + http://iguana-benchmark.eu/class/metric/AES + AES Metric + + + + http://iguana-benchmark.eu/class/metric/AvgQPS + Average Queries Per Second. + + + + http://iguana-benchmark.eu/class/metric/AvgQPS + AvgQPS Metric + + + + http://iguana-benchmark.eu/class/metric/EachQuery + Each query execution statistics. + + + + http://iguana-benchmark.eu/class/metric/EachQuery + EachQuery Metric + + + + http://iguana-benchmark.eu/class/metric/NoQ + Number of successfully executed Queries. + + + + http://iguana-benchmark.eu/class/metric/NoQ + NoQ Metric + + + + http://iguana-benchmark.eu/class/metric/NoQPH + Number of Queries Per Hour. + + + + http://iguana-benchmark.eu/class/metric/NoQPH + NoQPH Metric + + + + http://iguana-benchmark.eu/class/metric/PAvgQPS + Penalized Average Queries Per Second. + + + + http://iguana-benchmark.eu/class/metric/PAvgQPS + PAvgQPS Metric + + + + http://iguana-benchmark.eu/class/metric/PQPS + Penalized Queries Per Second. + + + + http://iguana-benchmark.eu/class/metric/PQPS + PQPS Metric + + + + http://iguana-benchmark.eu/class/metric/QMPH + Query Mixes Per Hour. + + + + http://iguana-benchmark.eu/class/metric/QMPH + QMPH Metric + + + + http://iguana-benchmark.eu/class/metric/QPS + Queries Per Second Metric. + + + + http://iguana-benchmark.eu/class/metric/QPS + QPS Metric + + + + iprop:AvgQPS + The average number of queries answered successfully per second value. + + + + iprop:AvgQPS + AvgQPS + + + + iprop:ID + ID + + + + iprop:ID + The numeric query ID. + + + + iprop:NoQ + The number of successfully executed queries value + + + + iprop:NoQ + NoQ + + + + iprop:NoQPH + The number of queries per hour value. + + + + iprop:NoQPH + NoQPH + + + + iprop:QMPH + The query mixes per hour value + + + + iprop:QMPH + QMPH + + + + iprop:QPS + The queries per second value. + + + + iprop:QPS + QPS + + + + iprop:bindings + The number of bindings the query received. + + + + iprop:bindings + bindings + + + + iprop:code + The result code of the execution of a query. + + + + iprop:code + code + + + + iprop:connection + Assigns a Connection to a Worker. + + + + iprop:connection + connection + + + + iprop:dataset + Assigns a Dataset to a Connection. + + + + iprop:dataset + dataset + + + + iprop:endDate + The date and time at which the Task or Worker ended. + + + + iprop:endDate + endDate + + + + iprop:exception + The exception, if any occurred, during the execution of the query or the processing of its response body. + + + + iprop:exception + exception + + + + iprop:executionTook + The time duration of the execution of a query. + + + + iprop:executionTook + executionTook + + + + iprop:failed + The number of failed executions of the ExecutedQuery. + + + + iprop:failed + failed + + + + iprop:fullID + The full query ID consists of the hashcode of its query handler and the query's id inside of the query handler in this format: <queryhandler_hashg;:<id>. + + + + iprop:fullID + fullID + + + + iprop:httpCode + The http response code of the query execution. + + + + iprop:httpCode + httpCode + + + + iprop:metric + Annotates a Task, Worker or ExecutedQuery with a Metric. The Metric itself is provided using the Property, this just annotates the subject to provide these results. 
+ + + + iprop:metric + metric + + + + iprop:noOfQueries + The number of queries assigned to the worker. + + + + iprop:noOfQueries + noOfQueries + + + + iprop:noOfQueryMixes + The number of query mixes a Worker has to execute. + + + + iprop:noOfQueryMixes + noOfQueryMixes + + + + iprop:noOfWorkers + The number of Workers the stresstest utilized. + + + + iprop:noOfWorkers + noOfWorkers + + + + iprop:penalizedAvgQPS + The average number of queries answered successfully per second value using the penaltyQPS. + + + + iprop:penalizedAvgQPS + penalizedAvgQPS + + + + iprop:penalizedQPS + The queries per second value where failed queries are rated using a penalty (default is the timeOut of a Task). + + + + iprop:penalizedQPS + penalizedQPS + + + + iprop:query + Assigns an ExecutedQuery to a Worker or Task. The ExecutedQuery provides further metrics and statistics. + + + + iprop:query + query + + + + iprop:queryExecution + Assigns a QueryExecution to an ExecutedQuery. + + + + iprop:queryExecution + queryExecution + + + + iprop:queryID + Assigns a Query and its statistics, as well as the string of the query to an ExecutedQuery. + + + + iprop:queryID + queryID + + + + iprop:responseBody + Assigns a ResponseBody to a QueryExecution. + + + + iprop:responseBody + responseBody + + + + iprop:responseBodyHash + The hashcode of the response body. + + + + iprop:responseBodyHash + responseBodyHash + + + + iprop:resultSize + The result size of a ExecutedQuery. + + + + iprop:resultSize + The result size of the query's response body. The value is -1 if there wasn't any response body received. + + + + iprop:resultSize + resultSize + + + + iprop:results + The number of results the query received. + + + + iprop:results + results + + + + iprop:run + The number of execution for this query. + + + + iprop:run + run + + + + iprop:startDate + The date and time at which the Task or Worker started. + + + + iprop:startDate + startDate + + + + iprop:succeeded + The number of succeeded executions of the ExecutedQuery. + + + + iprop:succeeded + succeeded + + + + iprop:success + If the query has been successful or not. + + + + iprop:success + success + + + + iprop:task + Assigns a Task to an Suite. + + + + iprop:task + task + + + + iprop:timeLimit + The time limit after which a Worker stops its execution of queries. + + + + iprop:timeLimit + timeLimit + + + + iprop:timeOut + The timeout set for this worker. + + + + iprop:timeOut + timeOut + + + + iprop:timeOuts + The number of failed executions of the ExecutedQuery whereas the Reason was a time out + + + + iprop:timeOuts + timeOuts + + + + iprop:totalTime + The summed up execution time of all executions of the ExecutedQuery in milliseconds. + + + + iprop:totalTime + totalTime + + + + iprop:unknownExceptions + The number of failed executions of the ExecutedQuery whereas the Reason was unknown. + + + + iprop:unknownExceptions + unknownExceptions + + + + iprop:variable + A variable that the query contains. + + + + iprop:variable + variable + + + + iprop:version + Version of the triplestore tested. + + + + iprop:version + version + + + + iprop:workerID + The worked ID assigned to the worker + + + + iprop:workerID + workerID + + + + iprop:workerResult + Assigns a Worker to an Task. (mostly a Stresstest) + + + + iprop:workerResult + workerResult + + + + iprop:workerType + The worker class name. 
+ + + + iprop:workerType + workerType + + + + iprop:wrongCodes + The number of failed executions of the ExecutedQuery whereas the Reason was a wrong result code (e.g 400) + + + + iprop:wrongCodes + wrongCodes + + + + + + + diff --git a/src/main/java/org/aksw/iguana/cc/config/elements/ConnectionConfig.java b/src/main/java/org/aksw/iguana/cc/config/elements/ConnectionConfig.java new file mode 100644 index 000000000..99addab0c --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/config/elements/ConnectionConfig.java @@ -0,0 +1,38 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonDeserializer; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; + +import java.io.IOException; +import java.net.URI; + +/** + * A connection configuration class + */ +public record ConnectionConfig( + @JsonProperty(required = true) + String name, + String version, + DatasetConfig dataset, + @JsonProperty(required = true) + @JsonDeserialize(using = URIDeserializer.class) + URI endpoint, + Authentication authentication, + @JsonDeserialize(using = URIDeserializer.class) + URI updateEndpoint, + Authentication updateAuthentication + +) { + public static class URIDeserializer extends JsonDeserializer { + + @Override + public URI deserialize(JsonParser p, DeserializationContext ctxt) throws IOException { + return URI.create(p.getValueAsString()); // verifying uri doesn't work here + } + } + + public record Authentication(String user, String password) {} +} diff --git a/src/main/java/org/aksw/iguana/cc/config/elements/DatasetConfig.java b/src/main/java/org/aksw/iguana/cc/config/elements/DatasetConfig.java new file mode 100644 index 000000000..8986de3be --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/config/elements/DatasetConfig.java @@ -0,0 +1,13 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * The Dataset config class. + *
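+ * A suite file entry for this record might look as follows (dataset name and path are illustrative;
+ * only {@code name} is required):
+ * <pre>{@code
+ * datasets:
+ *   - name: "sp2b"
+ *     file: "datasets/sp2b.nt"
+ * }</pre>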
+ * Will set the name and if it was set in the config file the fileName + */ +public record DatasetConfig( + @JsonProperty(required = true) String name, + @JsonProperty String file +) {} diff --git a/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java b/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java new file mode 100644 index 000000000..bd55cace2 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/config/elements/StorageConfig.java @@ -0,0 +1,24 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import org.aksw.iguana.cc.storage.impl.CSVStorage; +import org.aksw.iguana.cc.storage.impl.RDFFileStorage; +import org.aksw.iguana.cc.storage.impl.TriplestoreStorage; + +/** + * Storage Configuration class + */ + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.PROPERTY, + property = "type") +@JsonSubTypes({ + @JsonSubTypes.Type(value = TriplestoreStorage.Config.class, name = "triplestore"), + @JsonSubTypes.Type(value = RDFFileStorage.Config.class, name = "rdf file"), + @JsonSubTypes.Type(value = CSVStorage.Config.class, name = "csv file") +}) +public interface StorageConfig {} + + + diff --git a/src/main/java/org/aksw/iguana/cc/controller/MainController.java b/src/main/java/org/aksw/iguana/cc/controller/MainController.java new file mode 100644 index 000000000..1190f84e3 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/controller/MainController.java @@ -0,0 +1,77 @@ +package org.aksw.iguana.cc.controller; + +import com.beust.jcommander.*; +import org.aksw.iguana.cc.suite.IguanaSuiteParser; +import org.aksw.iguana.cc.suite.Suite; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.nio.file.Path; + + +/** + * The MainController class is responsible for executing the IGUANA program. + */ +public class MainController { + + + public static class Args { + public class PathConverter implements IStringConverter { + @Override + public Path convert(String value) { + return Path.of(value); + } + } + + + @Parameter(names = {"--ignore-schema", "-is"}, description = "Do not check the schema before parsing the suite file.") + private boolean ignoreShema = false; + + @Parameter(names = {"--dry-run", "-d"}, hidden = true) + public static boolean dryRun = false; + + @Parameter(names = "--help", help = true) + private boolean help; + + @Parameter(description = "suite file {yml,yaml,json}", arity = 1, required = true, converter = PathConverter.class) + private Path suitePath; + } + + private static final Logger LOGGER = LoggerFactory.getLogger(MainController.class); + + /** + * The main method for executing IGUANA + * + * @param argc The command line arguments that are passed to the program. 
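+ *             A minimal invocation sketch (jar name illustrative): {@code java -jar iguana.jar suite.yml};
+ *             pass {@code --ignore-schema} ({@code -is}) to skip schema validation of the suite file.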
+ */ + public static void main(String[] argc) { + // Configurator.reconfigure(URI.create("log4j2.yml")); + + var args = new Args(); + JCommander jc = JCommander.newBuilder() + .addObject(args) + .build(); + try { + jc.parse(argc); + } catch (ParameterException e) { + System.err.println(e.getLocalizedMessage()); + jc.usage(); + System.exit(0); + } + if (args.help) { + jc.usage(); + System.exit(1); + } + + try { + Suite parse = IguanaSuiteParser.parse(args.suitePath, !args.ignoreShema); + parse.run(); + } catch (IOException e) { + LOGGER.error("Error while reading the configuration file.", e); + System.exit(0); + } + System.exit(0); + } + +} diff --git a/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java b/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java new file mode 100644 index 000000000..ee8868528 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/lang/LanguageProcessor.java @@ -0,0 +1,63 @@ +package org.aksw.iguana.cc.lang; + +import org.aksw.iguana.cc.storage.Storable; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InputStream; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.reflect.InvocationTargetException; +import java.util.HashMap; +import java.util.Map; + + +/** + * Interface for abstract language processors that work on InputStreams. + * LanguageProcessors are used to process the content of an InputStream and extract relevant information. + * They are used by the Worker to process the response of a request.
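+ * A minimal lookup sketch ({@code responseBodyStream} and {@code hash} are placeholders for the
+ * response stream and its hash; the content type below is the one registered in this class):
+ * <pre>{@code
+ * LanguageProcessor processor = LanguageProcessor.getInstance("application/sparql-results+json");
+ * LanguageProcessor.LanguageProcessingData data = processor.process(responseBodyStream, hash);
+ * }</pre>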
+ * LanguageProcessors must be registered in the static block of this class. + */ +public abstract class LanguageProcessor { + + /** + * Provides the content type that a LanguageProcessor consumes. + */ + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE) + public @interface ContentType { + String value(); + } + + public interface LanguageProcessingData extends Storable { + long hash(); + Class processor(); + } + + public abstract LanguageProcessingData process(InputStream inputStream, long hash); + + final private static Map> processors = new HashMap<>(); + + final private static Logger LOGGER = LoggerFactory.getLogger(LanguageProcessor.class); + + // Register all available LanguageProcessors here. + static { + processors.put("application/sparql-results+json", org.aksw.iguana.cc.lang.impl.SaxSparqlJsonResultCountingParser.class); + } + + public static LanguageProcessor getInstance(String contentType) { + Class processorClass = processors.get(contentType); + if (processorClass != null) { + try { + return processorClass.getDeclaredConstructor().newInstance(); + } catch (InstantiationException | IllegalAccessException | InvocationTargetException | + NoSuchMethodException e) { + throw new RuntimeException(e); + } + } + throw new IllegalArgumentException("No LanguageProcessor for ContentType " + contentType); + } + +} diff --git a/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java b/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java new file mode 100644 index 000000000..42a8a4eaf --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/lang/impl/SaxSparqlJsonResultCountingParser.java @@ -0,0 +1,223 @@ +package org.aksw.iguana.cc.lang.impl; + +import org.aksw.iguana.cc.lang.LanguageProcessor; +import org.aksw.iguana.cc.storage.Storable; +import org.aksw.iguana.commons.rdf.IPROP; +import org.aksw.iguana.commons.rdf.IRES; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import org.json.simple.parser.ContentHandler; +import org.json.simple.parser.JSONParser; +import org.json.simple.parser.ParseException; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; + +import static org.json.simple.parser.ParseException.ERROR_UNEXPECTED_EXCEPTION; + +/** + * SAX Parser for SPARQL JSON Results. + * For correct SPARQL JSON Results it returns the number of solutions, bound values and the names of the variables. + * For malformed results it may or may not fail. For malformed JSON it fails if the underlying json.simple.parser fails. 
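+ * A usage sketch (the response body string and the hash value are illustrative):
+ * <pre>{@code
+ * var parser = new SaxSparqlJsonResultCountingParser();
+ * var data = parser.process(new ByteArrayInputStream(body.getBytes(StandardCharsets.UTF_8)), 42L);
+ * }</pre>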
+ */ +@LanguageProcessor.ContentType("application/sparql-results+json") +public class SaxSparqlJsonResultCountingParser extends LanguageProcessor { + + @Override + public LanguageProcessingData process(InputStream inputStream, long hash) { + var parser = new JSONParser(); + var handler = new SaxSparqlJsonResultContentHandler(); + try { + parser.parse(new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)), handler); + return new SaxSparqlJsonResultData(hash, handler.solutions(), handler.boundValues(), handler.variables(), null); + } catch (IOException e) { + throw new RuntimeException(e); + } catch (ParseException e) { + return new SaxSparqlJsonResultData(hash, -1, -1, null, e); + } + } + + record SaxSparqlJsonResultData( + long hash, + long results, + long bindings, + List variables, + Exception exception + ) implements LanguageProcessingData, Storable.AsCSV, Storable.AsRDF { + final static String[] header = new String[]{ "responseBodyHash", "results", "bindings", "variables", "exception" }; + + @Override + public Class processor() { + return SaxSparqlJsonResultCountingParser.class; + } + + @Override + public CSVData toCSV() { + String variablesString = ""; + String exceptionString = ""; + if (variables != null) + variablesString = String.join("; ", variables); + if (exception != null) + exceptionString = exception().toString(); + + String[] content = new String[]{ String.valueOf(hash), String.valueOf(results), String.valueOf(bindings), variablesString, exceptionString}; + String[][] data = new String[][]{ header, content }; + + String folderName = "application-sparql+json"; + List files = List.of(new CSVData.CSVFileData("sax-sparql-result-data.csv", data)); + return new Storable.CSVData(folderName, files); + } + + @Override + public Model toRDF() { + Model m = ModelFactory.createDefaultModel(); + Resource responseBodyRes = IRES.getResponsebodyResource(this.hash); + m.add(responseBodyRes, IPROP.results, ResourceFactory.createTypedLiteral(this.results)) + .add(responseBodyRes, IPROP.bindings, ResourceFactory.createTypedLiteral(this.bindings)); + + if (this.variables != null) { + for (String variable : this.variables) { + m.add(responseBodyRes, IPROP.variable, ResourceFactory.createTypedLiteral(variable)); + } + } + if (this.exception != null) { + m.add(responseBodyRes, IPROP.exception, ResourceFactory.createTypedLiteral(this.exception.toString())); + } + + return m; + } + } + + private static class SaxSparqlJsonResultContentHandler implements ContentHandler { + // TODO: add support for ask queries and link + // TODO: code is unnecessary complicated + + private boolean headFound = false; + + private int objectDepth = 0; + private boolean inResults = false; + private boolean inBindings = false; + private boolean inBindingsArray = false; + private boolean inVars = false; + + private long boundValues = 0; + + private long solutions = 0; + + private final List variables = new ArrayList<>(); + + + @Override + public void startJSON() { + } + + @Override + public void endJSON() throws ParseException { + if (inResults || inBindings || inBindingsArray || !headFound || objectDepth != 0) + throw new ParseException(ERROR_UNEXPECTED_EXCEPTION, "SPARQL Json Response was malformed."); + } + + @Override + public boolean startObject() { + objectDepth += 1; + if (inBindingsArray) { + switch (objectDepth) { + case 3 -> solutions += 1; + case 4 -> boundValues += 1; + } + } + return true; + } + + @Override + public boolean endObject() { + switch (objectDepth) { + case 1: + if (inResults) 
+ inResults = false; + break; + case 2: + if (inBindings) { + inBindings = false; + } + break; + } + objectDepth -= 1; + return true; + } + + @Override + public boolean startArray() { + if (objectDepth == 2 && inResults && inBindings && !inBindingsArray) { + inBindingsArray = true; + } + return true; + } + + @Override + public boolean endArray() { + if (inVars) + inVars = false; + if (objectDepth == 2 && inResults && inBindings && inBindingsArray) { + inBindingsArray = false; + } + return true; + } + + + @Override + public boolean startObjectEntry(String key) { + switch (objectDepth) { + case 1 -> { + switch (key) { + case "head" -> headFound = true; + case "results" -> { + if (headFound) + inResults = true; + } + } + } + case 2 -> { + if ("bindings".compareTo(key) == 0) { + inBindings = true; + } + if ("vars".compareTo(key) == 0) { + inVars = true; + } + } + } + return true; + } + + @Override + public boolean endObjectEntry() { + return true; + } + + public boolean primitive(Object value) { + if (inVars) + variables.add(value.toString()); + + return true; + } + + public long boundValues() { + return boundValues; + } + + public long solutions() { + return solutions; + } + + public List variables() { + return variables; + } + } +} \ No newline at end of file diff --git a/src/main/java/org/aksw/iguana/cc/metrics/Metric.java b/src/main/java/org/aksw/iguana/cc/metrics/Metric.java new file mode 100644 index 000000000..0f4bc15fa --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/Metric.java @@ -0,0 +1,41 @@ +package org.aksw.iguana.cc.metrics; + +import com.fasterxml.jackson.annotation.*; +import org.aksw.iguana.cc.metrics.impl.*; + +@JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "type") +@JsonSubTypes({ + @JsonSubTypes.Type(value = AggregatedExecutionStatistics.class, name = "AES"), + @JsonSubTypes.Type(value = AvgQPS.class, name = "AvgQPS"), + @JsonSubTypes.Type(value = EachExecutionStatistic.class, name = "EachQuery"), + @JsonSubTypes.Type(value = NoQ.class, name = "NoQ"), + @JsonSubTypes.Type(value = NoQPH.class, name = "NoQPH"), + @JsonSubTypes.Type(value = PAvgQPS.class, name = "PAvgQPS"), + @JsonSubTypes.Type(value = PQPS.class, name = "PQPS"), + @JsonSubTypes.Type(value = QMPH.class, name = "QMPH"), + @JsonSubTypes.Type(value = QPS.class, name = "QPS") +}) +public abstract class Metric { + private final String name; + private final String abbreviation; + private final String description; + + public Metric(String name, String abbreviation, String description) { + this.name = name; + this.abbreviation = abbreviation; + this.description = description; + } + + + public String getDescription(){ + return this.description; + } + + public String getName(){ + return this.name; + } + + public String getAbbreviation(){ + return this.abbreviation; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/ModelWritingMetric.java b/src/main/java/org/aksw/iguana/cc/metrics/ModelWritingMetric.java new file mode 100644 index 000000000..9debe1481 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/ModelWritingMetric.java @@ -0,0 +1,19 @@ +package org.aksw.iguana.cc.metrics; + +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.commons.rdf.IRES; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; + +import java.util.List; +import java.util.Map; + +public interface ModelWritingMetric { + default Model createMetricModel(List workers, List[][] data, IRES.Factory iresFactory) { + return 
ModelFactory.createDefaultModel(); + } + + default Model createMetricModel(List workers, Map> data, IRES.Factory iresFactory) { + return ModelFactory.createDefaultModel(); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/QueryMetric.java b/src/main/java/org/aksw/iguana/cc/metrics/QueryMetric.java new file mode 100644 index 000000000..9b771a570 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/QueryMetric.java @@ -0,0 +1,9 @@ +package org.aksw.iguana.cc.metrics; + +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.util.List; + +public interface QueryMetric { + Number calculateQueryMetric(List data); +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/TaskMetric.java b/src/main/java/org/aksw/iguana/cc/metrics/TaskMetric.java new file mode 100644 index 000000000..8b4360306 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/TaskMetric.java @@ -0,0 +1,9 @@ +package org.aksw.iguana.cc.metrics; + +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.util.List; + +public interface TaskMetric { + Number calculateTaskMetric(List workers, List[][] data); +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/WorkerMetric.java b/src/main/java/org/aksw/iguana/cc/metrics/WorkerMetric.java new file mode 100644 index 000000000..1fe5b763f --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/WorkerMetric.java @@ -0,0 +1,9 @@ +package org.aksw.iguana.cc.metrics; + +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.util.List; + +public interface WorkerMetric { + Number calculateWorkerMetric(HttpWorker.Config worker, List[] data); +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/AggregatedExecutionStatistics.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/AggregatedExecutionStatistics.java new file mode 100644 index 000000000..8582f2020 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/AggregatedExecutionStatistics.java @@ -0,0 +1,87 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.ModelWritingMetric; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.commons.rdf.IONT; +import org.aksw.iguana.commons.rdf.IPROP; +import org.aksw.iguana.commons.rdf.IRES; +import org.aksw.iguana.commons.time.TimeUtils; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import org.apache.jena.vocabulary.RDF; + +import java.math.BigInteger; +import java.time.Duration; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +public class AggregatedExecutionStatistics extends Metric implements ModelWritingMetric { + + public AggregatedExecutionStatistics() { + super("Aggregated Execution Statistics", "AES", "Sums up the statistics of each query execution for each query a worker and task has. 
The result size only contains the value of the last execution."); + } + + @Override + public Model createMetricModel(List workers, List[][] data, IRES.Factory iresFactory) { + Model m = ModelFactory.createDefaultModel(); + for (var worker : workers) { + for (int i = 0; i < worker.config().queries().getQueryCount(); i++) { + Resource queryRes = iresFactory.getWorkerQueryResource(worker, i); + m.add(createAggregatedModel(data[(int) worker.getWorkerID()][i], queryRes)); + } + } + return m; + } + + @Override + public Model createMetricModel(List workers, Map> data, IRES.Factory iresFactory) { + Model m = ModelFactory.createDefaultModel(); + for (String queryID : data.keySet()) { + Resource queryRes = iresFactory.getTaskQueryResource(queryID); + m.add(createAggregatedModel(data.get(queryID), queryRes)); + } + return m; + } + + private static Model createAggregatedModel(List data, Resource queryRes) { + Model m = ModelFactory.createDefaultModel(); + BigInteger succeeded = BigInteger.ZERO; + BigInteger failed = BigInteger.ZERO; + Optional resultSize = Optional.empty(); + BigInteger wrongCodes = BigInteger.ZERO; + BigInteger timeOuts = BigInteger.ZERO; + BigInteger unknownExceptions = BigInteger.ZERO; + Duration totalTime = Duration.ZERO; + + for (HttpWorker.ExecutionStats exec : data) { + switch (exec.endState()) { + case SUCCESS -> succeeded = succeeded.add(BigInteger.ONE); + case TIMEOUT -> timeOuts = timeOuts.add(BigInteger.ONE); + case HTTP_ERROR -> wrongCodes = wrongCodes.add(BigInteger.ONE); + case MISCELLANEOUS_EXCEPTION -> unknownExceptions = unknownExceptions.add(BigInteger.ONE); + } + + if (!exec.successful()) + failed = failed.add(BigInteger.ONE); + + totalTime = totalTime.plus(exec.duration()); + if (exec.contentLength().isPresent()) + resultSize = Optional.of(BigInteger.valueOf(exec.contentLength().getAsLong())); + } + + m.add(queryRes, IPROP.succeeded, ResourceFactory.createTypedLiteral(succeeded)); + m.add(queryRes, IPROP.failed, ResourceFactory.createTypedLiteral(failed)); + m.add(queryRes, IPROP.resultSize, ResourceFactory.createTypedLiteral(resultSize.orElse(BigInteger.valueOf(-1)))); + m.add(queryRes, IPROP.timeOuts, ResourceFactory.createTypedLiteral(timeOuts)); + m.add(queryRes, IPROP.wrongCodes, ResourceFactory.createTypedLiteral(wrongCodes)); + m.add(queryRes, IPROP.unknownException, ResourceFactory.createTypedLiteral(unknownExceptions)); + m.add(queryRes, IPROP.totalTime, TimeUtils.createTypedDurationLiteralInSeconds(totalTime)); + m.add(queryRes, RDF.type, IONT.executedQuery); + + return m; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/AvgQPS.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/AvgQPS.java new file mode 100644 index 000000000..cb27e55b4 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/AvgQPS.java @@ -0,0 +1,45 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.TaskMetric; +import org.aksw.iguana.cc.metrics.WorkerMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.List; + +public class AvgQPS extends Metric implements TaskMetric, WorkerMetric { + + public AvgQPS() { + super("Average Queries per Second", "AvgQPS", "This metric calculates the average QPS between all queries."); + } + + @Override + public Number calculateTaskMetric(List workers, List[][] data) { + final var sum = workers.stream() + .map(worker -> (BigDecimal) 
this.calculateWorkerMetric(worker.config(), data[(int) worker.getWorkerID()])) + .reduce(BigDecimal.ZERO, BigDecimal::add); + + try { + return sum.divide(BigDecimal.valueOf(data.length), 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } + + @Override + public Number calculateWorkerMetric(HttpWorker.Config worker, List[] data) { + BigDecimal sum = BigDecimal.ZERO; + QPS qpsmetric = new QPS(); + for (List datum : data) { + sum = sum.add((BigDecimal) qpsmetric.calculateQueryMetric(datum)); + } + + try { + return sum.divide(BigDecimal.valueOf(data.length), 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/EachExecutionStatistic.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/EachExecutionStatistic.java new file mode 100644 index 000000000..c6e1bf95a --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/EachExecutionStatistic.java @@ -0,0 +1,60 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.ModelWritingMetric; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.commons.rdf.IONT; +import org.aksw.iguana.commons.rdf.IPROP; +import org.aksw.iguana.commons.rdf.IRES; +import org.aksw.iguana.commons.time.TimeUtils; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; +import org.apache.jena.vocabulary.RDF; + +import java.math.BigInteger; +import java.util.List; + +public class EachExecutionStatistic extends Metric implements ModelWritingMetric { + + public EachExecutionStatistic() { + super("Each Query Execution Statistic", "EachQuery", "This metric saves the statistics of each query execution."); + } + + @Override + public Model createMetricModel(List workers, List[][] data, IRES.Factory iresFactory) { + Model m = ModelFactory.createDefaultModel(); + for (var worker : workers) { + for (int i = 0; i < worker.config().queries().getQueryCount(); i++) { + Resource workerQueryResource = iresFactory.getWorkerQueryResource(worker, i); + Resource queryRes = IRES.getResource(worker.config().queries().getQueryId(i)); + BigInteger run = BigInteger.ONE; + for (HttpWorker.ExecutionStats exec : data[(int) worker.getWorkerID()][i]) { + Resource runRes = iresFactory.getWorkerQueryRunResource(worker, i, run); + m.add(workerQueryResource, IPROP.queryExecution, runRes); + m.add(runRes, RDF.type, IONT.queryExecution); + m.add(runRes, IPROP.time, TimeUtils.createTypedDurationLiteral(exec.duration())); + m.add(runRes, IPROP.startTime, TimeUtils.createTypedInstantLiteral(exec.startTime())); + m.add(runRes, IPROP.success, ResourceFactory.createTypedLiteral(exec.successful())); + m.add(runRes, IPROP.run, ResourceFactory.createTypedLiteral(run)); + m.add(runRes, IPROP.code, ResourceFactory.createTypedLiteral(exec.endState().value)); + m.add(runRes, IPROP.resultSize, ResourceFactory.createTypedLiteral(exec.contentLength().orElse(-1))); + m.add(runRes, IPROP.queryID, queryRes); + if (exec.responseBodyHash().isPresent()) { + Resource responseBodyRes = IRES.getResponsebodyResource(exec.responseBodyHash().getAsLong()); + m.add(runRes, IPROP.responseBody, responseBodyRes); + m.add(responseBodyRes, RDF.type, IONT.responseBody); + m.add(responseBodyRes, IPROP.responseBodyHash, 
ResourceFactory.createTypedLiteral(exec.responseBodyHash().getAsLong())); + } + if (exec.error().isPresent()) + m.add(runRes, IPROP.exception, ResourceFactory.createTypedLiteral(exec.error().get().toString())); + if (exec.httpStatusCode().isPresent()) + m.add(runRes, IPROP.httpCode, ResourceFactory.createTypedLiteral(exec.httpStatusCode().get())); + run = run.add(BigInteger.ONE); + } + } + } + return m; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQ.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQ.java new file mode 100644 index 000000000..411f73ca9 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQ.java @@ -0,0 +1,38 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.TaskMetric; +import org.aksw.iguana.cc.metrics.WorkerMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.util.List; + +public class NoQ extends Metric implements TaskMetric, WorkerMetric { + + public NoQ() { + super("Number of Queries", "NoQ", "This metric calculates the number of successfully executed queries."); + } + + @Override + public Number calculateTaskMetric(List workers, List[][] data) { + final var sum = workers.stream() + .map(worker -> (BigInteger) this.calculateWorkerMetric(worker.config(), data[(int) worker.getWorkerID()])) + .reduce(BigInteger.ZERO, BigInteger::add); + return sum; + } + + @Override + public Number calculateWorkerMetric(HttpWorker.Config worker, List[] data) { + BigInteger sum = BigInteger.ZERO; + for (List datum : data) { + for (HttpWorker.ExecutionStats exec : datum) { + if (exec.successful()) { + sum = sum.add(BigInteger.ONE); + } + } + } + return sum; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQPH.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQPH.java new file mode 100644 index 000000000..790f17a89 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/NoQPH.java @@ -0,0 +1,47 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.TaskMetric; +import org.aksw.iguana.cc.metrics.WorkerMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.time.Duration; +import java.util.List; + +public class NoQPH extends Metric implements TaskMetric, WorkerMetric { + + public NoQPH() { + super("Number of Queries per Hour", "NoQPH", "This metric calculates the number of successfully executed queries per hour."); + } + @Override + public Number calculateTaskMetric(List workers, List[][] data) { + final var sum = workers.stream() + .map(worker -> (BigDecimal) this.calculateWorkerMetric(worker.config(), data[(int) worker.getWorkerID()])) + .reduce(BigDecimal.ZERO, BigDecimal::add); + return sum; + } + + @Override + public Number calculateWorkerMetric(HttpWorker.Config worker, List[] data) { + BigDecimal successes = BigDecimal.ZERO; + Duration totalTime = Duration.ZERO; + for (List datum : data) { + for (HttpWorker.ExecutionStats exec : datum) { + if (exec.successful()) { + successes = successes.add(BigDecimal.ONE); + totalTime = totalTime.plus(exec.duration()); + } + } + } + BigDecimal tt = (new BigDecimal(BigInteger.valueOf(totalTime.toNanos()), 9)).divide(BigDecimal.valueOf(3600), 20, RoundingMode.HALF_UP); + + try { + return successes.divide(tt, 10, RoundingMode.HALF_UP); + } catch 
(ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/PAvgQPS.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/PAvgQPS.java new file mode 100644 index 000000000..d22472a55 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/PAvgQPS.java @@ -0,0 +1,52 @@ +package org.aksw.iguana.cc.metrics.impl; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.TaskMetric; +import org.aksw.iguana.cc.metrics.WorkerMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.RoundingMode; +import java.util.List; + +public class PAvgQPS extends Metric implements TaskMetric, WorkerMetric { + + private final int penalty; + + public PAvgQPS(@JsonProperty("penalty") Integer penalty) { + super("Penalized Average Queries per Second", "PAvgQPS", "This metric calculates the average QPS between all queries. Failed executions receive a time penalty."); + this.penalty = penalty; + } + + @Override + public Number calculateTaskMetric(List workers, List[][] data) { + final var sum = workers.stream() + .map(worker -> (BigDecimal) this.calculateWorkerMetric(worker.config(), data[(int) worker.getWorkerID()])) + .reduce(BigDecimal.ZERO, BigDecimal::add); + + try { + return sum.divide(BigDecimal.valueOf(data.length), 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } + + @Override + public Number calculateWorkerMetric(HttpWorker.Config worker, List[] data) { + BigDecimal sum = BigDecimal.ZERO; + PQPS pqpsmetric = new PQPS(penalty); + for (List datum : data) { + sum = sum.add((BigDecimal) pqpsmetric.calculateQueryMetric(datum)); + } + if (data.length == 0) { + return BigDecimal.ZERO; + } + + try { + return sum.divide(BigDecimal.valueOf(data.length), 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/PQPS.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/PQPS.java new file mode 100644 index 000000000..78b237c5e --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/PQPS.java @@ -0,0 +1,43 @@ +package org.aksw.iguana.cc.metrics.impl; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.QueryMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.time.Duration; +import java.util.List; + +public class PQPS extends Metric implements QueryMetric { + + private final int penalty; + + public PQPS(@JsonProperty("penalty") Integer penalty) { + super("Penalized Queries per Second", "PQPS", "This metric calculates for each query the amount of executions per second. 
Failed executions receive a time penalty."); + this.penalty = penalty; + } + + @Override + public Number calculateQueryMetric(List data) { + BigDecimal numberOfExecutions = BigDecimal.ZERO; + Duration totalTime = Duration.ZERO; + for (HttpWorker.ExecutionStats exec : data) { + numberOfExecutions = numberOfExecutions.add(BigDecimal.ONE); + if (exec.successful()) { + totalTime = totalTime.plus(exec.duration()); + } else { + totalTime = totalTime.plusMillis(penalty); + } + } + BigDecimal tt = (new BigDecimal(BigInteger.valueOf(totalTime.toNanos()), 9)); + + try { + return numberOfExecutions.divide(tt, 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/QMPH.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/QMPH.java new file mode 100644 index 000000000..d2ae19143 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/QMPH.java @@ -0,0 +1,49 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.TaskMetric; +import org.aksw.iguana.cc.metrics.WorkerMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.time.Duration; +import java.util.List; + +public class QMPH extends Metric implements TaskMetric, WorkerMetric { + + public QMPH() { + super("Query Mixes per Hour", "QMPH", "This metric calculates the amount of query mixes (a given set of queries) that are executed per hour."); + } + + @Override + public Number calculateTaskMetric(List workers, List[][] data) { + final var sum = workers.stream() + .map(worker -> (BigDecimal) this.calculateWorkerMetric(worker.config(), data[(int) worker.getWorkerID()])) + .reduce(BigDecimal.ZERO, BigDecimal::add); + return sum; + } + + @Override + public Number calculateWorkerMetric(HttpWorker.Config worker, List[] data) { + BigDecimal successes = BigDecimal.ZERO; + BigDecimal noq = BigDecimal.valueOf(worker.queries().getQueryCount()); + Duration totalTime = Duration.ZERO; + for (List datum : data) { + for (HttpWorker.ExecutionStats exec : datum) { + if (exec.successful()) { + successes = successes.add(BigDecimal.ONE); + totalTime = totalTime.plus(exec.duration()); + } + } + } + BigDecimal tt = (new BigDecimal(BigInteger.valueOf(totalTime.toNanos()), 9)).divide(BigDecimal.valueOf(3600), 20, RoundingMode.HALF_UP); + + try { + return successes.divide(tt, 10, RoundingMode.HALF_UP).divide(noq, 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/metrics/impl/QPS.java b/src/main/java/org/aksw/iguana/cc/metrics/impl/QPS.java new file mode 100644 index 000000000..b20e2d84d --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/metrics/impl/QPS.java @@ -0,0 +1,36 @@ +package org.aksw.iguana.cc.metrics.impl; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.QueryMetric; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.math.RoundingMode; +import java.time.Duration; +import java.util.List; + +public class QPS extends Metric implements QueryMetric { + + public QPS() { + super("Queries per Second", "QPS", "This metric calculates for each query the amount of executions per second."); + } + + @Override + public Number calculateQueryMetric(List data) { + BigDecimal successes = BigDecimal.ZERO; + 
Duration totalTime = Duration.ZERO; + for (HttpWorker.ExecutionStats exec : data) { + if (exec.successful()) { + successes = successes.add(BigDecimal.ONE); + totalTime = totalTime.plus(exec.duration()); + } + } + BigDecimal tt = (new BigDecimal(BigInteger.valueOf(totalTime.toNanos()), 9)); + try { + return successes.divide(tt, 10, RoundingMode.HALF_UP); + } catch (ArithmeticException e) { + return BigDecimal.ZERO; + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/query/handler/QueryHandler.java b/src/main/java/org/aksw/iguana/cc/query/handler/QueryHandler.java new file mode 100644 index 000000000..ceea25660 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/handler/QueryHandler.java @@ -0,0 +1,227 @@ +package org.aksw.iguana.cc.query.handler; + +import com.fasterxml.jackson.annotation.*; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.annotation.JsonDeserialize; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import org.aksw.iguana.cc.query.selector.QuerySelector; +import org.aksw.iguana.cc.query.selector.impl.LinearQuerySelector; +import org.aksw.iguana.cc.query.selector.impl.RandomQuerySelector; +import org.aksw.iguana.cc.query.list.QueryList; +import org.aksw.iguana.cc.query.list.impl.FileBasedQueryList; +import org.aksw.iguana.cc.query.list.impl.InMemQueryList; +import org.aksw.iguana.cc.query.source.impl.FileLineQuerySource; +import org.aksw.iguana.cc.query.source.impl.FileSeparatorQuerySource; +import org.aksw.iguana.cc.query.source.impl.FolderQuerySource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Objects; +import java.util.function.Supplier; + +/** + * The QueryHandler is used by every worker that extends the AbstractWorker. + * It initializes the QuerySource, QuerySelector, QueryList and, if needed, PatternHandler. + * After the initialization, it provides the next query to the worker using the generated QuerySource + * and the order given by the QuerySelector. + * + * @author frensing + */ +@JsonDeserialize(using = QueryHandler.Deserializer.class) +public class QueryHandler { + static class Deserializer extends StdDeserializer { + final HashMap queryHandlers = new HashMap<>(); + protected Deserializer(Class vc) { + super(vc); + } + + protected Deserializer() { + this(null); + } + + @Override + public QueryHandler deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { + QueryHandler.Config queryHandlerConfig = ctxt.readValue(jp, QueryHandler.Config.class); + if (!queryHandlers.containsKey(queryHandlerConfig)) + queryHandlers.put(queryHandlerConfig, new QueryHandler(queryHandlerConfig)); + + return queryHandlers.get(queryHandlerConfig); + } + } + + public record Config ( + String path, + Format format, + String separator, + Boolean caching, + Order order, + Long seed, + Language lang + ) { + public Config(@JsonProperty(required = true) String path, Format format, String separator, Boolean caching, Order order, Long seed, Language lang) { + this.path = path; + this.format = (format == null ? Format.ONE_PER_LINE : format); + this.caching = (caching == null || caching); + this.order = (order == null ? Order.LINEAR : order); + this.seed = (seed == null ? 0 : seed); + this.lang = (lang == null ? Language.SPARQL : lang); + this.separator = (separator == null ? 
"" : separator); + } + + public enum Format { + @JsonEnumDefaultValue ONE_PER_LINE("one-per-line"), + SEPARATOR("separator"), + FOLDER("folder"); + + final String value; + + Format(String value) { + this.value = Objects.requireNonNullElse(value, "one-per-line"); + } + + @JsonValue + public String value() { + return value; + } + } + + public enum Order { + @JsonEnumDefaultValue LINEAR("linear"), + RANDOM("random"); + + final String value; + + Order(String value) { + this.value = value; + } + + @JsonValue + public String value() { + return value; + } + } + + public enum Language { + @JsonEnumDefaultValue SPARQL("SPARQL"), + UNSPECIFIED("unspecified"); + + final String value; + + Language(String value) { + this.value = value; + } + + @JsonValue + public String value() { + return value; + } + } + } + + public record QueryStringWrapper(int index, String query) {} + public record QueryStreamWrapper(int index, boolean cached, Supplier queryInputStreamSupplier) {} + + + protected final Logger LOGGER = LoggerFactory.getLogger(QueryHandler.class); + + @JsonValue + final protected Config config; + + final protected QueryList queryList; + + private int workerCount = 0; // give every worker inside the same worker config an offset seed + + final protected int hashCode; + + /** + * Empty Constructor for Testing purposes. + * TODO: look for an alternative + */ + protected QueryHandler() { + config = null; + queryList = null; + hashCode = 0; + } + + @JsonCreator + public QueryHandler(Config config) throws IOException { + final var querySource = switch (config.format()) { + case ONE_PER_LINE -> new FileLineQuerySource(Path.of(config.path())); + case SEPARATOR -> new FileSeparatorQuerySource(Path.of(config.path()), config.separator); + case FOLDER -> new FolderQuerySource(Path.of(config.path())); + }; + + queryList = (config.caching()) ? + new InMemQueryList(querySource) : + new FileBasedQueryList(querySource); + + this.config = config; + hashCode = queryList.hashCode(); + } + + public QuerySelector getQuerySelectorInstance() { + switch (config.order()) { + case LINEAR -> { return new LinearQuerySelector(queryList.size()); } + case RANDOM -> { return new RandomQuerySelector(queryList.size(), config.seed() + workerCount++); } + } + + throw new IllegalStateException("Unknown query selection order: " + config.order()); + } + + public QueryStringWrapper getNextQuery(QuerySelector querySelector) throws IOException { + final var queryIndex = querySelector.getNextIndex(); + return new QueryStringWrapper(queryIndex, queryList.getQuery(queryIndex)); + } + + public QueryStreamWrapper getNextQueryStream(QuerySelector querySelector) throws IOException { + final var queryIndex = querySelector.getNextIndex(); + return new QueryStreamWrapper(queryIndex, config.caching(), () -> { + try { + return this.queryList.getQueryStream(queryIndex); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + + @Override + public int hashCode() { + return hashCode; + } + + public int getQueryCount() { + return this.queryList.size(); + } + + public String getQueryId(int i) { + return this.queryList.hashCode() + ":" + i; + } + + /** + * Returns every query id in the format: queryListHash:index
+ * The index of a query inside the returned array is the same as the index inside the string. + * + * @return String[] of query ids + */ + public String[] getAllQueryIds() { + String[] out = new String[queryList.size()]; + for (int i = 0; i < queryList.size(); i++) { + out[i] = getQueryId(i); + } + return out; + } + + /** + * Returns the configuration of the QueryHandler. + * + * @return the configuration of the QueryHandler + */ + public Config getConfig() { + return config; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/query/list/QueryList.java b/src/main/java/org/aksw/iguana/cc/query/list/QueryList.java new file mode 100644 index 000000000..df9cd83ef --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/list/QueryList.java @@ -0,0 +1,54 @@ +package org.aksw.iguana.cc.query.list; + +import org.aksw.iguana.cc.query.source.QuerySource; + +import java.io.IOException; +import java.io.InputStream; + +/** + * The abstract class for a QueryList. A query list provides the queries to the QueryHandler. + * + * @author frensing + */ +public abstract class QueryList { + + /** + * This is the QuerySource from which the queries should be retrieved. + */ + final protected QuerySource querySource; + + public QueryList(QuerySource querySource) { + if (querySource == null) + throw new IllegalArgumentException("QuerySource must not be null."); + this.querySource = querySource; + } + + /** + * This method returns the amount of queries in the query list. + * + * @return The amount of queries in the query list + */ + public int size() { + return querySource.size(); + } + + /** + * This method returns the hashcode of the query list which is the hashcode of the query source. + * + * @return The hashcode of the query list + */ + @Override + public int hashCode() { + return querySource.hashCode(); + } + + /** + * This method returns a query at the given index. + * + * @param index Index of the query in the list + * @return The query at the given index + */ + public abstract String getQuery(int index) throws IOException; + + public abstract InputStream getQueryStream(int index) throws IOException; +} diff --git a/src/main/java/org/aksw/iguana/cc/query/list/impl/FileBasedQueryList.java b/src/main/java/org/aksw/iguana/cc/query/list/impl/FileBasedQueryList.java new file mode 100644 index 000000000..f01c3ab63 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/list/impl/FileBasedQueryList.java @@ -0,0 +1,29 @@ +package org.aksw.iguana.cc.query.list.impl; + +import org.aksw.iguana.cc.query.list.QueryList; +import org.aksw.iguana.cc.query.source.QuerySource; + +import java.io.IOException; +import java.io.InputStream; + +/** + * A query list which reads the queries directly from a file. 
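+ * Queries are fetched from disk on every access, which keeps memory usage low; see {@link InMemQueryList}
+ * for the in-memory alternative.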
+ * + * @author frensing + */ +public class FileBasedQueryList extends QueryList { + + public FileBasedQueryList(QuerySource querySource) { + super(querySource); + } + + @Override + public String getQuery(int index) throws IOException { + return querySource.getQuery(index); + } + + @Override + public InputStream getQueryStream(int index) throws IOException { + return querySource.getQueryStream(index); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/query/list/impl/InMemQueryList.java b/src/main/java/org/aksw/iguana/cc/query/list/impl/InMemQueryList.java new file mode 100644 index 000000000..7e6d30a37 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/list/impl/InMemQueryList.java @@ -0,0 +1,45 @@ +package org.aksw.iguana.cc.query.list.impl; + +import org.aksw.iguana.cc.query.list.QueryList; +import org.aksw.iguana.cc.query.source.QuerySource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.charset.StandardCharsets; +import java.util.List; + +/** + * A query list which reads the queries into memory on initialization. + * During the benchmark the query are returned from the memory. + * + * @author frensing + */ +public class InMemQueryList extends QueryList { + + private static final Logger LOGGER = LoggerFactory.getLogger(InMemQueryList.class); + + private final List queries; + + public InMemQueryList(QuerySource querySource) throws IOException { + super(querySource); + queries = this.querySource.getAllQueries().stream().map(s -> s.getBytes(StandardCharsets.UTF_8)).toList(); + } + + @Override + public String getQuery(int index) { + return new String(this.queries.get(index), StandardCharsets.UTF_8); + } + + @Override + public InputStream getQueryStream(int index) { + return new ByteArrayInputStream(this.queries.get(index)); + } + + @Override + public int size() { + return this.queries.size(); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/query/selector/QuerySelector.java b/src/main/java/org/aksw/iguana/cc/query/selector/QuerySelector.java new file mode 100644 index 000000000..824643213 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/selector/QuerySelector.java @@ -0,0 +1,31 @@ +package org.aksw.iguana.cc.query.selector; + +import static java.text.MessageFormat.format; + +/** + * The QuerySelector provides a method to retrieve the index of a query, that should be executed next.
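+ * A minimal consumption sketch (the selector below would come from {@code QueryHandler#getQuerySelectorInstance()}):
+ * <pre>{@code
+ * int next = selector.getNextIndex();       // index of the query to execute next
+ * int current = selector.getCurrentIndex(); // index of the last returned query
+ * }</pre>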
+ * It is used by the QueryHandler to get the next query. + * + * @author frensing + */ +public abstract class QuerySelector { + + protected int index = 0; + + protected final int size; + + public QuerySelector(int size) { + if (size <= 0) + throw new IllegalArgumentException(format("{0} size must be >0.", QuerySelector.class.getSimpleName())); + this.size = size; + } + + /** + * This method gives the next query index that should be used. + * + * @return the next query index + */ + public abstract int getNextIndex(); + + public abstract int getCurrentIndex(); +} diff --git a/src/main/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelector.java b/src/main/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelector.java new file mode 100644 index 000000000..3d3faad32 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelector.java @@ -0,0 +1,38 @@ +package org.aksw.iguana.cc.query.selector.impl; + +import org.aksw.iguana.cc.query.selector.QuerySelector; + +/** + * This QuerySelector is used to get the next query index in a linear order. If the last query is reached it starts + * again at the first query. + *
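+ * <p>
+ * For example (a sketch of the wrap-around behaviour):
+ * <pre>{@code
+ * LinearQuerySelector selector = new LinearQuerySelector(3);
+ * selector.getNextIndex(); // 0
+ * selector.getNextIndex(); // 1
+ * selector.getNextIndex(); // 2
+ * selector.getNextIndex(); // 0 again
+ * }</pre>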
+ * <p>
+ * It is used by the QueryHandler to get the next query.
+ *
+ * @author frensing
+ */
+public class LinearQuerySelector extends QuerySelector {
+
+    public LinearQuerySelector(int size) {
+        super(size);
+        index = -1;
+    }
+
+    @Override
+    public int getNextIndex() {
+        index++;
+        if (index >= this.size) {
+            index = 0;
+        }
+        return index;
+    }
+
+    /**
+     * Return the current index. This is the index of the last returned query. If no query was returned yet, it returns
+     * -1.
+     * @return the index of the last returned query
+     */
+    @Override
+    public int getCurrentIndex() {
+        return index;
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelector.java b/src/main/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelector.java
new file mode 100644
index 000000000..80b18d51c
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelector.java
@@ -0,0 +1,33 @@
+package org.aksw.iguana.cc.query.selector.impl;
+
+import org.aksw.iguana.cc.query.selector.QuerySelector;
+
+import java.util.Random;
+
+/**
+ * This QuerySelector is used to get the next query index in a random order.
+ *
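+ * <p>
+ * For example (sketch; a fixed seed makes the sequence reproducible):
+ * <pre>{@code
+ * RandomQuerySelector selector = new RandomQuerySelector(10, 42);
+ * int next = selector.getNextIndex();    // pseudo-random index in [0, 10)
+ * int same = selector.getCurrentIndex(); // the index chosen above
+ * }</pre>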
+ * <p>
+ * It is used by the QueryHandler to get the next query. + * + * @author frensing + */ +public class RandomQuerySelector extends QuerySelector { + + final protected Random indexGenerator; + int currentIndex; + + public RandomQuerySelector(int size, long seed) { + super(size); + indexGenerator = new Random(seed); + } + + @Override + public int getNextIndex() { + return currentIndex = this.indexGenerator.nextInt(this.size); + } + + @Override + public int getCurrentIndex() { + return currentIndex; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/query/source/QuerySource.java b/src/main/java/org/aksw/iguana/cc/query/source/QuerySource.java new file mode 100644 index 000000000..59285cfee --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/query/source/QuerySource.java @@ -0,0 +1,65 @@ +package org.aksw.iguana.cc.query.source; + +import org.aksw.iguana.cc.utils.files.FileUtils; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.file.Path; +import java.util.List; + +/** + * The abstract class for a QuerySource.
+ * The QuerySource provides the queries to the QueryList. It abstracts the actual format of the query files.
+ *
+ * @author frensing
+ */
+public abstract class QuerySource {
+
+    /** This string represents the path of the file or folder that contains the queries. */
+    final protected Path path;
+
+    /**
+     * This integer represents the hashcode of the file or folder that contains the queries. It is stored for
+     * performance reasons, so that the hashcode does not have to be calculated every time it is needed.
+     * (It's needed every time the id of a query is requested.)
+     */
+    final protected int hashCode;
+
+    public QuerySource(Path path) {
+        if (path == null)
+            throw new IllegalArgumentException("Path for a query source must not be null.");
+        this.path = path;
+        this.hashCode = FileUtils.getHashcodeFromFileContent(path);
+    }
+
+    /**
+     * This method returns the number of queries in the source.
+     *
+     * @return the number of queries in the source
+     */
+    public abstract int size();
+
+    /**
+     * This method returns the query at the given index.
+     *
+     * @param index the index of the query counted from the first query (in the first file)
+     * @return String of the query
+     * @throws IOException if the query could not be read
+     */
+    public abstract String getQuery(int index) throws IOException;
+
+    public abstract InputStream getQueryStream(int index) throws IOException;
+
+    /**
+     * This method returns all queries in the source as a list of Strings.
+     *
+     * @return List of Strings of all queries
+     * @throws IOException if the queries could not be read
+     */
+    public abstract List<String> getAllQueries() throws IOException;
+
+    @Override
+    public int hashCode() {
+        return hashCode;
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySource.java b/src/main/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySource.java
new file mode 100644
index 000000000..69789aa6b
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySource.java
@@ -0,0 +1,18 @@
+package org.aksw.iguana.cc.query.source.impl;
+
+import org.aksw.iguana.cc.utils.files.FileUtils;
+
+import java.io.IOException;
+import java.nio.file.Path;
+
+/**
+ * The FileLineQuerySource reads queries from a file with one query per line.
+ *
+ * @author frensing
+ */
+public class FileLineQuerySource extends FileSeparatorQuerySource {
+    public FileLineQuerySource(Path filepath) throws IOException {
+        super(filepath, FileUtils.getLineEnding(filepath));
+    }
+
+}
diff --git a/src/main/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySource.java b/src/main/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySource.java
new file mode 100644
index 000000000..b1e82c9c3
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySource.java
@@ -0,0 +1,73 @@
+package org.aksw.iguana.cc.query.source.impl;
+
+import org.aksw.iguana.cc.query.source.QuerySource;
+import org.aksw.iguana.cc.utils.files.IndexedQueryReader;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.file.Path;
+import java.util.List;
+
+/**
+ * The FileSeparatorQuerySource reads queries from a file with
+ * (multiline) queries that are separated by a separator.
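+ * <p>
+ * For example, a file with the default separator (file content is illustrative):
+ * <pre>{@code
+ * SELECT * WHERE { ?s ?p ?o }
+ * ###
+ * SELECT * WHERE { ?s a ?type }
+ * }</pre>
+ * would yield two queries.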
+ *
+ * @author frensing
+ */
+public class FileSeparatorQuerySource extends QuerySource {
+    private static final Logger LOGGER = LoggerFactory.getLogger(FileSeparatorQuerySource.class);
+
+    private static final String DEFAULT_SEPARATOR = "###";
+
+    final protected IndexedQueryReader iqr;
+
+    /**
+     * This constructor indexes the queries inside the given file. It assumes that the queries inside the file are
+     * separated with the default separator ('###').
+     *
+     * @param path path to the queries-file
+     */
+    public FileSeparatorQuerySource(Path path) throws IOException {
+        super(path);
+        iqr = getIqr(path, DEFAULT_SEPARATOR);
+    }
+
+    /**
+     * This constructor indexes the queries inside the given file. Queries inside the file should be separated with the
+     * given separator string. If the separator string parameter is empty, it assumes that the queries inside the file
+     * are separated by blank lines.
+     *
+     * @param path      path to the queries-file
+     * @param separator string with which the queries inside the file are separated
+     */
+    public FileSeparatorQuerySource(Path path, String separator) throws IOException {
+        super(path);
+        iqr = getIqr(path, separator);
+    }
+
+    private static IndexedQueryReader getIqr(Path path, String separator) throws IOException {
+        return (separator.isEmpty()) ? IndexedQueryReader.makeWithEmptyLines(path) : IndexedQueryReader.makeWithStringSeparator(path, separator);
+    }
+
+    @Override
+    public int size() {
+        return iqr.size();
+    }
+
+    @Override
+    public String getQuery(int index) throws IOException {
+        return iqr.readQuery(index);
+    }
+
+    @Override
+    public InputStream getQueryStream(int index) throws IOException {
+        return iqr.streamQuery(index);
+    }
+
+    @Override
+    public List<String> getAllQueries() throws IOException {
+        return iqr.readQueries();
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySource.java b/src/main/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySource.java
new file mode 100644
index 000000000..be71ccec8
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySource.java
@@ -0,0 +1,79 @@
+package org.aksw.iguana.cc.query.source.impl;
+
+import org.aksw.iguana.cc.query.source.QuerySource;
+import org.aksw.iguana.cc.utils.files.FileUtils;
+import org.apache.commons.io.input.AutoCloseInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Stream;
+
+import static java.text.MessageFormat.format;
+
+/**
+ * The FolderQuerySource reads queries from a folder with query files.
+ * Each file contains one (multiline) query.
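+ * <p>
+ * A minimal sketch (the folder layout is illustrative):
+ * <pre>{@code
+ * // queries/q01.sparql, queries/q02.sparql, ...
+ * QuerySource source = new FolderQuerySource(Path.of("queries"));
+ * source.size();      // number of readable files, sorted by file name
+ * source.getQuery(0); // content of the first file
+ * }</pre>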
+ *
+ * @author frensing
+ */
+public class FolderQuerySource extends QuerySource {
+
+    protected static final Logger LOGGER = LoggerFactory.getLogger(FolderQuerySource.class);
+
+    protected Path[] files;
+
+    public FolderQuerySource(Path path) throws IOException {
+        super(path);
+
+        if (!Files.isDirectory(path)) {
+            final var message = format("Path {0} is not a directory.", path);
+            LOGGER.error(message);
+            throw new IOException(message);
+        }
+
+        LOGGER.info("Indexing folder {}.", path);
+
+        try (Stream<Path> pathStream = Files.list(path)) {
+            files = pathStream
+                    .filter(p -> Files.isReadable(p) && Files.isRegularFile(p))
+                    .sorted()
+                    .toArray(Path[]::new);
+        }
+
+    }
+
+    @Override
+    public int size() {
+        return this.files.length;
+    }
+
+    @Override
+    public String getQuery(int index) throws IOException {
+        return Files.readString(files[index], StandardCharsets.UTF_8);
+    }
+
+    @Override
+    public InputStream getQueryStream(int index) throws IOException {
+        return new AutoCloseInputStream(new BufferedInputStream(new FileInputStream(files[index].toFile())));
+    }
+
+    @Override
+    public List<String> getAllQueries() throws IOException {
+        List<String> queries = new ArrayList<>(this.files.length);
+        for (int i = 0; i < this.files.length; i++) {
+            queries.add(getQuery(i));
+        }
+        return queries;
+    }
+
+    @Override
+    public int hashCode() {
+        return FileUtils.getHashcodeFromFileContent(this.files[0]);
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/storage/Storable.java b/src/main/java/org/aksw/iguana/cc/storage/Storable.java
new file mode 100644
index 000000000..e45bcc976
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/storage/Storable.java
@@ -0,0 +1,40 @@
+package org.aksw.iguana.cc.storage;
+
+import org.apache.jena.rdf.model.Model;
+
+import java.util.List;
+
+/**
+ * This interface provides the functionality to store data in different formats. The data can be stored in CSV files
+ * or in RDF models.
+ */
+public interface Storable {
+
+    record CSVData (
+            String folderName,
+            List<CSVFileData> files
+    ) {
+        public record CSVFileData(String filename, String[][] data) {}
+    }
+
+    interface AsCSV extends Storable {
+
+        /**
+         * Converts the data into CSV files. The returned record contains the target folder name and, for each file,
+         * its name and row data.
+         *
+         * @return CSVData record which contains all the files and their data that should be created and stored
+         */
+        CSVData toCSV();
+    }
+
+    interface AsRDF extends Storable {
+
+        /**
+         * Converts the data into an RDF model, which will be added to the appropriate storages.
+         *
+         * @return RDF model that contains the data
+         */
+        Model toRDF();
+    }
+
+}
diff --git a/src/main/java/org/aksw/iguana/cc/storage/Storage.java b/src/main/java/org/aksw/iguana/cc/storage/Storage.java
new file mode 100644
index 000000000..06d1c2234
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/storage/Storage.java
@@ -0,0 +1,34 @@
+package org.aksw.iguana.cc.storage;
+
+import org.apache.jena.rdf.model.Model;
+
+/**
+ * Interface for the Result Storages
+ *
+ * @author f.conrads
+ *
+ */
+public interface Storage {
+
+    /**
+     * Stores the task result into the storage. This method will be executed after a task has finished.
+     * Depending on the storage's format, the storage class may need to convert the data into the appropriate format.
+     *
+     * @param data the given result model
+     */
+    void storeResult(Model data);
+
+    /**
+     * General purpose method to store data into the storage.
+     * This method will mostly be used by the language processors to store their already formatted data.
+     * The default implementation will call the {@link #storeResult(Model)} method. This might not be the best solution
+     * for storages that do not use RDF as their format.
+     *
+     * @param data the data to store
+     */
+    default void storeData(Storable data) {
+        if (data instanceof Storable.AsRDF) {
+            storeResult(((Storable.AsRDF) data).toRDF());
+        }
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/storage/impl/CSVStorage.java b/src/main/java/org/aksw/iguana/cc/storage/impl/CSVStorage.java
new file mode 100644
index 000000000..6565525fc
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/storage/impl/CSVStorage.java
@@ -0,0 +1,430 @@
+package org.aksw.iguana.cc.storage.impl;
+
+import com.opencsv.CSVReader;
+import com.opencsv.CSVWriter;
+import com.opencsv.CSVWriterBuilder;
+import com.opencsv.exceptions.CsvValidationException;
+import org.aksw.iguana.cc.config.elements.StorageConfig;
+import org.aksw.iguana.cc.metrics.*;
+import org.aksw.iguana.cc.metrics.impl.AggregatedExecutionStatistics;
+import org.aksw.iguana.cc.metrics.impl.EachExecutionStatistic;
+import org.aksw.iguana.cc.storage.Storable;
+import org.aksw.iguana.cc.storage.Storage;
+import org.aksw.iguana.commons.rdf.IONT;
+import org.aksw.iguana.commons.rdf.IPROP;
+import org.apache.jena.arq.querybuilder.SelectBuilder;
+import org.apache.jena.arq.querybuilder.WhereBuilder;
+import org.apache.jena.query.*;
+import org.apache.jena.rdf.model.*;
+import org.apache.jena.sparql.lang.sparql_11.ParseException;
+import org.apache.jena.vocabulary.RDF;
+import org.apache.jena.vocabulary.RDFS;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.nio.file.*;
+import java.util.*;
+import java.util.function.Predicate;
+
+public class CSVStorage implements Storage {
+
+    /** This private record is used to store information about the connections used in a task. */
+    private record ConnectionInfo(String connection, String version, String dataset) {}
+
+    public record Config(String directory) implements StorageConfig {
+        public Config(String directory) {
+            if (directory == null) {
+                directory = "results";
+            }
+            Path path = Paths.get(directory);
+            if (Files.exists(path) && !Files.isDirectory(path)) {
+                throw new IllegalArgumentException("The given path is not a directory.");
+            }
+            this.directory = directory;
+        }
+    }
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(CSVStorage.class);
+
+    private final List<Metric> metrics;
+
+    private final Path suiteFolder;
+    private Path currentFolder;
+    private final Path taskFile;
+    private final Path taskConfigFile;
+
+    private List<Resource> workerResources;
+    private Resource taskRes;
+    List<ConnectionInfo> connections;
+
+    public CSVStorage(Config config, List<Metric> metrics, String suiteID) {
+        this(config.directory(), metrics, suiteID);
+    }
+
+    public CSVStorage(String folderPath, List<Metric> metrics, String suiteID) {
+        this.metrics = metrics;
+
+        Path parentFolder;
+        try {
+            parentFolder = Paths.get(folderPath);
+        } catch (InvalidPathException e) {
+            LOGGER.error("Can't store csv files, the given path is invalid.", e);
+            this.suiteFolder = null;
+            this.taskFile = null;
+            this.taskConfigFile = null;
+            return;
+        }
+
+        this.suiteFolder = parentFolder.resolve("suite-" + suiteID);
+        this.taskFile = this.suiteFolder.resolve("suite-summary.csv");
+        this.taskConfigFile = this.suiteFolder.resolve("task-configuration.csv");
+
+        if (Files.notExists(suiteFolder)) {
+            try {
+                Files.createDirectories(suiteFolder);
+            } catch (IOException e) {
+                LOGGER.error("Can't store csv files, directory could not be created.", e);
+                return;
+            }
+        }
+
+        try {
+            Files.createFile(taskFile);
+        } catch (IOException e) {
+            LOGGER.error("Couldn't create the file: " + taskFile.toAbsolutePath(), e);
+            return;
+        }
+
+        try {
+            Files.createFile(taskConfigFile);
+        } catch (IOException e) {
+            LOGGER.error("Couldn't create the file: " + taskConfigFile.toAbsolutePath(), e);
+            return;
+        }
+
+        // write headers for the suite-summary.csv file
+        try (CSVWriter csvWriter = getCSVWriter(taskFile)) {
+            Metric[] taskMetrics = metrics.stream().filter(x -> TaskMetric.class.isAssignableFrom(x.getClass())).toArray(Metric[]::new);
+            List<String> headerList = new LinkedList<>();
+            // headerList.addAll(List.of("connection", "dataset", "startDate", "endDate", "noOfWorkers"));
+            headerList.addAll(List.of("taskID", "startDate", "endDate", "noOfWorkers"));
+            headerList.addAll(Arrays.stream(taskMetrics).map(Metric::getAbbreviation).toList());
+            String[] header = headerList.toArray(String[]::new);
+            csvWriter.writeNext(header, true);
+        } catch (IOException e) {
+            LOGGER.error("Error while writing to file: " + taskFile.toAbsolutePath(), e);
+        }
+
+        // write headers for the task-configuration.csv file
+        try (CSVWriter csvWriter = getCSVWriter(taskConfigFile)) {
+            csvWriter.writeNext(new String[]{"taskID", "connection", "version", "dataset"}, true);
+        } catch (IOException e) {
+            LOGGER.error("Error while writing to file: " + taskConfigFile.toAbsolutePath(), e);
+        }
+    }
+
+    /**
+     * Stores the task result into the storage. This method will be executed after a task has finished.
+     *
+     * @param data the given result model
+     */
+    @Override
+    public void storeResult(Model data) {
+        try {
+            setObjectAttributes(data);
+        } catch (NoSuchElementException e) {
+            LOGGER.error("Error while querying the result model. 
The given model is probably incorrect.", e); + return; + } + + this.currentFolder = this.suiteFolder.resolve("task-" + retrieveTaskID(this.taskRes)); + try { + Files.createDirectory(this.currentFolder); + } catch (IOException e) { + LOGGER.error("Error while storing the task result in a csv file.", e); + } + + try { + storeTaskInfo(); + storeTaskResults(data); + } catch (IOException e) { + LOGGER.error("Error while storing the task result in a csv file.", e); + } catch (NoSuchElementException | ParseException e) { + LOGGER.error("Error while storing the task result in a csv file. The given model is probably incorrect.", e); + } + + try { + Path temp = createCSVFile("worker", "summary"); + storeWorkerResults(this.taskRes, temp, data, this.metrics); + for (Resource workerRes : workerResources) { + String workerID = data.listObjectsOfProperty(workerRes, IPROP.workerID).next().asLiteral().getLexicalForm(); + try { + Path file = createCSVFile("query", "summary", "worker", workerID); + Path file2 = createCSVFile("each", "execution", "worker", workerID); + storeSummarizedQueryResults(workerRes, file, data, this.metrics, false); + storeEachQueryResults(workerRes, file2, data, this.metrics); + } catch (IOException e) { + LOGGER.error("Error while storing the query results of a worker in a csv file.", e); + } catch (NoSuchElementException e) { + LOGGER.error("Error while storing the query results of a worker in a csv file. The given model is probably incorrect.", e); + } + } + } catch (IOException e) { + LOGGER.error("Error while storing the worker results in a csv file.", e); + } catch (NoSuchElementException e) { + LOGGER.error("Error while storing the worker results in a csv file. The given model is probably incorrect.", e); + } + + try { + Path file = createCSVFile("query", "summary", "task"); + storeSummarizedQueryResults(taskRes, file, data, this.metrics, true); + } catch (IOException e) { + LOGGER.error("Error while storing the query results of a task result in a csv file.", e); + } catch (NoSuchElementException e) { + LOGGER.error("Error while storing the query results of a task result in a csv file. The given model is probably incorrect.", e); + } + } + + @Override + public void storeData(Storable data) { + if (!(data instanceof Storable.AsCSV)) return; // dismiss data if it can't be stored as csv + Storable.CSVData csvdata = ((Storable.AsCSV) data).toCSV(); + + Path responseTypeDir = Path.of(csvdata.folderName()); + responseTypeDir = this.currentFolder.resolve(responseTypeDir); + + try { + Files.createDirectory(responseTypeDir); + } catch (FileAlreadyExistsException ignored) { + } catch (IOException e) { + LOGGER.error("Error while creating the directory for the language processor results. ", e); + return; + } + + for (var csvFile : csvdata.files()) { + // check for file extension + String filename = csvFile.filename().endsWith(".csv") ? csvFile.filename() : csvFile.filename() + ".csv"; + Path file = responseTypeDir.resolve(filename); + + int i = 1; // skip the header by default + + if (Files.notExists(file)) { + try { + Files.createFile(file); + } catch (IOException e) { + LOGGER.error("Error while creating a csv file for language processor results. 
The storing of language processor results will be skipped.", e);
+                    return;
+                }
+                i = 0; // include header if file is new
+            }
+
+            try (CSVWriter writer = getCSVWriter(file)) {
+                for (; i < csvFile.data().length; i++) {
+                    writer.writeNext(csvFile.data()[i], true);
+                }
+            } catch (IOException e) {
+                LOGGER.error("Error while writing the data into a csv file for language processor results. The storing of language processor results will be skipped.", e);
+                return;
+            }
+        }
+    }
+
+    /**
+     * This method sets the object's attributes by querying the given model.
+     *
+     * @param data the result model
+     * @throws NoSuchElementException might be thrown if the model is incorrect
+     */
+    private void setObjectAttributes(Model data) throws NoSuchElementException {
+        // obtain connection information of task
+        this.connections = new ArrayList<>();
+        ResIterator resIterator = data.listSubjectsWithProperty(RDF.type, IONT.connection);
+        while (resIterator.hasNext()) {
+            Resource connectionRes = resIterator.nextResource();
+            NodeIterator nodeIterator = data.listObjectsOfProperty(connectionRes, RDFS.label);
+            String conString = nodeIterator.next().asLiteral().getLexicalForm();
+
+            // obtain connection version
+            String conVersionString = "";
+            nodeIterator = data.listObjectsOfProperty(connectionRes, IPROP.version);
+            if (nodeIterator.hasNext()) {
+                conVersionString = nodeIterator.next().toString();
+            }
+
+            // obtain dataset
+            String conDatasetString = "";
+            nodeIterator = data.listObjectsOfProperty(connectionRes, IPROP.dataset);
+            if (nodeIterator.hasNext()) {
+                conDatasetString = nodeIterator.next().toString();
+            }
+            this.connections.add(new ConnectionInfo(conString, conVersionString, conDatasetString));
+        }
+
+        // obtain task type
+        resIterator = data.listSubjectsWithProperty(RDF.type, IONT.task);
+        this.taskRes = resIterator.nextResource();
+
+        // obtain worker resources
+        NodeIterator nodeIterator = data.listObjectsOfProperty(this.taskRes, IPROP.workerResult);
+        this.workerResources = nodeIterator.toList().stream().map(RDFNode::asResource).toList();
+    }
+
+    /**
+     * Creates a CSV file with the given name values that will be located inside the parent folder. The name values are
+     * joined together with the character '-'. Empty values will be ignored.
+     *
+     * @param nameValues strings that build up the name of the file
+     * @throws IOException if an I/O error occurs
+     * @return path object to the created CSV file
+     */
+    private Path createCSVFile(String... nameValues) throws IOException {
+        // remove empty string values
+        nameValues = Arrays.stream(nameValues).filter(Predicate.not(String::isEmpty)).toArray(String[]::new);
+        String filename = String.join("-", nameValues) + ".csv";
+        Path file = this.currentFolder.resolve(filename);
+        Files.createFile(file);
+        return file;
+    }
+
+    /**
+     * Store the summarized query results for the given RDF resource into a CSV file.
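+     * <p>
+     * The resulting file holds one row per query, e.g. (columns and values are illustrative):
+     * <pre>
+     * queryID,succeeded,failed,totalTime,...,QPS
+     * "0",10,0,"PT2.5S",...,4.0
+     * </pre>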
+     *
+     * @param parentRes the parent resource inside the model that contains the query results
+     * @param file the file where the results should be stored
+     * @param data the model that contains the data
+     * @param metrics the metrics that should be stored
+     * @param summarizeForTask if true, the query results will be summarized for a task; therefore, for each query
+     *        its full ID will be used (otherwise the query ids could clash with each other)
+     */
+    private static void storeSummarizedQueryResults(Resource parentRes, Path file, Model data, List<Metric> metrics, boolean summarizeForTask) throws IOException, NoSuchElementException {
+        boolean containsAggrStats = !metrics.stream().filter(AggregatedExecutionStatistics.class::isInstance).toList().isEmpty();
+        Metric[] queryMetrics = metrics.stream().filter(x -> QueryMetric.class.isAssignableFrom(x.getClass())).toArray(Metric[]::new);
+
+        SelectBuilder sb = new SelectBuilder();
+        sb.addWhere(parentRes, IPROP.query, "?eQ");
+        sb.addWhere("?eQ", IPROP.queryID, "?query");
+        sb.addVar("queryID").addWhere("?query", summarizeForTask ? IPROP.fullID : IPROP.id, "?queryID");
+        if (containsAggrStats) {
+            queryProperties(sb, "?eQ", IPROP.succeeded, IPROP.failed, IPROP.totalTime, IPROP.resultSize, IPROP.wrongCodes, IPROP.timeOuts, IPROP.unknownException);
+        }
+        queryMetrics(sb, "?eQ", queryMetrics);
+
+        executeAndStoreQuery(sb, file, data);
+    }
+
+    private static void storeEachQueryResults(Resource parentRes, Path file, Model data, List<Metric> metrics) throws IOException {
+        boolean containsEachStats = !metrics.stream().filter(EachExecutionStatistic.class::isInstance).toList().isEmpty();
+        if (!containsEachStats) {
+            return;
+        }
+
+        SelectBuilder sb = new SelectBuilder();
+        sb.addWhere(parentRes, IPROP.query, "?eQ") // variable name should be different from property names
+                .addWhere("?eQ", IPROP.queryExecution, "?exec")
+                .addOptional(new WhereBuilder().addWhere("?exec", IPROP.responseBody, "?rb").addWhere("?rb", IPROP.responseBodyHash, "?responseBodyHash"))
+                .addOptional(new WhereBuilder().addWhere("?exec", IPROP.exception, "?exception"))
+                .addOptional(new WhereBuilder().addWhere("?exec", IPROP.httpCode, "?httpCode"));
+        sb.addWhere("?eQ", IPROP.queryID, "?query");
+        sb.addVar("queryID").addWhere("?query", IPROP.id, "?queryID");
+        queryProperties(sb, "?exec", IPROP.run, IPROP.success, IPROP.startTime, IPROP.time, IPROP.resultSize, IPROP.code);
+        sb.addVar("httpCode").addVar("exception").addVar("responseBodyHash");
+        executeAndStoreQuery(sb, file, data);
+    }
+
+    /**
+     * Stores the current task information into the task configuration file.
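+     * <p>
+     * One row is written per connection, e.g. (values are illustrative):
+     * <pre>
+     * taskID,connection,version,dataset
+     * "https://example.org/suite/1/task/0","fuseki","4.9.0","sp2b"
+     * </pre>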
+     */
+    private void storeTaskInfo() {
+        try (CSVWriter csvWriter = getCSVWriter(taskConfigFile)) {
+            for (ConnectionInfo connectionInfo : connections) {
+                csvWriter.writeNext(new String[]{this.taskRes.toString(), connectionInfo.connection(), connectionInfo.version(), connectionInfo.dataset()}, true);
+            }
+        } catch (IOException e) {
+            LOGGER.error("Error while writing to file: " + taskConfigFile.toAbsolutePath(), e);
+        }
+    }
+
+    private void storeTaskResults(Model data) throws IOException, NoSuchElementException, ParseException {
+        Metric[] taskMetrics = metrics.stream().filter(x -> TaskMetric.class.isAssignableFrom(x.getClass())).toArray(Metric[]::new);
+
+        SelectBuilder sb = new SelectBuilder();
+        queryProperties(sb, String.format("<%s>", this.taskRes.toString()), IPROP.startDate, IPROP.endDate, IPROP.noOfWorkers);
+        queryMetrics(sb, String.format("<%s>", this.taskRes.toString()), taskMetrics);
+
+        try (QueryExecution exec = QueryExecutionFactory.create(sb.build(), data);
+             CSVWriter csvWriter = getCSVWriter(taskFile);
+             ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+            ResultSet results = exec.execSelect();
+            ResultSetFormatter.outputAsCSV(baos, results);
+
+            // workaround to remove the created header from the ResultSetFormatter
+            CSVReader reader = new CSVReader(new StringReader(baos.toString()));
+            try {
+                reader.readNext();
+
+                // inject connection and dataset information
+                String[] row = reader.readNext();
+                String[] newRow = new String[row.length + 1];
+                newRow[0] = this.taskRes.getURI();
+                // newRow[0] = connection;
+                // newRow[1] = dataset;
+                System.arraycopy(row, 0, newRow, 1, row.length);
+                csvWriter.writeNext(newRow, true);
+            } catch (CsvValidationException ignored) {
+                // shouldn't happen
+            }
+        }
+    }
+
+    private static void storeWorkerResults(Resource taskRes, Path file, Model data, List<Metric> metrics) throws IOException, NoSuchElementException {
+        Metric[] workerMetrics = metrics.stream().filter(x -> WorkerMetric.class.isAssignableFrom(x.getClass())).toArray(Metric[]::new);
+
+        SelectBuilder sb = new SelectBuilder();
+        sb.addWhere(taskRes, IPROP.workerResult, "?worker");
+        queryProperties(sb, "?worker", IPROP.workerID, IPROP.workerType, IPROP.noOfQueries, IPROP.timeOut, IPROP.startDate, IPROP.endDate);
+        queryMetrics(sb, "?worker", workerMetrics);
+
+        executeAndStoreQuery(sb, file, data);
+    }
+
+    private static CSVWriter getCSVWriter(Path file) throws IOException {
+        return (CSVWriter) new CSVWriterBuilder(new FileWriter(file.toAbsolutePath().toString(), true))
+                .withQuoteChar('\"')
+                .withSeparator(',')
+                .withLineEnd("\n")
+                .build();
+    }
+
+    private static void queryProperties(SelectBuilder sb, String variable, Property... properties) {
+        for (Property prop : properties) {
+            sb.addVar(prop.getLocalName()).addWhere(variable, prop, "?" + prop.getLocalName());
+        }
+    }
+
+    private static void queryMetrics(SelectBuilder sb, String variable, Metric[] metrics) {
+        for (Metric m : metrics) {
+            // Optional, in case metric isn't created, because of failed executions
+            sb.addVar(m.getAbbreviation()).addOptional(variable, IPROP.createMetricProperty(m), "?" + m.getAbbreviation());
+        }
+    }
+
+    private static void executeAndStoreQuery(SelectBuilder sb, Path file, Model data) throws IOException {
+        try (QueryExecution exec = QueryExecutionFactory.create(sb.build(), data);
+             FileOutputStream fos = new FileOutputStream(file.toFile())) {
+            ResultSet results = exec.execSelect();
+            ResultSetFormatter.outputAsCSV(fos, results);
+        }
+    }
+
+    /**
+     * Retrieves the task ID from the given task resource. The current model doesn't save the task ID as a property of
+     * the task resource. Therefore, the task ID is extracted from the URI of the task resource.
+     *
+     * @param taskRes the task resource
+     * @return the task ID
+     */
+    private static String retrieveTaskID(Resource taskRes) {
+        return taskRes.getURI().substring(taskRes.getURI().lastIndexOf("/") + 1);
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/storage/impl/RDFFileStorage.java b/src/main/java/org/aksw/iguana/cc/storage/impl/RDFFileStorage.java
new file mode 100644
index 000000000..73ca1642d
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/storage/impl/RDFFileStorage.java
@@ -0,0 +1,98 @@
+package org.aksw.iguana.cc.storage.impl;
+
+import org.aksw.iguana.cc.config.elements.StorageConfig;
+import org.aksw.iguana.cc.storage.Storage;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.jena.rdf.model.Model;
+import org.apache.jena.riot.Lang;
+import org.apache.jena.riot.RDFDataMgr;
+import org.apache.jena.riot.RDFLanguages;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Calendar;
+import java.util.Optional;
+import java.util.function.Supplier;
+
+public class RDFFileStorage implements Storage {
+    public record Config(String path) implements StorageConfig {}
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(RDFFileStorage.class.getName());
+
+    protected static Supplier<String> defaultFileNameSupplier = () -> {
+        var now = Calendar.getInstance();
+        return String.format("%d-%02d-%02d_%02d-%02d.%03d",
+                now.get(Calendar.YEAR),
+                now.get(Calendar.MONTH) + 1,
+                now.get(Calendar.DAY_OF_MONTH),
+                now.get(Calendar.HOUR_OF_DAY),
+                now.get(Calendar.MINUTE),
+                now.get(Calendar.MILLISECOND));
+    };
+
+    final private Lang lang;
+    private Path path;
+
+    public RDFFileStorage(Config config) {
+        this(config.path());
+    }
+
+    /**
+     * Uses a generated file named {yyyy}-{MM}-{dd}_{HH}-{mm}.{SSS}.ttl
+     */
+    public RDFFileStorage() {
+        this("");
+    }
+
+    /**
+     * Uses the provided filename. If the filename is null or empty, a generated file named
+     * {yyyy}-{MM}-{dd}_{HH}-{mm}.{SSS}.ttl is used. The file extension determines the file format.
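+     * <p>
+     * A minimal sketch (the file name is illustrative; given a Jena {@code Model}):
+     * <pre>{@code
+     * RDFFileStorage storage = new RDFFileStorage("results/run.nt"); // N-Triples, inferred from the extension
+     * storage.storeResult(model);
+     * }</pre>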
+ * + * @param fileName the filename to use + */ + public RDFFileStorage(String fileName) { + if (fileName == null || Optional.of(fileName).orElse("").isBlank()) { + path = Paths.get("").resolve(defaultFileNameSupplier.get() + ".ttl"); + } + else { + path = Paths.get(fileName); + if (Files.exists(path) && Files.isDirectory(path)) { + path = path.resolve(defaultFileNameSupplier.get() + ".ttl"); + } else if (Files.exists(path)) { + path = Paths.get(FilenameUtils.removeExtension(fileName) + "_" + defaultFileNameSupplier.get() + ".ttl"); // we're just going to assume that that's enough to make it unique + } + } + final var parentDir = path.toAbsolutePath().getParent(); + try { + Files.createDirectories(parentDir); + } catch (IOException e) { + LOGGER.error("Could not create parent directories for RDFFileStorage. ", e); + } + + this.lang = RDFLanguages.filenameToLang(path.toString(), Lang.TTL); + } + + @Override + public void storeResult(Model data){ + try (OutputStream os = new FileOutputStream(path.toString(), true)) { + RDFDataMgr.write(os, data, this.lang); + } catch (IOException e) { + LOGGER.error("Could not write to RDFFileStorage using lang: " + lang, e); + } + } + + @Override + public String toString() { + return this.getClass().getSimpleName(); + } + + public String getFileName() { + return this.path.toString(); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorage.java b/src/main/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorage.java new file mode 100644 index 000000000..d391d3b25 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorage.java @@ -0,0 +1,117 @@ +package org.aksw.iguana.cc.storage.impl; + +import com.fasterxml.jackson.annotation.JsonProperty; +import org.aksw.iguana.cc.config.elements.StorageConfig; +import org.aksw.iguana.cc.controller.MainController; +import org.aksw.iguana.cc.storage.Storage; +import org.apache.http.auth.AuthScope; +import org.apache.http.auth.Credentials; +import org.apache.http.auth.UsernamePasswordCredentials; +import org.apache.http.client.CredentialsProvider; +import org.apache.http.client.HttpClient; +import org.apache.http.impl.client.BasicCredentialsProvider; +import org.apache.http.impl.client.HttpClients; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.riot.Lang; +import org.apache.jena.riot.RDFDataMgr; +import org.apache.jena.update.UpdateExecutionFactory; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateProcessor; +import org.apache.jena.update.UpdateRequest; +import org.mortbay.jetty.Main; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.StringWriter; + + +/** + * This Storage will save all the metric results into a specified triple store + * + * @author f.conrads + * + */ +public class TriplestoreStorage implements Storage { + + Logger logger = LoggerFactory.getLogger(TriplestoreStorage.class); + + public record Config( + @JsonProperty(required = true) String endpoint, + String user, + String password, + String baseUri + ) implements StorageConfig {} + + private UpdateRequest blockRequest = UpdateFactory.create(); + private final String endpoint; + private final String user; + private final String password; + private final String baseUri; + + public TriplestoreStorage(Config config) { + endpoint = config.endpoint(); + user = config.user(); + password = config.password(); + baseUri = config.baseUri(); + } + + + public TriplestoreStorage(String endpoint, String user, String pwd, String baseUri) { 
+ this.endpoint = endpoint; + this.user = user; + this.password = pwd; + this.baseUri = baseUri; + } + + public TriplestoreStorage(String endpoint) { + this.endpoint = endpoint; + this.user = null; + this.password = null; + this.baseUri = null; + } + + @Override + public void storeResult(Model data) { + StringWriter results = new StringWriter(); + RDFDataMgr.write(results, data, Lang.NT); + String update = "INSERT DATA {" + results.toString() + "}"; + //Create Update Request from block + blockRequest.add(update); + + //submit Block to Triple Store + UpdateProcessor processor = UpdateExecutionFactory + .createRemote(blockRequest, endpoint, createHttpClient()); + + // If dry run is enabled, the data will not be sent to an existing triplestore, + // therefore we catch the exception and log it instead of letting the program crash. + // The dry run is used for generating the configuration files for the native compilation with GraalVM. + // For normal runs, exceptions will be thrown normally. + if (MainController.Args.dryRun) { + try { + processor.execute(); + } catch (Exception e) { + logger.error("Error while storing data in triplestore: " + e.getMessage()); + } + } else { + processor.execute(); + } + blockRequest = new UpdateRequest(); + } + + private HttpClient createHttpClient() { + CredentialsProvider credsProvider = new BasicCredentialsProvider(); + if(user != null && password != null){ + Credentials credentials = new UsernamePasswordCredentials(user, password); + credsProvider.setCredentials(AuthScope.ANY, credentials); + } + HttpClient httpclient = HttpClients.custom() + .setDefaultCredentialsProvider(credsProvider) + .build(); + return httpclient; + } + + @Override + public String toString(){ + return this.getClass().getSimpleName(); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/suite/IguanaSuiteParser.java b/src/main/java/org/aksw/iguana/cc/suite/IguanaSuiteParser.java new file mode 100644 index 000000000..ad431cb65 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/suite/IguanaSuiteParser.java @@ -0,0 +1,260 @@ +package org.aksw.iguana.cc.suite; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.core.JsonFactory; +import com.fasterxml.jackson.core.JsonParser; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.DeserializationContext; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.deser.std.StdDeserializer; +import com.fasterxml.jackson.databind.module.SimpleModule; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import com.networknt.schema.JsonSchema; +import com.networknt.schema.JsonSchemaFactory; +import com.networknt.schema.SpecVersion; +import com.networknt.schema.ValidationMessage; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.config.elements.DatasetConfig; +import org.apache.commons.io.FilenameUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.text.MessageFormat; +import java.time.Duration; +import java.time.Instant; +import java.util.*; +import java.util.function.Function; +import java.util.stream.Collectors; + +/** + * Creates an IguanaConfig from a given JSON or YAML file, and validates the config 
using a JSON schema file
+ */
+public class IguanaSuiteParser {
+    private static final Logger LOGGER = LoggerFactory.getLogger(IguanaSuiteParser.class);
+
+    private static final String SCHEMA_FILE = "/iguana-schema.json";
+
+    enum DataFormat {
+        YAML, JSON;
+
+        public static DataFormat getFormat(Path file) {
+            final var extension = FilenameUtils.getExtension(file.toString());
+            switch (extension) {
+                case "yml", "yaml" -> {
+                    return YAML;
+                }
+                case "json" -> {
+                    return JSON;
+                }
+                default -> throw new IllegalStateException("Unexpected suite file extension: " + extension);
+            }
+        }
+    }
+
+    /**
+     * Parses an IGUANA configuration file and optionally validates it against a JSON schema file, before parsing.
+     *
+     * @param config   the path to the configuration file.
+     * @param validate whether to validate the configuration file against the JSON schema file.
+     * @return a Suite object containing the parsed configuration.
+     * @throws IOException           if there is an error during IO.
+     * @throws IllegalStateException if the configuration file is invalid.
+     */
+    public static Suite parse(Path config, boolean validate) throws IOException {
+        final var format = DataFormat.getFormat(config);
+        JsonFactory factory = switch (format) {
+            case YAML -> new YAMLFactory();
+            case JSON -> new JsonFactory();
+        };
+
+        if (validate && !validateConfig(config)) {
+            throw new IllegalStateException("Invalid config file");
+        }
+
+        try (var stream = new FileInputStream(config.toFile())) {
+            return parse(stream, factory);
+        }
+    }
+
+    /**
+     * Validates an IGUANA configuration file against a JSON schema file.
+     *
+     * @param config the path to the configuration file.
+     * @return true if the configuration file is valid, false otherwise.
+     * @throws IOException if there is an error during IO.
+     */
+    public static boolean validateConfig(Path config) throws IOException {
+        final var format = DataFormat.getFormat(config);
+        JsonFactory factory = switch (format) {
+            case YAML -> new YAMLFactory();
+            case JSON -> new JsonFactory();
+        };
+        final var mapper = new ObjectMapper(factory);
+
+        JsonSchemaFactory schemaFactory = JsonSchemaFactory.getInstance(SpecVersion.VersionFlag.V6);
+        InputStream is = IguanaSuiteParser.class.getResourceAsStream(SCHEMA_FILE);
+        JsonSchema schema = schemaFactory.getSchema(is);
+        JsonNode node = mapper.readTree(config.toFile());
+        Set<ValidationMessage> errors = schema.validate(node);
+        if (!errors.isEmpty()) {
+            LOGGER.error("Found {} errors in configuration file.", errors.size());
+        }
+        for (ValidationMessage message : errors) {
+            LOGGER.error(message.getMessage());
+        }
+        return errors.isEmpty();
+    }
+
+    /**
+     * Parses an IGUANA configuration file.
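+     * <p>
+     * For example, a connection defined once may later be referenced by its name (illustrative, abbreviated YAML;
+     * the exact keys depend on the respective config records):
+     * <pre>{@code
+     * connections:
+     *   - name: "fuseki"
+     *     endpoint: "http://localhost:3030/ds/sparql"
+     * tasks:
+     *   - type: "stresstest"
+     *     workers:
+     *       - type: "SPARQLProtocolWorker"
+     *         connection: "fuseki"  # resolved to the connection defined above
+     * }</pre>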
+     * <p>
+ * + * This involves two steps: First, datasets and connections are parsed and stored. In a second step, the rest of the + * file is parsed. If the names of datasets and connections are used, they are replaced with the respective + * configurations that were parsed in the first step. + * + * @param inputStream the input stream containing the configuration file content. + * @param factory the JsonFactory instance used for parsing the configuration file. + * @return a Suite object containing the parsed configuration. + * @throws IOException if there is an error during IO. + */ + private static Suite parse(InputStream inputStream, JsonFactory factory) throws IOException { + ObjectMapper mapper = new ObjectMapper(factory); + + final var input = new String(inputStream.readAllBytes(), StandardCharsets.UTF_8); + final var datasets = preparseDataset(mapper, input); + + class DatasetDeserializer extends StdDeserializer { + public DatasetDeserializer() { + this(null); + } + + protected DatasetDeserializer(Class vc) { + super(vc); + } + + @Override + public DatasetConfig deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { + JsonNode node = jp.getCodec().readTree(jp); + if (node.isTextual()) { + final var datasetName = node.asText(); + if (!datasets.containsKey(datasetName)) + throw new IllegalStateException(MessageFormat.format("Unknown dataset name: {0}", datasetName)); + return datasets.get(datasetName); + } else { + DatasetConfig datasetConfig = ctxt.readValue(jp, DatasetConfig.class); + if (datasets.containsKey(datasetConfig.name())) + assert datasets.get(datasetConfig.name()) == datasetConfig; + else datasets.put(datasetConfig.name(), datasetConfig); + return datasetConfig; + } + } + } + mapper = new ObjectMapper(factory).registerModule(new SimpleModule() + .addDeserializer(DatasetConfig.class, new DatasetDeserializer())); + + final var connections = preparseConnections(mapper, input); + + class ConnectionDeserializer extends StdDeserializer { + + public ConnectionDeserializer() { + this(null); + } + + protected ConnectionDeserializer(Class vc) { + super(vc); + } + + @Override + public ConnectionConfig deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { + JsonNode node = jp.getCodec().readTree(jp); + if (node.isTextual()) { + final var connectionName = node.asText(); + if (!connections.containsKey(connectionName)) + throw new IllegalStateException(MessageFormat.format("Unknown connection name: {0}", connectionName)); + return connections.get(connectionName); + } else { + ConnectionConfig connectionConfig = ctxt.readValue(jp, ConnectionConfig.class); + if (connections.containsKey(connectionConfig.name())) + assert connections.get(connectionConfig.name()) == connectionConfig; + else connections.put(connectionConfig.name(), connectionConfig); + return connectionConfig; + } + } + } + + class HumanReadableDurationDeserializer extends StdDeserializer { + + public HumanReadableDurationDeserializer() { + this(null); + } + + protected HumanReadableDurationDeserializer(Class vc) { + super(vc); + } + + @Override + public Duration deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException { + var durationString = jp.getValueAsString() + .toLowerCase() + .replaceAll("\\s+", "") + .replace("years", "y") + .replace("year", "y") + .replace("months", "m") + .replace("month", "m") + .replace("weeks", "w") + .replace("week", "w") + .replace("days", "d") + .replace("day", "d") + .replace("mins", "m") + .replace("min", "m") + .replace("hrs", "h") + 
.replace("hr", "h") + .replace("secs", "s") + .replace("sec", "s") + .replaceFirst("(\\d+d)", "P$1T"); + if ((durationString.charAt(0) != 'P')) durationString = "PT" + durationString; + return Duration.parse(durationString); + } + } + + mapper = new ObjectMapper(factory).registerModule(new JavaTimeModule()) + .registerModule(new SimpleModule() + .addDeserializer(DatasetConfig.class, new DatasetDeserializer()) + .addDeserializer(ConnectionConfig.class, new ConnectionDeserializer()) + .addDeserializer(Duration.class, new HumanReadableDurationDeserializer())); + + final String suiteID = Instant.now().getEpochSecond() + "-" + Integer.toUnsignedString(input.hashCode()); // convert to unsigned, so that there is no double -- minus in the string + return new Suite(suiteID, mapper.readValue(input, Suite.Config.class)); + } + + /** + * Preparses the datasets field in a IGUANA configuration file and adds a custom Deserializer to mapper to enable retrieving already parsed datasets by name. + * + * @param mapper The ObjectMapper instance used for parsing the configuration file. + * @param input The input String containing the configuration file content. + * @return A Map of DatasetConfig objects, where the key is the dataset name and the value is the corresponding DatasetConfig object. + * @throws JsonProcessingException If there is an error during JSON processing. + */ + private static Map preparseDataset(ObjectMapper mapper, String input) throws JsonProcessingException { + @JsonIgnoreProperties(ignoreUnknown = true) + record PreparsingDatasets(@JsonProperty(required = true) List datasets) {} + final var preparsingDatasets = mapper.readValue(input, PreparsingDatasets.class); + + return preparsingDatasets.datasets().stream().collect(Collectors.toMap(DatasetConfig::name, Function.identity())); + } + + private static Map preparseConnections(ObjectMapper mapper, String input) throws JsonProcessingException { + @JsonIgnoreProperties(ignoreUnknown = true) + record PreparsingConnections(@JsonProperty(required = true) List connections) {} + final var preparsingConnections = mapper.readValue(input, PreparsingConnections.class); + + return preparsingConnections.connections().stream().collect(Collectors.toMap(ConnectionConfig::name, Function.identity())); + } + +} diff --git a/src/main/java/org/aksw/iguana/cc/suite/Suite.java b/src/main/java/org/aksw/iguana/cc/suite/Suite.java new file mode 100644 index 000000000..7e2e50025 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/suite/Suite.java @@ -0,0 +1,104 @@ +package org.aksw.iguana.cc.suite; + +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonProperty; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.config.elements.DatasetConfig; +import org.aksw.iguana.cc.config.elements.StorageConfig; +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.impl.*; +import org.aksw.iguana.cc.storage.Storage; +import org.aksw.iguana.cc.storage.impl.CSVStorage; +import org.aksw.iguana.cc.storage.impl.RDFFileStorage; +import org.aksw.iguana.cc.storage.impl.TriplestoreStorage; +import org.aksw.iguana.cc.tasks.impl.Stresstest; +import org.aksw.iguana.cc.tasks.Task; +import org.aksw.iguana.cc.worker.ResponseBodyProcessor; +import org.aksw.iguana.cc.worker.ResponseBodyProcessorInstances; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.ArrayList; +import java.util.List; + +public class Suite { + + public record Config( + @JsonIgnore + List 
datasets, /* Will already be consumed and ignored herein */
+            @JsonIgnore
+            List<ConnectionConfig> connections, /* Will already be consumed and ignored herein */
+            @JsonProperty(required = true)
+            List<Task.Config> tasks,
+            List<StorageConfig> storages,
+            List<Metric> metrics,
+            @JsonProperty List<ResponseBodyProcessor.Config> responseBodyProcessors
+    ) {}
+
+
+    private final String suiteId;
+    private final Config config;
+    private final ResponseBodyProcessorInstances responseBodyProcessorInstances;
+
+    private final static Logger LOGGER = LoggerFactory.getLogger(Suite.class);
+
+    private final List<Task> tasks = new ArrayList<>();
+
+    Suite(String suiteId, Config config) {
+        this.suiteId = suiteId;
+        this.config = config;
+        long taskID = 0;
+
+        responseBodyProcessorInstances = new ResponseBodyProcessorInstances(config.responseBodyProcessors);
+        List<Metric> metrics = initialiseMetrics(this.config.metrics);
+        List<Storage> storages = initialiseStorages(this.config.storages, metrics, this.suiteId);
+
+        for (Task.Config task : config.tasks()) {
+            if (task instanceof Stresstest.Config) {
+                tasks.add(new Stresstest(this.suiteId, taskID++, (Stresstest.Config) task, responseBodyProcessorInstances, storages, metrics));
+            }
+        }
+    }
+
+    private static List<Metric> initialiseMetrics(List<Metric> metrics) {
+        if (metrics != null && !metrics.isEmpty()) {
+            return metrics;
+        }
+
+        final List<Metric> out = new ArrayList<>();
+        out.add(new QPS());
+        out.add(new AvgQPS());
+        out.add(new NoQPH());
+        out.add(new AggregatedExecutionStatistics());
+        out.add(new EachExecutionStatistic());
+        out.add(new NoQ());
+        out.add(new QMPH());
+        return out;
+    }
+
+    private static List<Storage> initialiseStorages(List<StorageConfig> configs, List<Metric> metrics, String suiteID) {
+        List<Storage> out = new ArrayList<>();
+        for (var storageConfig : configs) {
+            if (storageConfig instanceof CSVStorage.Config) {
+                out.add(new CSVStorage((CSVStorage.Config) storageConfig, metrics, suiteID));
+            }
+            else if (storageConfig instanceof TriplestoreStorage.Config) {
+                out.add(new TriplestoreStorage((TriplestoreStorage.Config) storageConfig));
+            }
+            else if (storageConfig instanceof RDFFileStorage.Config) {
+                out.add(new RDFFileStorage((RDFFileStorage.Config) storageConfig));
+            }
+        }
+        return out;
+    }
+
+    public void run() {
+        for (int i = 0; i < tasks.size(); i++) {
+            LOGGER.info("Task/{} {} starting.", tasks.get(i).getTaskName(), i);
+            tasks.get(i).run();
+            LOGGER.info("Task/{} {} finished.", tasks.get(i).getTaskName(), i);
+        }
+    }
+}
+
+
diff --git a/src/main/java/org/aksw/iguana/cc/tasks/Task.java b/src/main/java/org/aksw/iguana/cc/tasks/Task.java
new file mode 100644
index 000000000..5c5901fcc
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/tasks/Task.java
@@ -0,0 +1,18 @@
+package org.aksw.iguana.cc.tasks;
+
+import com.fasterxml.jackson.annotation.JsonSubTypes;
+import com.fasterxml.jackson.annotation.JsonTypeInfo;
+import org.aksw.iguana.cc.tasks.impl.Stresstest;
+
+public interface Task {
+    @JsonTypeInfo(use = JsonTypeInfo.Id.NAME,
+            include = JsonTypeInfo.As.PROPERTY,
+            property = "type")
+    @JsonSubTypes({
+            @JsonSubTypes.Type(value = Stresstest.Config.class, name = "stresstest"),
+    })
+    interface Config {}
+
+    void run();
+    String getTaskName();
+}
diff --git a/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java b/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java
new file mode 100644
index 000000000..1e93882e1
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/tasks/impl/Stresstest.java
@@ -0,0 +1,119 @@
+package org.aksw.iguana.cc.tasks.impl;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import org.aksw.iguana.cc.metrics.Metric;
+import org.aksw.iguana.cc.storage.Storage;
+import org.aksw.iguana.cc.tasks.Task;
+import org.aksw.iguana.cc.worker.HttpWorker;
+import org.aksw.iguana.cc.worker.ResponseBodyProcessorInstances;
+import org.aksw.iguana.cc.worker.impl.SPARQLProtocolWorker;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+import java.util.concurrent.*;
+
+
+/**
+ * Stresstest.
+ * Will stresstest a connection using several Workers (simulated Users) each in one thread.
+ */
+public class Stresstest implements Task {
+
+    public record Config(
+            List<HttpWorker.Config> warmupWorkers,
+            @JsonProperty(required = true) List<HttpWorker.Config> workers
+    ) implements Task.Config {}
+
+    public record Result(
+            List<HttpWorker.Result> workerResults,
+            Calendar startTime,
+            Calendar endTime
+    ) {}
+
+
+    private static final Logger LOGGER = LoggerFactory.getLogger(Stresstest.class);
+
+    private final List<HttpWorker> warmupWorkers = new ArrayList<>();
+    private final List<HttpWorker> workers = new ArrayList<>();
+
+    private final StresstestResultProcessor srp;
+
+
+    public Stresstest(String suiteID, long stresstestID, Config config, ResponseBodyProcessorInstances responseBodyProcessorInstances, List<Storage> storages, List<Metric> metrics) {
+
+        // initialize workers
+        if (config.warmupWorkers() != null) {
+            long workerId = 0;
+            for (HttpWorker.Config workerConfig : config.warmupWorkers()) {
+                for (int i = 0; i < workerConfig.number(); i++) {
+                    var responseBodyProcessor = (workerConfig.parseResults()) ? responseBodyProcessorInstances.getProcessor(workerConfig.acceptHeader()) : null;
+                    warmupWorkers.add(new SPARQLProtocolWorker(workerId++, responseBodyProcessor, (SPARQLProtocolWorker.Config) workerConfig));
+                }
+            }
+        }
+
+        long workerId = 0; // ids continue across worker configs, so that every worker gets a unique id
+        for (HttpWorker.Config workerConfig : config.workers()) {
+            for (int i = 0; i < workerConfig.number(); i++) {
+                var responseBodyProcessor = (workerConfig.parseResults()) ? responseBodyProcessorInstances.getProcessor(workerConfig.acceptHeader()) : null;
+                workers.add(new SPARQLProtocolWorker(workerId++, responseBodyProcessor, (SPARQLProtocolWorker.Config) workerConfig));
+            }
+        }
+
+        // retrieve all query ids
+        Set<String> queryIDs = new HashSet<>();
+        for (HttpWorker.Config wConfig : config.workers()) {
+            if (wConfig instanceof SPARQLProtocolWorker.Config) {
+                queryIDs.addAll(List.of(wConfig.queries().getAllQueryIds()));
+            }
+        }
+
+        srp = new StresstestResultProcessor(
+                suiteID,
+                stresstestID,
+                this.workers,
+                new ArrayList<>(queryIDs),
+                metrics,
+                storages,
+                responseBodyProcessorInstances.getResults()
+        );
+    }
+
+    public void run() {
+        if (!warmupWorkers.isEmpty()) {
+            SPARQLProtocolWorker.initHttpClient(warmupWorkers.size());
+            var warmupResults = executeWorkers(warmupWorkers); // warmup results will be dismissed
+            SPARQLProtocolWorker.closeHttpClient();
+        }
+
+        SPARQLProtocolWorker.initHttpClient(workers.size());
+        var results = executeWorkers(workers);
+        SPARQLProtocolWorker.closeHttpClient();
+
+        srp.process(results.workerResults);
+        srp.calculateAndSaveMetrics(results.startTime, results.endTime);
+    }
+
+    private Result executeWorkers(List<HttpWorker> workers) {
+        List<HttpWorker.Result> results = new ArrayList<>(workers.size());
+        Calendar startTime = Calendar.getInstance(); // TODO: Calendar is outdated
+        var futures = workers.stream().map(HttpWorker::start).toList();
+        CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
+        Calendar endTime = Calendar.getInstance();
+        for (CompletableFuture<HttpWorker.Result> future : futures) {
+            try {
+                results.add(future.get());
+            } catch (ExecutionException e) {
+                LOGGER.error("Unexpected error during execution of worker.", e);
+            } catch (InterruptedException ignored) {}
+
+        }
+        return new Result(results, startTime, endTime);
+    }
+
+    @Override
+    public String getTaskName() {
+        return "stresstest";
+    }
+}
diff --git a/src/main/java/org/aksw/iguana/cc/tasks/impl/StresstestResultProcessor.java b/src/main/java/org/aksw/iguana/cc/tasks/impl/StresstestResultProcessor.java
new file mode 100644
index 000000000..c748f3244
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/cc/tasks/impl/StresstestResultProcessor.java
@@ -0,0 +1,293 @@
+package org.aksw.iguana.cc.tasks.impl;
+
+import org.aksw.iguana.cc.lang.LanguageProcessor;
+import org.aksw.iguana.cc.metrics.*;
+import org.aksw.iguana.cc.storage.Storage;
+import org.aksw.iguana.cc.worker.HttpWorker;
+import org.aksw.iguana.commons.rdf.IGUANA_BASE;
+import org.aksw.iguana.commons.rdf.IONT;
+import org.aksw.iguana.commons.rdf.IPROP;
+import org.aksw.iguana.commons.rdf.IRES;
+import org.aksw.iguana.commons.time.TimeUtils;
+import org.apache.jena.rdf.model.*;
+import org.apache.jena.vocabulary.RDF;
+import org.apache.jena.vocabulary.RDFS;
+
+import java.math.BigInteger;
+import java.time.ZonedDateTime;
+import java.util.*;
+import java.util.function.Supplier;
+
+public class StresstestResultProcessor {
+
+    private record StartEndTimePair (
+            ZonedDateTime startTime,
+            ZonedDateTime endTime
+    ) {}
+
+    private final List<Metric> metrics;
+    private final List<HttpWorker> workers;
+    private final List<String> queryIDs;
+    private final List<Storage> storages;
+    private final Supplier<Map<LanguageProcessor, List<LanguageProcessor.LanguageProcessingData>>> lpResults;
+
+    /**
+     * This array contains each query execution of a worker grouped to its queries.
+     * The outer array is indexed with the workerID and the inner array with the numeric queryID that the query has
+     * inside that worker.
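+     * <p>
+     * For example, {@code workerQueryExecutions[1][0]} holds all execution statistics that worker 1 produced
+     * for its first query.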
+ */ + private final List[][] workerQueryExecutions; + + /** This map contains each query execution, grouped by each queryID of the task. */ + private final Map<String, List<HttpWorker.ExecutionStats>> taskQueryExecutions; + + + /** Stores the start and end time for each workerID. */ + private final StartEndTimePair[] workerStartEndTime; + + private final IRES.Factory iresFactory; + + + public StresstestResultProcessor(String suiteID, + long taskID, + List worker, + List queryIDs, + List metrics, + List storages, + Supplier<Map<LanguageProcessor, List<LanguageProcessor.LanguageProcessingData>>> lpResults) { + this.workers = worker; + this.queryIDs = queryIDs; + this.storages = storages; + this.metrics = metrics; + this.lpResults = lpResults; + + this.workerQueryExecutions = new ArrayList[workers.size()][]; + for (int i = 0; i < workers.size(); i++) { + this.workerQueryExecutions[i] = new ArrayList[workers.get(i).config().queries().getQueryCount()]; + for (int j = 0; j < workers.get(i).config().queries().getQueryCount(); j++) { + this.workerQueryExecutions[i][j] = new ArrayList<>(); + } + } + + this.taskQueryExecutions = new HashMap<>(); + for (String queryID : queryIDs) { + this.taskQueryExecutions.put(queryID, new ArrayList<>()); + } + + this.iresFactory = new IRES.Factory(suiteID, taskID); + this.workerStartEndTime = new StartEndTimePair[worker.size()]; + } + + /** + * This method stores the given query execution statistics from a worker to the appropriate data structures. + * + * @param data the query execution statistics that should be stored + */ + public void process(Collection data) { + for (HttpWorker.Result result : data) { + for (var stat : result.executionStats()) { + workerQueryExecutions[(int) result.workerID()][stat.queryID()].add(stat); + String queryID = workers.get((int) result.workerID()).config().queries().getQueryId(stat.queryID()); + taskQueryExecutions.get(queryID).add(stat); + } + workerStartEndTime[Math.toIntExact(result.workerID())] = new StartEndTimePair(result.startTime(), result.endTime()); // Naively assumes that there won't be more than Integer.MAX_VALUE workers + } + } + + /** + * This method calculates the metrics and creates the RDF model of the result, which will be sent to the storages. + * It uses the given data that was passed with the 'process' method.
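 + * <p>Illustrative sketch of the kind of model this produces; the exact IRIs come from the {@code IRES} factory and are assumed here:</p> + * <pre> + * ires:suite123          rdf:type    iont:suite ; + *                        iprop:task  ires:suite123/task0 . + * ires:suite123/task0    rdf:type    iont:task , iont:stresstest . + * </pre>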
+ * + * @param start the start date of the task + * @param end the end date of the task + */ + public void calculateAndSaveMetrics(Calendar start, Calendar end) { + Model m = ModelFactory.createDefaultModel().setNsPrefixes(IGUANA_BASE.PREFIX_MAP); + Resource suiteRes = iresFactory.getSuiteResource(); + Resource taskRes = iresFactory.getTaskResource(); + + m.add(suiteRes, RDF.type, IONT.suite); + m.add(suiteRes, IPROP.task, taskRes); + m.add(taskRes, RDF.type, IONT.task); + m.add(taskRes, RDF.type, IONT.stresstest); + m.add(taskRes, IPROP.noOfWorkers, toInfinitePrecisionIntegerLiteral(workers.size())); + + for (HttpWorker worker : workers) { + HttpWorker.Config config = worker.config(); + + Resource workerRes = iresFactory.getWorkerResource(worker); + Resource connectionRes = IRES.getResource(config.connection().name()); + if (config.connection().dataset() != null) { + Resource datasetRes = IRES.getResource(config.connection().dataset().name()); + m.add(connectionRes, IPROP.dataset, datasetRes); + m.add(datasetRes, RDFS.label, ResourceFactory.createTypedLiteral(config.connection().dataset().name())); + m.add(datasetRes, RDF.type, IONT.dataset); + } + + m.add(taskRes, IPROP.workerResult, workerRes); + m.add(workerRes, RDF.type, IONT.worker); + m.add(workerRes, IPROP.workerID, toInfinitePrecisionIntegerLiteral(worker.getWorkerID())); + m.add(workerRes, IPROP.workerType, ResourceFactory.createTypedLiteral(worker.getClass().getSimpleName())); + m.add(workerRes, IPROP.noOfQueries, toInfinitePrecisionIntegerLiteral(config.queries().getQueryCount())); + m.add(workerRes, IPROP.timeOut, TimeUtils.createTypedDurationLiteral(config.timeout())); + if (config.completionTarget() instanceof HttpWorker.QueryMixes) + m.add(workerRes, IPROP.noOfQueryMixes, toInfinitePrecisionIntegerLiteral(((HttpWorker.QueryMixes) config.completionTarget()).number())); + if (config.completionTarget() instanceof HttpWorker.TimeLimit) + m.add(workerRes, IPROP.timeLimit, TimeUtils.createTypedDurationLiteral(((HttpWorker.TimeLimit) config.completionTarget()).duration())); + m.add(workerRes, IPROP.connection, connectionRes); + + m.add(connectionRes, RDF.type, IONT.connection); + m.add(connectionRes, RDFS.label, ResourceFactory.createTypedLiteral(config.connection().name())); + if (config.connection().version() != null) { + m.add(connectionRes, IPROP.version, ResourceFactory.createTypedLiteral(config.connection().version())); + } + } + + // Create Query nodes with their respective queryIDs + for (String queryID : queryIDs) { + Resource queryRes = IRES.getResource(queryID); + m.add(queryRes, RDF.type, IONT.query); + m.add(queryRes, IPROP.fullID, ResourceFactory.createTypedLiteral(queryID)); + m.add(queryRes, IPROP.id, ResourceFactory.createTypedLiteral(BigInteger.valueOf(Long.parseLong(queryID.split(":")[1])))); + } + + // Connect task and workers to the Query nodes, that store the triple stats. 
+ for (var worker : workers) { + var config = worker.config(); + var workerQueryIDs = config.queries().getAllQueryIds(); + for (int i = 0; i < config.queries().getQueryCount(); i++) { + Resource workerQueryRes = iresFactory.getWorkerQueryResource(worker, i); + Resource queryRes = IRES.getResource(workerQueryIDs[i]); + m.add(workerQueryRes, IPROP.queryID, queryRes); + } + + var taskQueryIDs = this.queryIDs.toArray(String[]::new); // make elements accessible by index + for (String taskQueryID : taskQueryIDs) { + Resource taskQueryRes = iresFactory.getTaskQueryResource(taskQueryID); + Resource queryRes = IRES.getResource(taskQueryID); + m.add(taskQueryRes, IPROP.queryID, queryRes); + } + } + + for (Metric metric : metrics) { + m.add(this.createMetricModel(metric)); + } + + // Task to queries + for (String queryID : queryIDs) { + m.add(taskRes, IPROP.query, iresFactory.getTaskQueryResource(queryID)); + } + + for (var worker : workers) { + Resource workerRes = iresFactory.getWorkerResource(worker); + + // Worker to queries + for (int i = 0; i < worker.config().queries().getAllQueryIds().length; i++) { + m.add(workerRes, IPROP.query, iresFactory.getWorkerQueryResource(worker, i)); + } + + // start and end times for the workers + final var timePair = workerStartEndTime[Math.toIntExact(worker.getWorkerID())]; + m.add(workerRes, IPROP.startDate, TimeUtils.createTypedZonedDateTimeLiteral(timePair.startTime)); + m.add(workerRes, IPROP.endDate, TimeUtils.createTypedZonedDateTimeLiteral(timePair.endTime)); + } + + m.add(taskRes, IPROP.startDate, ResourceFactory.createTypedLiteral(start)); + m.add(taskRes, IPROP.endDate, ResourceFactory.createTypedLiteral(end)); + + for (var storage : storages) { + storage.storeResult(m); + } + + // Store results of language processors (this shouldn't throw an error if the map is empty) + for (var languageProcessor: lpResults.get().keySet()) { + for (var data : lpResults.get().get(languageProcessor)) { + for (var storage : storages) { + storage.storeData(data); + } + } + } + } + + /** + * For a given metric this method calculates the metric with the stored data and creates the appropriate + * RDF related to that metric. 
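 + * <p>For example (illustrative value): a {@code TaskMetric} contributes a single triple such as + * {@code taskRes metricProp "123.4"^^xsd:double}, while {@code WorkerMetric} and {@code QueryMetric} + * attach their values to the corresponding worker and query resources, as implemented below.</p>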
+ * + * @param metric the metric that should be calculated + * @return the result model of the metric + */ + private Model createMetricModel(Metric metric) { + Model m = ModelFactory.createDefaultModel(); + Property metricProp = IPROP.createMetricProperty(metric); + Resource metricRes = IRES.getMetricResource(metric); + Resource taskRes = iresFactory.getTaskResource(); + + if (metric instanceof ModelWritingMetric) { + m.add(((ModelWritingMetric) metric).createMetricModel(this.workers, + this.workerQueryExecutions, + this.iresFactory)); + m.add(((ModelWritingMetric) metric).createMetricModel(this.workers, + this.taskQueryExecutions, + this.iresFactory)); + } + + if (metric instanceof TaskMetric) { + Number metricValue = ((TaskMetric) metric).calculateTaskMetric(this.workers, workerQueryExecutions); + if (metricValue != null) { + Literal lit = ResourceFactory.createTypedLiteral(metricValue); + m.add(taskRes, metricProp, lit); + } + m.add(taskRes, IPROP.metric, metricRes); + } + + if (metric instanceof WorkerMetric) { + for (var worker : workers) { + Resource workerRes = iresFactory.getWorkerResource(worker); + Number metricValue = ((WorkerMetric) metric).calculateWorkerMetric( + worker.config(), + workerQueryExecutions[(int) worker.getWorkerID()]); + if (metricValue != null) { + Literal lit = ResourceFactory.createTypedLiteral(metricValue); + m.add(workerRes, metricProp, lit); + } + m.add(workerRes, IPROP.metric, metricRes); + } + } + + if (metric instanceof QueryMetric) { + // queries grouped by worker + for (var worker : workers) { + for (int i = 0; i < worker.config().queries().getQueryCount(); i++) { + Number metricValue = ((QueryMetric) metric).calculateQueryMetric(workerQueryExecutions[(int) worker.getWorkerID()][i]); + if (metricValue != null) { + Literal lit = ResourceFactory.createTypedLiteral(metricValue); + Resource queryRes = iresFactory.getWorkerQueryResource(worker, i); + m.add(queryRes, metricProp, lit); + } + } + } + + // queries grouped by task + for (String queryID : queryIDs) { + Number metricValue = ((QueryMetric) metric).calculateQueryMetric(taskQueryExecutions.get(queryID)); + if (metricValue != null) { + Literal lit = ResourceFactory.createTypedLiteral(metricValue); + Resource queryRes = iresFactory.getTaskQueryResource(queryID); + m.add(queryRes, metricProp, lit); + } + } + } + + m.add(metricRes, RDFS.label, metric.getName()); + m.add(metricRes, RDFS.label, metric.getAbbreviation()); + m.add(metricRes, RDFS.comment, metric.getDescription()); + m.add(metricRes, RDF.type, IONT.getMetricClass(metric)); + m.add(metricRes, RDF.type, IONT.metric); + + return m; + } + + private static Literal toInfinitePrecisionIntegerLiteral(long value) { + return ResourceFactory.createTypedLiteral(BigInteger.valueOf(value)); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/utils/files/FileUtils.java b/src/main/java/org/aksw/iguana/cc/utils/files/FileUtils.java new file mode 100644 index 000000000..cea3b542f --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/utils/files/FileUtils.java @@ -0,0 +1,129 @@ +package org.aksw.iguana.cc.utils.files; + +import java.io.*; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +/** + * Methods to work easier with Files. 
+ * + * @author f.conrads + * + */ +public class FileUtils { + + public static int getHashcodeFromFileContent(Path filepath) { + int hashcode; + try { + String fileContents = readFile(filepath); + hashcode = Math.abs(fileContents.hashCode()); + } catch (IOException e) { + hashcode = 0; + } + return hashcode; + } + + public static String readFile(Path path) throws IOException { + return Files.readString(path, StandardCharsets.UTF_8); + } + + /** + * This method detects and returns the line-ending used in a file.
It reads the whole first line until it detects one of the following line-endings: + * <ul> + *     <li>\r\n - Windows</li> + *     <li>\n - Linux</li> + *     <li>\r - old macOS</li> + * </ul>
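 + * <p>A minimal usage sketch (file name assumed):</p> + * <pre>{@code + * String sep = FileUtils.getLineEnding(Path.of("queries.txt")); // e.g. "\n" for a Unix-style file + * }</pre>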
+ * + * If the file doesn't contain a line ending, it defaults to System.lineSeparator(). + * + * @param filepath this string that contains the path of the file + * @return the line ending used in the given file + * @throws IOException if an I/O error occurs opening the file + */ + public static String getLineEnding(Path filepath) throws IOException { + if (filepath == null) + throw new IllegalArgumentException("Filepath must not be null."); + try(BufferedReader br = Files.newBufferedReader(filepath)) { + char c; + while ((c = (char) br.read()) != (char) -1) { + if (c == '\n') + return "\n"; + else if (c == '\r') { + if ((char) br.read() == '\n') + return "\r\n"; + return "\r"; + } + } + } + + // fall back if there is no line end in the file + return System.lineSeparator(); + } + + private static int[] computePrefixTable(byte[] pattern) { + int[] prefixTable = new int[pattern.length]; + + int prefixIndex = 0; + for (int i = 1; i < pattern.length; i++) { + while (prefixIndex > 0 && pattern[prefixIndex] != pattern[i]) { + prefixIndex = prefixTable[prefixIndex - 1]; + } + + if (pattern[prefixIndex] == pattern[i]) { + prefixIndex++; + } + + prefixTable[i] = prefixIndex; + } + + return prefixTable; + } + + public static List indexStream(String separator, InputStream is) throws IOException { + // basically Knuth-Morris-Pratt + List indices = new ArrayList<>(); + + + final byte[] sepArray = separator.getBytes(StandardCharsets.UTF_8); + final int[] prefixTable = computePrefixTable(sepArray); + + long itemStart = 0; + + long byteOffset = 0; + int patternIndex = 0; + byte[] currentByte = new byte[1]; + while (is.read(currentByte) == 1) { + // skipping fast-forward with the prefixTable + while (patternIndex > 0 && currentByte[0] != sepArray[patternIndex]) { + patternIndex = prefixTable[patternIndex - 1]; + } + + + if (currentByte[0] == sepArray[patternIndex]) { + patternIndex++; + + if (patternIndex == sepArray.length) { // match found + patternIndex = 0; + final long itemEnd = byteOffset - sepArray.length + 1; + final long len = itemEnd - itemStart; + indices.add(new long[]{itemStart, len}); + + itemStart = byteOffset + 1; + } + } + + byteOffset++; + } + + final long itemEnd = byteOffset; + final long len = itemEnd - itemStart; + indices.add(new long[]{itemStart, len}); + + return indices; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/utils/files/IndexedQueryReader.java b/src/main/java/org/aksw/iguana/cc/utils/files/IndexedQueryReader.java new file mode 100644 index 000000000..2e9c84f46 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/utils/files/IndexedQueryReader.java @@ -0,0 +1,156 @@ +package org.aksw.iguana.cc.utils.files; + +import org.apache.commons.io.input.AutoCloseInputStream; +import org.apache.commons.io.input.BoundedInputStream; + +import java.io.*; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.FileChannel; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.*; +import java.util.stream.Collectors; + +/** + * This class creates objects, that index the start positions characters in between two given separators. + * A separator can be, for example "\n", which is the equivalent of indexing every line.
+ * The beginning and the end of the file count as separators too. + *
+ * Empty content in between two separators won't be indexed.
+ * The start positions and the length of each indexed content will be stored in an internal array for later accessing. + */ +public class IndexedQueryReader { + + /** + * This list stores the start position and the length of each indexed content. + */ + private final List indices; + + /** + * The file whose content should be indexed. + */ + private final Path path; + + /** + * Indexes each content in between two of the given separators (including the beginning and end of the file). The + * given separator isn't allowed to be empty. + * + * @param filepath path to the file + * @param separator the separator line that is used in the file (isn't allowed to be empty) + * @return reader to access the indexed content + * @throws IllegalArgumentException the given separator was empty + * @throws IOException + */ + public static IndexedQueryReader makeWithStringSeparator(Path filepath, String separator) throws IOException { + if (separator.isEmpty()) + throw new IllegalArgumentException("Separator for makeWithStringSeparator can not be empty."); + return new IndexedQueryReader(filepath, separator); + } + + /** + * Indexes every bundle of lines inside the file, that are in between two empty lines (including the beginning and + * end of the file).
+ * It uses the doubled line ending of the file as a separator, for example "\n\n". + * + * @param filepath path to the file + * @return reader to access the indexed content + * @throws IOException + */ + public static IndexedQueryReader makeWithEmptyLines(Path filepath) throws IOException { + String lineEnding = FileUtils.getLineEnding(filepath); + return new IndexedQueryReader(filepath, lineEnding + lineEnding); + } + + /** + * Indexes every non-empty line inside the given file. It uses the line ending of the file as a separator. + * + * @param filepath path to the file + * @return reader to access the indexed lines + * @throws IOException + */ + public static IndexedQueryReader make(Path filepath) throws IOException { + return new IndexedQueryReader(filepath, FileUtils.getLineEnding(filepath)); + } + + /** + * Creates an object that indexes each content in between two of the given separators (including the beginning and + * end of the given file).
+ * + * @param filepath path to the file + * @param separator the separator for each query + * @throws IOException + */ + private IndexedQueryReader(Path filepath, String separator) throws IOException { + path = filepath; + indices = indexFile(path, separator); + } + + /** + * Returns the indexed content with the given index. + * + * @param index the index of the searched content + * @return the searched content + * @throws IOException + */ + public String readQuery(int index) throws IOException { + // Indexed queries can't be larger than ~2GB + try (FileChannel channel = FileChannel.open(path, StandardOpenOption.READ)) { + final ByteBuffer buffer = ByteBuffer.allocate((int) indices.get(index)[1]); + final var read = channel.read(buffer, indices.get(index)[0]); + assert read == indices.get(index)[1]; + return new String(buffer.array(), StandardCharsets.UTF_8); + } + } + + public InputStream streamQuery(int index) throws IOException { + return new AutoCloseInputStream( + new BufferedInputStream( + new BoundedInputStream( + Channels.newInputStream( + FileChannel.open(path, StandardOpenOption.READ) + .position(this.indices.get(index)[0] /* offset */)), + this.indices.get(index)[1] /* length */))); + } + + /** + * This method returns a list of strings that contains every indexed content. + * + * @return list of lines + * @throws IOException + */ + public List readQueries() throws IOException { + ArrayList out = new ArrayList<>(); + for (int i = 0; i < indices.size(); i++) { + out.add(this.readQuery(i)); + } + return out; + } + + /** + * Returns the number of indexed content. + * + * @return number of indexed objects + */ + public int size() { + return this.indices.size(); + } + + /** + * Indexes every content in between two of the given separator. The beginning and the end of the file count as + * separators too. 
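 + * <p>Illustrative example (file content assumed): for a file containing {@code "a\nbb\n"} and the separator + * {@code "\n"}, the returned list contains the offset/length pairs {@code [0, 1]} for {@code "a"} and + * {@code [2, 2]} for {@code "bb"}; the empty element after the trailing separator is filtered out.</p>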
+ * + * @param separator the custom separator + * @return the Indexes + * @throws IOException + */ + private static List indexFile(Path filepath, String separator) throws IOException { + try (InputStream fi = Files.newInputStream(filepath, StandardOpenOption.READ); + BufferedInputStream bis = new BufferedInputStream(fi)) { + return FileUtils.indexStream(separator, bis) + .stream().filter((long[] e) -> e[1] > 0 /* Only elements with length > 0 */).collect(Collectors.toList()); + } + } +} diff --git a/src/main/java/org/aksw/iguana/cc/utils/http/RequestFactory.java b/src/main/java/org/aksw/iguana/cc/utils/http/RequestFactory.java new file mode 100644 index 000000000..6966f87f4 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/utils/http/RequestFactory.java @@ -0,0 +1,144 @@ +package org.aksw.iguana.cc.utils.http; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonValue; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.cc.worker.impl.SPARQLProtocolWorker; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.nio.AsyncRequestProducer; +import org.apache.hc.core5.http.nio.entity.BasicAsyncEntityProducer; +import org.apache.hc.core5.http.nio.support.AsyncRequestBuilder; +import org.apache.hc.core5.net.URIBuilder; + +import java.io.IOException; +import java.io.InputStream; +import java.net.URISyntaxException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.util.*; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +/** + * A factory for creating HTTP requests. + * The factory can create requests for different types of HTTP methods and different types of SPARQL queries. + * The factory can also cache requests to avoid creating the same request multiple times. + */ +public final class RequestFactory { + public enum RequestType { + GET_QUERY("get query"), + POST_URL_ENC_QUERY("post url-enc query"), + POST_QUERY("post query"), + POST_URL_ENC_UPDATE("post url-enc update"), + POST_UPDATE("post update"); + + private final String value; + + @JsonCreator + RequestType(String value) { + this.value = Objects.requireNonNullElse(value, "get query"); + } + + @JsonValue + public String value() { + return value; + } + } + + private final RequestType requestType; + private final Map cache = new HashMap<>(); + + public RequestFactory(RequestType requestType) { + this.requestType = requestType; + } + + private static String urlEncode(List parameters) { + return parameters.stream() + .map(e -> e[0] + "=" + URLEncoder.encode(e[1], StandardCharsets.UTF_8)) + .collect(Collectors.joining("&")); + } + + private static String urlEncode(String name, String value) { + return name + "=" + URLEncoder.encode(value, StandardCharsets.UTF_8); + } + + /** + * Builds an HTTP request for a given query. + * If the query has been cached by the query handler, its content will be fully read by the entity producer into a + * byte buffer, which will then be reused on consecutive request executions. + * Cached requests will be sent non-chunked. + * If the query has not been cached by the query handler, the entity producer will use the query stream supplier to + * send the query in chunks. 
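 + * <p>Illustrative example (endpoint assumed): with {@code RequestType.GET_QUERY} and the endpoint + * {@code http://localhost:3030/ds/sparql}, the query {@code SELECT * WHERE {?s ?p ?o}} would be sent + * roughly as {@code GET /ds/sparql?query=SELECT%20*%20WHERE%20%7B%3Fs%20%3Fp%20%3Fo%7D} (encoding approximate).</p>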
+ * + * @param queryHandle the query handle containing the query and its index + * @param connection the connection to send the request to + * @param requestHeader the request header + * @return the request as an AsyncRequestProducer + * @throws URISyntaxException if the URI is invalid + * @throws IOException if the query stream cannot be read + */ + public AsyncRequestProducer buildHttpRequest(QueryHandler.QueryStreamWrapper queryHandle, + ConnectionConfig connection, + String requestHeader) throws URISyntaxException, IOException { + if (queryHandle.cached() && cache.containsKey(queryHandle.index())) + return cache.get(queryHandle.index()); + + AsyncRequestBuilder asyncRequestBuilder; + Supplier queryStreamSupplier; + InputStream queryStream; + + try { + queryStreamSupplier = queryHandle.queryInputStreamSupplier(); + queryStream = queryStreamSupplier.get(); + } catch (RuntimeException e) { + throw new IOException(e); + } + + switch (this.requestType) { + case GET_QUERY -> asyncRequestBuilder = AsyncRequestBuilder.get(new URIBuilder(connection.endpoint()) + .addParameter("query", new String(queryStream.readAllBytes(), StandardCharsets.UTF_8)) + .build() + ); + case POST_URL_ENC_QUERY -> asyncRequestBuilder = AsyncRequestBuilder.post(connection.endpoint()) + // manually set content type, because otherwise the + // entity producer would set it to "application/x-www-form-urlencoded; charset=ISO-8859-1" + .setHeader(HttpHeaders.CONTENT_TYPE, "application/x-www-form-urlencoded") + .setEntity(new BasicAsyncEntityProducer(urlEncode("query", new String(queryStream.readAllBytes(), StandardCharsets.UTF_8)), null, !queryHandle.cached())); + case POST_QUERY -> asyncRequestBuilder = AsyncRequestBuilder.post(connection.endpoint()) + .setEntity(new StreamEntityProducer(queryStreamSupplier, !queryHandle.cached(), "application/sparql-query")); + case POST_URL_ENC_UPDATE -> asyncRequestBuilder = AsyncRequestBuilder.post(connection.endpoint()) + .setHeader(HttpHeaders.CONTENT_TYPE, "application/x-www-form-urlencoded") + .setEntity(new BasicAsyncEntityProducer(urlEncode("update", new String(queryStream.readAllBytes(), StandardCharsets.UTF_8)), null, !queryHandle.cached())); + case POST_UPDATE -> asyncRequestBuilder = AsyncRequestBuilder.post(connection.endpoint()) + .setEntity(new StreamEntityProducer(queryStreamSupplier, !queryHandle.cached(), "application/sparql-update")); + default -> throw new IllegalStateException("Unexpected value: " + this.requestType); + } + + if (requestHeader != null) + asyncRequestBuilder.addHeader("Accept", requestHeader); + if (connection.authentication() != null && connection.authentication().user() != null) + asyncRequestBuilder.addHeader("Authorization", + HttpWorker.basicAuth(connection.authentication().user(), + Optional.ofNullable(connection.authentication().password()).orElse(""))); + + if (queryHandle.cached()) + cache.put(queryHandle.index(), asyncRequestBuilder.build()); + + return asyncRequestBuilder.build(); + } + + /** + * Get a cached request by the index of the query. + * If the request is not in the cache, an IllegalArgumentException is thrown. 
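 + * <p>Call-sequence sketch (illustrative): {@code buildHttpRequest(handle, connection, acceptHeader)} populates + * the cache for query handles marked as cached; afterwards {@code getCachedRequest(handle.index())} returns + * the prebuilt request.</p>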
+ * + * @param index the index of the query + * @return the request as an AsyncRequestProducer + */ + public AsyncRequestProducer getCachedRequest(int index) { + if (!cache.containsKey(index)) + throw new IllegalArgumentException("No request with index " + index + " found in cache."); + return cache.get(index); + } +} diff --git a/src/main/java/org/aksw/iguana/cc/utils/http/StreamEntityProducer.java b/src/main/java/org/aksw/iguana/cc/utils/http/StreamEntityProducer.java new file mode 100644 index 000000000..f05c1c191 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/utils/http/StreamEntityProducer.java @@ -0,0 +1,161 @@ +package org.aksw.iguana.cc.utils.http; + +import org.apache.hc.core5.http.nio.AsyncEntityProducer; +import org.apache.hc.core5.http.nio.DataStreamChannel; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.Set; +import java.util.function.Supplier; + +/** + * An entity producer that produces the entity data from an input stream supplier. + * The entity data can optionally be sent in chunks. + * If the entity data is supposed to be sent non-chunked, the whole stream will be read into a byte buffer. + * The stream supplier should be repeatable, as this producer might be reused multiple times to create the entity data. + */ +public class StreamEntityProducer implements AsyncEntityProducer { + + private static final Logger logger = LoggerFactory.getLogger(StreamEntityProducer.class); + + private final Supplier streamSupplier; + private final boolean chunked; + private final String contentType; + + private ByteBuffer content; // used for non-chunked request, stores the whole content in reusable buffer + + private final static int BUFFER_SIZE = 8192; + private final byte[] buffer = new byte[BUFFER_SIZE]; + + private InputStream currentStream; // used for chunked request, stores the current stream to read from + + /** + * Creates a new entity producer that produces the entity data from the given input stream supplier. 
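 + * <p>A minimal construction sketch (query string assumed, not part of the original sources):</p> + * <pre>{@code + * var producer = new StreamEntityProducer( + *         () -> new ByteArrayInputStream(query.getBytes(StandardCharsets.UTF_8)), + *         false, // non-chunked: the stream is read once into a reusable buffer + *         "application/sparql-update"); + * }</pre>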
+ * + * @param streamSupplier the input stream supplier, should be repeatable + * @param chunked whether the entity data should be sent in chunks + */ + public StreamEntityProducer(Supplier streamSupplier, boolean chunked, String contentType) throws IOException { + this.streamSupplier = streamSupplier; + this.chunked = chunked; + this.contentType = contentType; + + if (!chunked) { + content = ByteBuffer.wrap(streamSupplier.get().readAllBytes()); + } + } + + @Override + public boolean isRepeatable() { + return true; + } + + @Override + public void failed(Exception cause) { + logger.error("Failed to produce entity data", cause); + if (currentStream != null) { + try { + currentStream.close(); + } catch (IOException e) { + logger.error("Failed to close input stream", e); + } + } + } + + @Override + public boolean isChunked() { + return chunked; + } + + @Override + public Set getTrailerNames() { + return null; + } + + @Override + public long getContentLength() { + // if the content length is known (non-chunked request), return it + if (content != null) { + return content.limit(); + } + + // if the content length is unknown (chunked request), return -1 + return -1; + } + + @Override + public String getContentType() { + return contentType; + } + + @Override + public String getContentEncoding() { + return null; + } + + @Override + public void releaseResources() { + if (content != null) { + content.clear(); + } + + if (currentStream != null) { + try { + currentStream.close(); + } catch (IOException e) { + logger.error("Failed to close input stream", e); + } + } + } + + @Override + public int available() { + // If content is not null, it means the whole entity data has been read into the buffer from a stream that was + // taken from the stream supplier and that the content will be sent non-chunked. + // In this case, the remaining bytes in the buffer are returned. + if (content != null) { + return content.remaining(); + } + + // Otherwise, the data is sent in chunks. If there is currently a stream open, from which the data is being read + // from, the available bytes from that stream are returned. 
+ if (currentStream != null) { + try { + return currentStream.available(); + } catch (IOException e) { + logger.error("Failed to get available bytes from input stream", e); + } + } + return 0; + } + + @Override + public void produce(DataStreamChannel channel) throws IOException { + // handling of non-chunked request + if (content != null) { + channel.write(content); + if (!content.hasRemaining()) { + channel.endStream(); + } + return; + } + + // handling of chunked request + if (chunked && currentStream == null) { + currentStream = streamSupplier.get(); + } + + int bytesRead; + while ((bytesRead = currentStream.read(buffer)) > 0) { + ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, 0, bytesRead); + channel.write(byteBuffer); + } + + if (bytesRead == -1) { + channel.endStream(); + } + } +} \ No newline at end of file diff --git a/src/main/java/org/aksw/iguana/cc/worker/HttpWorker.java b/src/main/java/org/aksw/iguana/cc/worker/HttpWorker.java new file mode 100644 index 000000000..6e0242d9a --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/worker/HttpWorker.java @@ -0,0 +1,161 @@ +package org.aksw.iguana.cc.worker; + +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonSubTypes; +import com.fasterxml.jackson.annotation.JsonTypeInfo; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.query.selector.QuerySelector; +import org.aksw.iguana.cc.tasks.impl.Stresstest; +import org.aksw.iguana.cc.worker.impl.SPARQLProtocolWorker; + +import java.net.http.HttpTimeoutException; +import java.time.Duration; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.util.Base64; +import java.util.List; +import java.util.Optional; +import java.util.OptionalLong; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; + +/** + * Interface for the Worker Thread used in the {@link Stresstest} + */ +public abstract class HttpWorker { + + @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, + include = JsonTypeInfo.As.PROPERTY, + property = "type") + @JsonSubTypes({ + @JsonSubTypes.Type(value = SPARQLProtocolWorker.Config.class, name = "SPARQLProtocolWorker"), + }) + public interface Config { + CompletionTarget completionTarget(); + + String acceptHeader(); + + /** + * Returns the number of workers with this configuration that will be started. + * + * @return the number of workers + */ + Integer number(); + + /** + * Determines whether the results should be parsed based on the acceptHeader. 
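 + * <p>E.g. (illustrative): with {@code acceptHeader = "application/sparql-results+json"} and + * {@code parseResults = true}, response bodies are handed to the {@code ResponseBodyProcessor} registered + * for that content type.</p>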
+ * + * @return true if the results should be parsed, false otherwise + */ + Boolean parseResults(); + + QueryHandler queries(); + + ConnectionConfig connection(); + + Duration timeout(); + } + + public record ExecutionStats( + int queryID, + Instant startTime, + Duration duration, // should always exist + Optional httpStatusCode, + OptionalLong contentLength, + OptionalLong responseBodyHash, + Optional error + ) { + public enum END_STATE { + SUCCESS(0), + TIMEOUT(110), // ETIMEDOUT - Connection timed out + HTTP_ERROR(111), // ECONNREFUSED - Connection refused + MISCELLANEOUS_EXCEPTION(1); + + public final int value; + END_STATE(int value) { + this.value = value; + } + } + + public END_STATE endState() { + if (successful()) { + return END_STATE.SUCCESS; + } else if (timeout()) { + return END_STATE.TIMEOUT; + } else if (httpError()) { + return END_STATE.HTTP_ERROR; + } else { + return END_STATE.MISCELLANEOUS_EXCEPTION; + } + } + + public boolean completed() { + return httpStatusCode().isPresent(); + } + + public boolean successful() { + return error.isEmpty() && httpStatusCode.orElse(0) / 100 == 2; + } + + public boolean timeout() { + boolean timeout = false; + if (!successful() && error().isPresent()) { + timeout |= error().get() instanceof java.util.concurrent.TimeoutException; + if (error().get() instanceof ExecutionException exec) { + timeout = exec.getCause() instanceof HttpTimeoutException; + } + } + return timeout; + } + + public boolean httpError() { + if (httpStatusCode.isEmpty()) + return false; + return httpStatusCode().orElse(0) / 100 != 2; + } + + public boolean miscellaneousException() { + return error().isPresent() && !timeout() && !httpError(); + } + } + + public record Result(long workerID, List executionStats, ZonedDateTime startTime, ZonedDateTime endTime) {} + + @JsonTypeInfo(use = JsonTypeInfo.Id.DEDUCTION) + @JsonSubTypes({ + @JsonSubTypes.Type(value = TimeLimit.class), + @JsonSubTypes.Type(value = QueryMixes.class) + }) + sealed public interface CompletionTarget permits TimeLimit, QueryMixes {} + + public record TimeLimit(@JsonProperty(required = true) Duration duration) implements CompletionTarget {} + + public record QueryMixes(@JsonProperty(required = true) int number) implements CompletionTarget {} + + final protected long workerID; + final protected Config config; + final protected ResponseBodyProcessor responseBodyProcessor; + final protected QuerySelector querySelector; + + public HttpWorker(long workerID, ResponseBodyProcessor responseBodyProcessor, Config config) { + this.workerID = workerID; + this.responseBodyProcessor = responseBodyProcessor; + this.config = config; + this.querySelector = this.config.queries().getQuerySelectorInstance(); + } + + public static String basicAuth(String username, String password) { + return "Basic " + Base64.getEncoder().encodeToString((username + ":" + password).getBytes()); + } + + public abstract CompletableFuture start(); + + public Config config() { + return this.config; + } + + public long getWorkerID() { + return this.workerID; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessor.java b/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessor.java new file mode 100644 index 000000000..6dcec479d --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessor.java @@ -0,0 +1,91 @@ +package org.aksw.iguana.cc.worker; + +import org.aksw.iguana.cc.lang.LanguageProcessor; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.InputStream; +import 
java.text.MessageFormat; +import java.time.Duration; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.*; + +public class ResponseBodyProcessor { + public record Config(String contentType, Integer threads, Duration timeout) { + public Config(String contentType, Integer threads, Duration timeout) { + this.contentType = contentType; + this.threads = threads == null ? 1 : threads; + this.timeout = timeout == null ? Duration.ofMinutes(10) : timeout; + } + } + + public record Key(long contentLength, long xxh64) {} + + public ResponseBodyProcessor(Config config) { + this.executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(config.threads == null ? 1 : config.threads); + this.languageProcessor = LanguageProcessor.getInstance(config.contentType); + this.timeout = config.timeout; + } + + public ResponseBodyProcessor(String contentType) { + this(new Config(contentType, null, null)); + } + + private static final Logger LOGGER = LoggerFactory.getLogger(ResponseBodyProcessor.class); + + private final Duration timeout; + + private final ConcurrentHashMap.KeySetView seenResponseBodies = ConcurrentHashMap.newKeySet(); + + private final List responseDataMetrics = Collections.synchronizedList(new ArrayList<>()); + private final LanguageProcessor languageProcessor; + + private final ThreadPoolExecutor executor; + private final ScheduledExecutorService executorHandler = Executors.newScheduledThreadPool(1); + + public boolean add(long contentLength, long xxh64, InputStream responseBodyStream) { + final var key = new Key(contentLength, xxh64); + if (seenResponseBodies.add(key)) { + submit(key, responseBodyStream); + return true; + } + return false; + } + + private void submit(Key key, InputStream responseBodyStream) { + final var future = executor.submit(() -> { + var processingResult = languageProcessor.process(responseBodyStream, key.xxh64); + responseDataMetrics.add(processingResult); + }); + executorHandler.schedule(() -> { + if (!future.isDone()) { + future.cancel(true); + LOGGER.warn("ResponseBodyProcessor timed out for key: {}", key); + } + }, timeout.toSeconds(), TimeUnit.SECONDS); + } + + public List getResponseDataMetrics() { + if (executor.isTerminated()) { + return responseDataMetrics; + } + + LOGGER.info(MessageFormat.format("Shutting down ResponseBodyProcessor with {0} min timeout to finish processing. {1} tasks remaining.", timeout.toMinutes() + "." 
+ (timeout.toSecondsPart() / (double) 60), executor.getQueue().size())); + boolean noTimeout; + try { + executor.shutdown(); + noTimeout = executor.awaitTermination(timeout.toSeconds(), TimeUnit.SECONDS); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + if (noTimeout) LOGGER.info("ResponseBodyProcessor completed."); + else LOGGER.warn("ResponseBodyProcessor timed out."); + return responseDataMetrics; + } + + public LanguageProcessor getLanguageProcessor() { + return this.languageProcessor; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessorInstances.java b/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessorInstances.java new file mode 100644 index 000000000..95be1f0e9 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/worker/ResponseBodyProcessorInstances.java @@ -0,0 +1,44 @@ +package org.aksw.iguana.cc.worker; + +import org.aksw.iguana.cc.lang.LanguageProcessor; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Supplier; + +public class ResponseBodyProcessorInstances { + final private Map processors = new HashMap<>(); + + public ResponseBodyProcessorInstances() {} + + public ResponseBodyProcessorInstances(List configs) { + if (configs == null) return; + for (var config : configs) { + processors.put(config.contentType(), new ResponseBodyProcessor(config)); + } + } + + public ResponseBodyProcessor getProcessor(String contentType) { + if (!processors.containsKey(contentType)) { + processors.put(contentType, new ResponseBodyProcessor(contentType)); + } + return processors.get(contentType); + } + + /** + * Returns a Supplier that returns the results of all ResponseBodyProcessors. A supplier is used for data + * abstraction. + * + * @return supplier for all results + */ + public Supplier>> getResults() { + return () -> { // TODO: consider removing the languageProcessor as the key, it's only used right now for creating strings for naming + Map> out = new HashMap<>(); + for (var processor : processors.values()) { + out.put(processor.getLanguageProcessor(), processor.getResponseDataMetrics()); + } + return out; + }; + } +} diff --git a/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorker.java b/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorker.java new file mode 100644 index 000000000..7745ddb96 --- /dev/null +++ b/src/main/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorker.java @@ -0,0 +1,516 @@ +package org.aksw.iguana.cc.worker.impl; + +import com.fasterxml.jackson.annotation.JsonProperty; +import net.jpountz.xxhash.StreamingXXHash64; +import net.jpountz.xxhash.XXHashFactory; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.query.selector.impl.LinearQuerySelector; +import org.aksw.iguana.cc.utils.http.RequestFactory; +import org.aksw.iguana.cc.worker.ResponseBodyProcessor; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.commons.io.BigByteArrayOutputStream; +import org.aksw.iguana.commons.io.ByteArrayListOutputStream; +import org.aksw.iguana.commons.io.ReversibleOutputStream; +import org.apache.hc.client5.http.async.methods.AbstractBinResponseConsumer; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.impl.DefaultConnectionKeepAliveStrategy; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.impl.async.HttpAsyncClients; +import 
org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder; +import org.apache.hc.client5.http.nio.AsyncClientConnectionManager; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpException; +import org.apache.hc.core5.http.HttpResponse; +import org.apache.hc.core5.http.nio.AsyncRequestProducer; +import org.apache.hc.core5.reactor.IOReactorConfig; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.slf4j.helpers.MessageFormatter; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.ByteBuffer; +import java.time.Duration; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.temporal.ChronoUnit; +import java.util.*; +import java.util.concurrent.*; + +public class SPARQLProtocolWorker extends HttpWorker { + + public record Config( + Integer number, + QueryHandler queries, + CompletionTarget completionTarget, + ConnectionConfig connection, + Duration timeout, + String acceptHeader /* e.g. application/sparql-results+json */, + RequestFactory.RequestType requestType, + Boolean parseResults + ) implements HttpWorker.Config { + public Config(Integer number, + @JsonProperty(required = true) QueryHandler queries, + @JsonProperty(required = true) CompletionTarget completionTarget, + @JsonProperty(required = true) ConnectionConfig connection, + @JsonProperty(required = true) Duration timeout, + String acceptHeader, + RequestFactory.RequestType requestType, + Boolean parseResults) { + this.number = number == null ? 1 : number; + this.queries = queries; + this.completionTarget = completionTarget; + this.connection = connection; + this.timeout = timeout; + this.acceptHeader = acceptHeader; + this.requestType = requestType == null ? RequestFactory.RequestType.GET_QUERY : requestType; + this.parseResults = parseResults == null || parseResults; + } + } + + record HttpExecutionResult( + int queryID, + Optional response, + Instant requestStart, + Duration duration, + Optional outputStream, + OptionalLong actualContentLength, + OptionalLong hash, + Optional exception + ) { + public boolean completed() { + return response.isPresent(); + } + + public boolean successful() { + if (response.isPresent() && exception.isEmpty()) + return (response.get().getCode() / 100) == 2; + return false; + } + } + + + private static CloseableHttpAsyncClient httpClient; + private static AsyncClientConnectionManager connectionManager; + private final ThreadPoolExecutor executor; + + private final XXHashFactory hasherFactory = XXHashFactory.fastestJavaInstance(); + private final RequestFactory requestFactory; + + private final ResponseBodyProcessor responseBodyProcessor; + + // used to read the http response body + private final byte[] buffer = new byte[BUFFER_SIZE]; + private static final int BUFFER_SIZE = 4096; + + private final static Logger LOGGER = LoggerFactory.getLogger(SPARQLProtocolWorker.class); + + @Override + public Config config() { + return (SPARQLProtocolWorker.Config) config; + } + + public SPARQLProtocolWorker(long workerId, ResponseBodyProcessor responseBodyProcessor, Config config) { + super(workerId, responseBodyProcessor, config); + this.responseBodyProcessor = responseBodyProcessor; + this.executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(1); + this.requestFactory = new RequestFactory(config().requestType()); + } + + /** + * Initializes the http client with the given thread count. + * All workers will use the same http client instance. 
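 + * <p>Lifecycle sketch, as used by the {@code Stresstest} task: call {@code initHttpClient(workers.size())}, + * execute the workers, then {@code closeHttpClient()}.</p>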
+ * + * @param threadCount the number of threads to be used by the http client + */ + public static void initHttpClient(int threadCount) { + connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() + .setMaxConnTotal(threadCount * 1000) + .setMaxConnPerRoute(threadCount * 1000) + .build(); + final var ioReactorConfig = IOReactorConfig.custom() + .setTcpNoDelay(true) + .setSoKeepAlive(true) + .build(); + httpClient = HttpAsyncClients.custom() + .setConnectionManager(connectionManager) + .setIOReactorConfig(ioReactorConfig) + .setKeepAliveStrategy(new DefaultConnectionKeepAliveStrategy()) + .setDefaultRequestConfig(RequestConfig.custom() + .setContentCompressionEnabled(false) + .setHardCancellationEnabled(true) + .build()) + .build(); + httpClient.start(); + } + + /** + * Closes the http client and the connection manager. + */ + public static void closeHttpClient() { + try { + httpClient.close(); + connectionManager.close(); + } catch (IOException e) { + LOGGER.error("Failed to close http client.", e); + } + } + + /** + * Builds every request once, so that the requests can be loaded into the cache if the queries themselves are + * cached. + * This is done to avoid the overhead of building (url-encoding) the requests during the benchmark. + */ + private void preloadRequests() { + final var selector = new LinearQuerySelector(config().queries().getQueryCount()); + for (int i = 0; i < config().queries().getQueryCount(); i++) { + try { + // build request and discard it + requestFactory.buildHttpRequest(config().queries().getNextQueryStream(selector), config().connection(), config().acceptHeader()); + } catch (IOException | URISyntaxException e) { + LOGGER.error("Failed to preload request.", e); + } + } + } + + /** + * Starts the worker and returns a CompletableFuture, which will be completed when the worker has finished its + * completion target. The CompletableFuture will contain a Result object, which contains the execution stats of the + * worker. The execution stats contain the execution time, the http status code, the content length and the hash of + * the response body. If the worker failed to execute a query, the execution stats will contain an exception. + * If the worker failed to execute a query because of a set time limit in the worker configuration, the result + * of that execution will be discarded. + * + * @return the CompletableFuture that contains the results of the worker.
+ */ + public CompletableFuture start() { + preloadRequests(); + return CompletableFuture.supplyAsync(() -> { + ZonedDateTime startTime = ZonedDateTime.now(); + List executionStats = new ArrayList<>(); + if (config().completionTarget() instanceof QueryMixes queryMixes) { + for (int i = 0; i < queryMixes.number(); i++) { + for (int j = 0; j < config().queries().getQueryCount(); j++) { + ExecutionStats execution = executeQuery(config().timeout(), false); + if (execution == null) throw new RuntimeException("Execution returned null at a place, where it should have never been null."); + logExecution(execution); + executionStats.add(execution); + } + LOGGER.info("{}\t:: Completed {} out of {} querymixes.", this, i + 1, queryMixes.number()); + } + } else if (config().completionTarget() instanceof TimeLimit timeLimit) { + final var startNanos = System.nanoTime(); + long queryExecutionCount = 0; + int queryMixExecutionCount = 0; + int queryMixSize = config().queries().getQueryCount(); + long now; + while ((now = System.nanoTime()) - startNanos < ((TimeLimit) config.completionTarget()).duration().toNanos()) { + final var timeLeft = ((TimeLimit) config.completionTarget()).duration().toNanos() - (now - startNanos); + final var reducedTimeout = timeLeft < config.timeout().toNanos(); + final Duration actualQueryTimeOut = reducedTimeout ? Duration.of(timeLeft, ChronoUnit.NANOS) : config.timeout(); + ExecutionStats execution = executeQuery(actualQueryTimeOut, reducedTimeout); + if (execution != null){ // If timeout is reduced, the execution result might be discarded if it failed and executeQuery returns null. + logExecution(execution); + executionStats.add(execution); + } + + if ((++queryExecutionCount) >= queryMixSize) { + queryExecutionCount = 0; + queryMixExecutionCount++; + LOGGER.info("{}\t:: Completed {} querymixes.", this, queryMixExecutionCount); + } + } + LOGGER.info("{}\t:: Reached time limit of {}.", this, timeLimit.duration()); + } + ZonedDateTime endTime = ZonedDateTime.now(); + return new Result(this.workerID, executionStats, startTime, endTime); + }, executor); + } + + /** + * Executes the next query given by the query selector from the query handler. If the execution fails and + * discardOnFailure is true, the execution will be discarded and null will be returned. If the execution fails and + * discardOnFailure is false, the execution statistic with the failed results will be returned. + * + * @param timeout the timeout for the execution + * @param discardOnFailure if true, this method will return null, if the execution fails + * @return the execution statistic of the execution + */ + private ExecutionStats executeQuery(Duration timeout, boolean discardOnFailure) { + // execute the request + HttpExecutionResult result = executeHttpRequest(timeout); + + // process result + Optional statuscode = Optional.empty(); + if (result.response().isPresent()) + statuscode = Optional.of(result.response().get().getCode()); + + if (result.successful() && this.config.parseResults()) { // 2xx + if (result.actualContentLength.isEmpty() || result.hash.isEmpty() || result.outputStream.isEmpty()) { + throw new RuntimeException("Response body is null, but execution was successful."); // This should never happen, just here for fixing the warning. 
+ } + + // process result + responseBodyProcessor.add(result.actualContentLength().getAsLong(), result.hash().getAsLong(), result.outputStream.get().toInputStream()); + } + + if (!result.successful() && discardOnFailure) { + LOGGER.debug("{}\t:: Discarded execution, because the time limit has been reached: [queryID={}]", this, result.queryID); + return null; + } + + return new ExecutionStats( + result.queryID(), + result.requestStart(), + result.duration(), + statuscode, + result.actualContentLength(), + result.hash, + result.exception() + ); + } + + /** + * Executes the next query given by the query selector from the query handler. + * It uses the http client to execute the request and returns the result of the execution. + * + * @param timeout the timeout for the execution + * @return the execution result of the execution + */ + private HttpExecutionResult executeHttpRequest(Duration timeout) { + // get the next query and request + final AsyncRequestProducer request; + final int queryIndex; + if (config().queries().getConfig().caching()) { + queryIndex = querySelector.getNextIndex(); + request = requestFactory.getCachedRequest(queryIndex); + } else { + final QueryHandler.QueryStreamWrapper queryHandle; + try { + queryHandle = config().queries().getNextQueryStream(this.querySelector); + } catch (IOException e) { + return createFailedResultBeforeRequest(this.querySelector.getCurrentIndex(), e); + } + + try { + request = requestFactory.buildHttpRequest( + queryHandle, + config().connection(), + config().acceptHeader() + ); + } catch (IOException | URISyntaxException e) { + return createFailedResultBeforeRequest(queryHandle.index(), e); + } + + // set queryIndex to the index of the queryHandle, so that the result can be associated with the query + queryIndex = queryHandle.index(); + } + + // execute the request + final Instant timeStamp = Instant.now(); + final var requestStart = System.nanoTime(); + final var future = httpClient.execute(request, new AbstractBinResponseConsumer() { + + private HttpResponse response; + private final StreamingXXHash64 hasher = hasherFactory.newStreamingHash64(0); + private long responseSize = 0; // will be used if parseResults is false + private long responseEnd = 0; // time in nanos + private ReversibleOutputStream responseBody = null; + + @Override + public void releaseResources() {} // nothing to release + + @Override + protected int capacityIncrement() { + return Integer.MAX_VALUE - 8; // get as much data in as possible + } + + /** + * Triggered to pass incoming data packet to the data consumer. + * + * @param src the data packet. + * @param endOfStream flag indicating whether this data packet is the last in the data stream. 
+ */ + @Override + protected void data(ByteBuffer src, boolean endOfStream) throws IOException { + if (endOfStream) + responseEnd = System.nanoTime(); + + if (responseBody == null) + responseBody = new ByteArrayListOutputStream(); + + responseSize += src.remaining(); + if (config.parseResults()) { + // if the buffer uses an array, use the array directly + if (src.hasArray()) { + hasher.update(src.array(), src.position() + src.arrayOffset(), src.remaining()); + responseBody.write(src.array(), src.position() + src.arrayOffset(), src.remaining()); + } else { // otherwise, copy the buffer to an array + int readCount; + while (src.hasRemaining()) { + readCount = Math.min(BUFFER_SIZE, src.remaining()); + src.get(buffer, 0, readCount); + hasher.update(buffer, 0, readCount); + responseBody.write(buffer, 0, readCount); + } + } + } + } + + /** + * Triggered to signal the beginning of response processing. + * + * @param response the response message head + * @param contentType the content type of the response body, + * or {@code null} if the response does not enclose a response entity. + */ + @Override + protected void start(HttpResponse response, ContentType contentType) { + this.response = response; + final var contentLengthHeader = response.getFirstHeader("Content-Length"); + Long contentLength = contentLengthHeader != null ? Long.parseLong(contentLengthHeader.getValue()) : null; + // if the content length is known, create a BigByteArrayOutputStream with the known length + if (contentLength != null && responseBody == null && config.parseResults()) { + responseBody = new BigByteArrayOutputStream(contentLength); + } + } + + /** + * Triggered to generate an object that represents a result of response message processing. + * + * @return the result of response message processing + */ + @Override + protected HttpExecutionResult buildResult() { + // if the responseEnd hasn't been set yet, set it to the current time + if (responseEnd == 0) + responseEnd = System.nanoTime(); + + // duration of the execution + final var duration = Duration.ofNanos(responseEnd - requestStart); + + // check for http error + if (response.getCode() / 100 != 2) { + return createFailedResultDuringResponse(queryIndex, response, timeStamp, duration, null); + } + + // check content length + final var contentLengthHeader = response.getFirstHeader("Content-Length"); + Long contentLength = contentLengthHeader != null ? Long.parseLong(contentLengthHeader.getValue()) : null; + if (contentLength != null) { + if ((!config.parseResults() && responseSize != contentLength) // if parseResults is false, the responseSize will be used + || (config.parseResults() && responseBody.size() != contentLength)) { // if parseResults is true, the size of the bbaos will be used + if (responseSize != responseBody.size()) + LOGGER.error("Error during copying the response data. (expected written data size = {}, actual written data size = {}, Content-Length-Header = {})", responseSize, responseBody.size(), contentLengthHeader.getValue()); + final var exception = new HttpException(String.format("Content-Length header value doesn't match actual content length. (Content-Length-Header = %s, written data size = %s)", contentLength, config.parseResults() ? 
responseBody.size() : responseSize)); + return createFailedResultDuringResponse(queryIndex, response, timeStamp, duration, exception); + } + } + + // check timeout + if (duration.compareTo(timeout) > 0) { + return createFailedResultDuringResponse(queryIndex, response, timeStamp, duration, new TimeoutException()); + } + + // return successful result + return new HttpExecutionResult( + queryIndex, + Optional.of(response), + timeStamp, + Duration.ofNanos(responseEnd - requestStart), + Optional.of(responseBody), + OptionalLong.of(config.parseResults() ? responseBody.size() : responseSize), + OptionalLong.of(config.parseResults() ? hasher.getValue() : 0), + Optional.empty() + ); + } + }, null); // the callback is used to handle the end state of the request, but it's not needed here + + try { + // Wait for the request to finish, but don't wait longer than the timeout. + // The timeout from the configuration is used instead of the timeout from the parameter. + // The timeout from the parameter might be reduced if the end of the time limit is near + // and it might be so small that it causes issues. + return future.get(config.timeout().toNanos(), TimeUnit.NANOSECONDS); + } catch (InterruptedException | ExecutionException e) { + // This will close the connection and cancel the request if it's still running. + future.cancel(true); + return createFailedResultBeforeRequest(queryIndex, e); + } catch (TimeoutException e) { + if (future.isDone()) { + LOGGER.warn("Request finished immediately after timeout but will still be counted as timed out."); + try { + return future.get(); + } catch (InterruptedException | ExecutionException ex) { + return createFailedResultBeforeRequest(queryIndex, ex); + } + } else { + future.cancel(true); + return createFailedResultBeforeRequest(queryIndex, e); + } + } + } + + /** + * Creates a failed result for a query execution that failed before the request. + * + * @param queryIndex the index of the query + * @param e the exception that caused the error + * @return the failed result + */ + private static HttpExecutionResult createFailedResultBeforeRequest(int queryIndex, Exception e) { + return new HttpExecutionResult( + queryIndex, + Optional.empty(), + Instant.now(), + Duration.ZERO, + Optional.empty(), + OptionalLong.empty(), + OptionalLong.empty(), + Optional.ofNullable(e) + ); + } + + /** + * Creates a failed result for a query execution that failed during the response. 
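The try/catch around future.get() above implements a wait-then-cancel pattern: block for at most the configured timeout, re-check isDone() to catch the race where the response lands just after the timer fires, and otherwise cancel(true) to abort the exchange. A standalone sketch of the same control flow using a plain ExecutorService (the sleeping task and the 100 ms limit are placeholder values):

```java
import java.util.concurrent.*;

public class BoundedWaitSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        Future<String> future = pool.submit(() -> {
            Thread.sleep(5_000); // pretend this is a slow HTTP exchange
            return "response";
        });

        try {
            // wait no longer than the configured timeout, as above
            String result = future.get(100, TimeUnit.MILLISECONDS);
            System.out.println("finished: " + result);
        } catch (ExecutionException e) {
            future.cancel(true); // the task itself failed
        } catch (TimeoutException e) {
            if (future.isDone()) {
                // rare race: the task completed right after the timeout fired
                System.out.println("finished immediately after timeout");
            } else {
                future.cancel(true); // interrupt the still-running task
                System.out.println("cancelled after timeout");
            }
        } finally {
            pool.shutdownNow();
        }
    }
}
```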
+ * + * @param queryIndex the index of the query + * @param response the response of the query + * @param timestamp the start time of the query + * @param duration the duration of the query until error + * @param e the exception that caused the error, can be null + * @return the failed result + */ + private static HttpExecutionResult createFailedResultDuringResponse( + int queryIndex, + HttpResponse response, + Instant timestamp, + Duration duration, + Exception e) { + return new HttpExecutionResult( + queryIndex, + Optional.ofNullable(response), + timestamp, + duration, + Optional.empty(), + OptionalLong.empty(), + OptionalLong.empty(), + Optional.ofNullable(e) + ); + } + + private void logExecution(ExecutionStats execution) { + switch (execution.endState()) { + case SUCCESS -> LOGGER.debug("{}\t:: Successfully executed query: [queryID={}].", this, execution.queryID()); + case TIMEOUT -> LOGGER.warn("{}\t:: Timeout during query execution: [queryID={}, duration={}].", this, execution.queryID(), execution.duration()); // TODO: look for a possibility to add the query string for better logging + case HTTP_ERROR -> LOGGER.warn("{}\t:: HTTP Error occurred during query execution: [queryID={}, httpError={}].", this, execution.queryID(), execution.httpStatusCode().orElse(-1)); + case MISCELLANEOUS_EXCEPTION -> LOGGER.warn("{}\t:: Miscellaneous exception occurred during query execution: [queryID={}, exception={}].", this, execution.queryID(), execution.error().orElse(null)); + } + } + + @Override + public String toString() { + return MessageFormatter.format("[{}-{}]", SPARQLProtocolWorker.class.getSimpleName(), this.workerID).getMessage(); + } +} diff --git a/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java b/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java new file mode 100644 index 000000000..1559fd7f1 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/io/BigByteArrayInputStream.java @@ -0,0 +1,163 @@ +package org.aksw.iguana.commons.io; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Objects; + +import static java.lang.Math.min; + +public class BigByteArrayInputStream extends InputStream { + + final private BigByteArrayOutputStream bbaos; + + private byte[] currentBuffer; + private int currentBufferSize = -1; + private int posInCurrentBuffer = 0; + + private boolean ended = true; + + public BigByteArrayInputStream(byte[] bytes) throws IOException { + bbaos = new BigByteArrayOutputStream(); + bbaos.write(bytes); + activateNextBuffer(); + } + + /** + * The given bbaos will be closed, when read from it. 
+ *
+ * @param bbaos the output stream whose buffers will be consumed
+ */
+ public BigByteArrayInputStream(BigByteArrayOutputStream bbaos) {
+ this.bbaos = bbaos;
+ activateNextBuffer();
+ }
+
+
+ @Override
+ public int read() throws IOException {
+ this.bbaos.close();
+
+ if (ended) return -1;
+ final var ret = currentBuffer[posInCurrentBuffer++];
+ if (availableBytes() == 0)
+ activateNextBuffer();
+ return ret & 0xFF; // convert byte (-128...127) to (0...255)
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ this.bbaos.close();
+ Objects.checkFromIndexSize(off, len, b.length);
+
+ if (ended) return -1;
+
+ final var copyLength1 = min(availableBytes(), len);
+ System.arraycopy(currentBuffer, posInCurrentBuffer, b, off, copyLength1);
+ posInCurrentBuffer += copyLength1;
+ off += copyLength1;
+ if (availableBytes() == 0)
+ activateNextBuffer();
+
+ // check if b is already filled up or if there is nothing left to read
+ if (copyLength1 == len || ended) return copyLength1;
+
+ // there might be the rare case where reading one additional baos is not enough to fill the buffer,
+ // because there are different array size limitations across different JVMs
+ final var copyLength2 = min(availableBytes(), len - copyLength1);
+ System.arraycopy(currentBuffer, posInCurrentBuffer, b, off, copyLength2);
+ posInCurrentBuffer += copyLength2;
+
+ if (availableBytes() == 0)
+ activateNextBuffer();
+
+ return copyLength1 + copyLength2;
+ }
+
+ @Override
+ public int readNBytes(byte[] b, int off, int len) throws IOException {
+ this.bbaos.close();
+ Objects.checkFromIndexSize(off, len, b.length);
+
+ if (ended) return 0;
+
+ final var copyLength1 = min(availableBytes(), len);
+ System.arraycopy(currentBuffer, posInCurrentBuffer, b, off, copyLength1);
+ posInCurrentBuffer += copyLength1;
+ off += copyLength1;
+ if (availableBytes() == 0)
+ activateNextBuffer();
+
+ // check if b is already filled up or if there is nothing left to read
+ if (copyLength1 == len || ended) return copyLength1;
+
+ // there might be the rare case where reading one additional baos is not enough to fill the buffer,
+ // because there are different array size limitations across different JVMs
+ final var copyLength2 = min(availableBytes(), len - copyLength1);
+ System.arraycopy(currentBuffer, posInCurrentBuffer, b, off, copyLength2);
+ posInCurrentBuffer += copyLength2;
+
+ if (availableBytes() == 0)
+ activateNextBuffer();
+
+ return copyLength1 + copyLength2;
+ }
+
+ @Override
+ public byte[] readAllBytes() throws IOException {
+ throw new IOException("Reading all bytes from a BigByteArrayInputStream is prohibited because it might exceed the array capacity");
+ }
+
+ @Override
+ public long skip(long n) throws IOException {
+ if (n <= 0) return 0;
+ long skipped = 0;
+ while (skipped < n) {
+ long thisSkip = min(availableBytes(), n - skipped);
+ skipped += thisSkip;
+ posInCurrentBuffer += (int) thisSkip; // lossless conversion: thisSkip is at most availableBytes(), which is an int
+ if (availableBytes() == 0)
+ if (!activateNextBuffer())
+ return skipped;
+ }
+ return skipped;
+ }
+
+ /**
+ * Activates the next buffer of the underlying BigByteArrayOutputStream.
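A short round trip through the two classes, assuming both are on the classpath as added by this patch (the payload is arbitrary): data written to a BigByteArrayOutputStream can be streamed back out through BigByteArrayInputStream, which drains and releases each underlying buffer as it goes.

```java
import org.aksw.iguana.commons.io.BigByteArrayInputStream;
import org.aksw.iguana.commons.io.BigByteArrayOutputStream;

import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class BigByteArrayRoundTrip {
    public static void main(String[] args) throws Exception {
        BigByteArrayOutputStream out = new BigByteArrayOutputStream();
        out.write("hello big buffers".getBytes(StandardCharsets.UTF_8));

        // the input stream closes the output stream it wraps as soon as it is read from
        InputStream in = new BigByteArrayInputStream(out);
        byte[] chunk = new byte[8];
        int n;
        StringBuilder sb = new StringBuilder();
        while ((n = in.read(chunk, 0, chunk.length)) != -1) {
            sb.append(new String(chunk, 0, n, StandardCharsets.UTF_8));
        }
        System.out.println(sb); // hello big buffers
    }
}
```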
+ * + * @return true if the next buffer was activated, false if there are no more buffers available + */ + private boolean activateNextBuffer() { + // check if another buffer is available + if (bbaos.getBaos().isEmpty()) { + currentBuffer = null; // release memory + currentBufferSize = 0; + posInCurrentBuffer = 0; + ended = true; + return false; + } + + // activate next buffer + currentBuffer = bbaos.getBaos().get(0).getBuffer(); + currentBufferSize = bbaos.getBaos().get(0).size(); + posInCurrentBuffer = 0; + + // remove the current buffer from the list to save memory + bbaos.getBaos().remove(0); + + // check if the new buffer contains anything + if (currentBuffer.length == 0) + return ended = activateNextBuffer(); + ended = false; + return true; + } + + /** + * Returns the number of available bytes in the current buffer. + * + * @return the number of available bytes in the current buffer + */ + private int availableBytes() { + return currentBufferSize - posInCurrentBuffer; + } +} diff --git a/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java b/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java new file mode 100644 index 000000000..02ee4f446 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/io/BigByteArrayOutputStream.java @@ -0,0 +1,210 @@ +package org.aksw.iguana.commons.io; + +import org.apache.hadoop.hbase.io.ByteArrayOutputStream; + +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.stream.IntStream; + +/** + * This class represents a ByteArrayOutputStream that can hold a large amount of byte data. + * It is designed to overcome the limitations of the standard ByteArrayOutputStream, which + * has a fixed internal byte array and can run into out of memory errors when trying to write + * a large amount of data. + *

+ * The BigByteArrayOutputStream works by using an ArrayList of ByteArrayOutputStreams to store + * the byte data. When the current ByteArrayOutputStream fills up, a new one is created with the + * maximum array size (Integer.MAX_VALUE - 8) as its initial capacity and added to the list. + * Writing data to the stream involves writing to the current active ByteArrayOutputStream. When + * the stream is cleared, all the internal ByteArrayOutputStreams are cleared and a new one is + * added to the list. + */ +public class BigByteArrayOutputStream extends ReversibleOutputStream { + + /** + * The maximum size limit for an array. This is no limit to the amount of bytes {@code BigByteArrayOutputStream} can consume. + */ + public final static int ARRAY_SIZE_LIMIT = Integer.MAX_VALUE - 8; + + /** + * Holds a list of ByteArrayOutputStream objects. + */ + private final List baosList; + + /** + * The index of a ByteArrayOutputStream in the List baosList. + */ + private int baosListIndex; + + /** + * Represents the current ByteArrayOutputStream used for writing data. + */ + private ByteArrayOutputStream currentBaos; + + private boolean closed = false; + + /** + * Initializes a new instance of the BigByteArrayOutputStream class with default buffer size. + */ + public BigByteArrayOutputStream() { + baosList = new ArrayList<>(); + baosList.add(new ByteArrayOutputStream()); + try { + reset(); + } catch (IOException ignored) {} + } + + /** + * Initializes a new instance of the BigByteArrayOutputStream class with buffer size. + * + * @param bufferSize initial guaranteed buffer size + */ + public BigByteArrayOutputStream(int bufferSize) { + if (bufferSize < 0) + throw new IllegalArgumentException("Negative initial size: " + bufferSize); + baosList = new ArrayList<>(1); + baosList.add(new ByteArrayOutputStream(bufferSize)); + try { + reset(); + } catch (IOException ignored) {} + } + + /** + * Initializes a new instance of the BigByteArrayOutputStream class with buffer size. 
+ *
+ * @param bufferSize initial guaranteed buffer size
+ */
+ public BigByteArrayOutputStream(long bufferSize) {
+ if (bufferSize < 0)
+ throw new IllegalArgumentException("Negative initial size: " + bufferSize);
+ if (bufferSize <= ARRAY_SIZE_LIMIT) {
+ baosList = new ArrayList<>(1);
+ baosList.add(new ByteArrayOutputStream((int) bufferSize));
+ } else {
+ final var requiredBaoss = (int) ((bufferSize - 1) / ARRAY_SIZE_LIMIT) + 1; // the -1 prevents creating a full-sized but empty baos at the end when the buffer size is a multiple of ARRAY_SIZE_LIMIT
+ baosList = new ArrayList<>(requiredBaoss);
+ IntStream.range(0, requiredBaoss).forEachOrdered(i -> baosList.add(new ByteArrayOutputStream(ARRAY_SIZE_LIMIT)));
+ }
+ try {
+ reset();
+ } catch (IOException ignored) {}
+ }
+
+
+ public List<ByteArrayOutputStream> getBaos() {
+ return baosList;
+ }
+
+ public void write(BigByteArrayOutputStream bbaos) throws IOException {
+ write(bbaos.toByteArray());
+ }
+
+ @Override
+ public long size() {
+ return baosList.stream().mapToLong(ByteArrayOutputStream::size).sum();
+ }
+
+ public byte[][] toByteArray() {
+ byte[][] ret = new byte[baosList.size()][];
+ for (int i = 0; i < baosList.size(); i++) {
+ ret[i] = baosList.get(i).toByteArray();
+ }
+ return ret;
+ }
+
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ if (closed) throw new IOException("Tried to write to a closed stream");
+
+ Objects.checkFromIndexSize(off, len, b.length);
+ final var space = ensureSpace();
+ final var writeLength = Math.min(len, space);
+ this.currentBaos.write(b, off, writeLength);
+ final var remainingBytes = len - writeLength;
+ if (remainingBytes > 0) {
+ ensureSpace();
+ this.currentBaos.write(b, off + writeLength, remainingBytes);
+ }
+ }
+
+ public void write(byte[][] byteArray) throws IOException {
+ for (byte[] arr : byteArray) {
+ write(arr);
+ }
+ }
+
+ public void write(byte b) throws IOException {
+ if (closed) throw new IOException("Tried to write to a closed stream");
+
+ ensureSpace();
+ this.currentBaos.write(b);
+ }
+
+ @Override
+ public void write(int i) throws IOException {
+ if (closed) throw new IOException("Tried to write to a closed stream");
+
+ ensureSpace();
+ this.currentBaos.write(i);
+ }
+
+
+ private int ensureSpace() {
+ var space = ARRAY_SIZE_LIMIT - currentBaos.size();
+ if (space == 0) {
+ space = ARRAY_SIZE_LIMIT;
+ if (baosListIndex == baosList.size() - 1) {
+ baosListIndex++;
+ currentBaos = new ByteArrayOutputStream(ARRAY_SIZE_LIMIT);
+ baosList.add(currentBaos);
+ } else {
+ baosListIndex++;
+ currentBaos = baosList.get(baosListIndex);
+ currentBaos.reset();
+ }
+ }
+ return space;
+ }
+
+ /**
+ * Resets the state of the object by setting the baosListIndex to zero
+ * and assigning the first ByteArrayOutputStream in the baosList to the
+ * currentBaos variable. No {@link ByteArrayOutputStream}s are actually removed.
+ */
+ public void reset() throws IOException {
+ if (closed) throw new IOException("Tried to reset to a closed stream");
+
+ currentBaos = baosList.get(baosListIndex = 0);
+ for (var baos : baosList) {
+ baos.reset();
+ }
+ }
+
+ /**
+ * Clears the state of the object by removing all {@link ByteArrayOutputStream}s
+ * from the baosList except for the first one. The baosListIndex is set back to 0
+ * and the currentBaos variable is reassigned to the first ByteArrayOutputStream
+ * in the baosList, which is then reset.
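The long constructor above pre-allocates ceil(bufferSize / ARRAY_SIZE_LIMIT) internal streams; writing the division as (bufferSize - 1) / ARRAY_SIZE_LIMIT + 1 avoids a trailing empty stream when bufferSize is an exact multiple of the limit. A quick standalone check of that arithmetic:

```java
public class RequiredBuffersSketch {
    static final int ARRAY_SIZE_LIMIT = Integer.MAX_VALUE - 8;

    // same ceiling division as in the long constructor above
    static int requiredBuffers(long bufferSize) {
        return (int) ((bufferSize - 1) / ARRAY_SIZE_LIMIT) + 1;
    }

    public static void main(String[] args) {
        System.out.println(requiredBuffers(1L));                    // 1
        System.out.println(requiredBuffers(ARRAY_SIZE_LIMIT));      // exactly one full buffer -> 1, not 2
        System.out.println(requiredBuffers(ARRAY_SIZE_LIMIT + 1L)); // 2
        System.out.println(requiredBuffers(3L * ARRAY_SIZE_LIMIT)); // 3, not 4
    }
}
```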
+ */ + public void clear() throws IOException { + if (closed) throw new IOException("Tried to clear to a closed stream"); + + if (baosList.size() > 1) + baosList.subList(1, this.baosList.size()).clear(); + currentBaos = baosList.get(baosListIndex = 0); + currentBaos.reset(); + } + + @Override + public void close() throws IOException { + this.closed = true; + } + + @Override + public InputStream toInputStream() { + return new BigByteArrayInputStream(this); + } +} \ No newline at end of file diff --git a/src/main/java/org/aksw/iguana/commons/io/ByteArrayListInputStream.java b/src/main/java/org/aksw/iguana/commons/io/ByteArrayListInputStream.java new file mode 100644 index 000000000..813e77161 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/io/ByteArrayListInputStream.java @@ -0,0 +1,163 @@ +package org.aksw.iguana.commons.io; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * An InputStream that reads from a list of byte arrays. + */ +public class ByteArrayListInputStream extends InputStream { + + private final List data; + private Iterator iterator; + private ByteBuffer currentBuffer; + private boolean closed = false; + + /** + * Creates a new ByteArrayListInputStream that reads from the given list of byte arrays. + * The list is not copied, so it should not be modified while the stream is in use. + * + * @param data the list of byte arrays to read from + */ + public ByteArrayListInputStream(List data) { + this.data = data; + this.iterator = data.iterator(); + if (iterator.hasNext()) { + this.currentBuffer = ByteBuffer.wrap(iterator.next()); + } else { + this.currentBuffer = null; + } + } + + private boolean checkBuffer() { + if (currentBuffer != null && currentBuffer.hasRemaining()) { + return true; + } + if (!iterator.hasNext()) { + return false; + } + currentBuffer = ByteBuffer.wrap(iterator.next()); + return true; + } + + private void checkNotClosed() throws IOException { + if (closed) { + throw new IOException("Stream closed"); + } + } + + private int read(byte[] b, int off, int len, int eofCode) throws IOException { + Objects.checkFromIndexSize(off, len, b.length); + if (!checkBuffer()) + return eofCode; + + int read = 0; + int remaining = len; + int bufferRemaining; + while (remaining > 0 && checkBuffer()) { + bufferRemaining = currentBuffer.remaining(); + + // current buffer has enough bytes + if (bufferRemaining >= remaining) { + currentBuffer.get(b, off + read, remaining); + read += remaining; + break; + } + + // else + currentBuffer.get(b, off + read, bufferRemaining); + currentBuffer = null; + read += bufferRemaining; + remaining -= bufferRemaining; + } + return read; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + checkNotClosed(); + return read(b, off, len, -1); + } + + @Override + public byte[] readAllBytes() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int readNBytes(byte[] b, int off, int len) throws IOException { + checkNotClosed(); + return read(b, off, len, 0); + } + + @Override + public long skip(long n) throws IOException { + checkNotClosed(); + long skipped = 0; + long remaining = n; + while (remaining > 0) { + if (!checkBuffer()) + break; + int bufferRemaining = currentBuffer.remaining(); + if (bufferRemaining >= remaining) { + currentBuffer.position(currentBuffer.position() + (int) remaining); + skipped += 
remaining; + break; + } + currentBuffer = null; + skipped += bufferRemaining; + remaining -= bufferRemaining; + } + return skipped; + } + + @Override + public void skipNBytes(long n) throws IOException { + long skipped = skip(n); + if (skipped != n) { + throw new EOFException(); + } + } + + @Override + public int available() throws IOException { + return (int) Math.min(Integer.MAX_VALUE, availableLong()); + } + + public long availableLong() throws IOException { + checkNotClosed(); + if (!checkBuffer()) + return 0; + long sum = 0; + boolean foundCurrentBuffer = false; + for (byte[] arr : data) { + if (foundCurrentBuffer) { + sum += arr.length; + } else { + if (arr == currentBuffer.array()) { + foundCurrentBuffer = true; + } + } + } + sum += currentBuffer != null ? currentBuffer.remaining() : 0; + return sum; + } + + @Override + public void close() throws IOException { + closed = true; + } + + @Override + public int read() throws IOException { + checkNotClosed(); + if (!checkBuffer()) + return -1; + return currentBuffer.get() & 0xFF; + } +} diff --git a/src/main/java/org/aksw/iguana/commons/io/ByteArrayListOutputStream.java b/src/main/java/org/aksw/iguana/commons/io/ByteArrayListOutputStream.java new file mode 100644 index 000000000..74d00949b --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/io/ByteArrayListOutputStream.java @@ -0,0 +1,136 @@ +package org.aksw.iguana.commons.io; + +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.LinkedList; +import java.util.List; +import java.util.Objects; + +/** + * An OutputStream that writes to a list of byte arrays. + * The buffers have a minimum size. + * If a write operation is smaller than the minimum size, the data is stored in a separate buffer. + * This buffer will be filled up by subsequent writings to the minimum size before another buffer is created. + */ +public class ByteArrayListOutputStream extends ReversibleOutputStream { + + private final int MIN_BUFFER_SIZE; + private ByteBuffer currentBuffer; + private final LinkedList bufferList = new LinkedList<>(); + private boolean closed = false; + + /** + * Creates a new ByteArrayListOutputStream with a minimum buffer size of 4096 bytes. + */ + public ByteArrayListOutputStream() { + MIN_BUFFER_SIZE = 4096; + } + + /** + * Creates a new ByteArrayListOutputStream with the given minimum buffer size. 
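ByteArrayListInputStream stitches reads together across the byte arrays it wraps, as the loop in read(byte[], int, int) above shows. A small usage sketch (the arrays are toy data):

```java
import org.aksw.iguana.commons.io.ByteArrayListInputStream;

import java.util.List;

public class CrossBufferReadSketch {
    public static void main(String[] args) throws Exception {
        // three backing arrays; reads are stitched together across their boundaries
        var in = new ByteArrayListInputStream(List.of(
                new byte[]{1, 2}, new byte[]{3, 4, 5}, new byte[]{6}));

        byte[] dst = new byte[4];
        int n = in.read(dst, 0, dst.length); // pulls from the first two arrays
        System.out.println(n);               // 4
        System.out.println(in.skip(1));      // 1 (skips the 5)
        System.out.println(in.read());       // 6
        System.out.println(in.read());       // -1, end of stream
    }
}
```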
+ *
+ * @param minBufferSize the minimum buffer size
+ */
+ public ByteArrayListOutputStream(int minBufferSize) {
+ if (minBufferSize < 1) {
+ throw new IllegalArgumentException("minBufferSize must be at least 1");
+ }
+ MIN_BUFFER_SIZE = minBufferSize;
+ }
+
+ private void checkNotClosed() throws IOException {
+ if (closed) {
+ throw new IOException("Stream closed");
+ }
+ }
+
+ @Override
+ public void write(byte[] b, int off, int len) throws IOException {
+ checkNotClosed();
+ Objects.checkFromIndexSize(off, len, b.length);
+ if (currentBuffer == null) {
+ if (len < MIN_BUFFER_SIZE) {
+ currentBuffer = ByteBuffer.allocate(MIN_BUFFER_SIZE);
+ currentBuffer.put(b, off, len);
+ } else {
+ final var buffer = new byte[len];
+ System.arraycopy(b, off, buffer, 0, len);
+ bufferList.add(buffer);
+ }
+ return;
+ }
+
+ final var spaceRemaining = currentBuffer.remaining();
+ if (spaceRemaining >= len) {
+ currentBuffer.put(b, off, len);
+ } else {
+ currentBuffer.put(b, off, spaceRemaining);
+ bufferList.add(currentBuffer.array());
+ currentBuffer = null;
+
+ if (len - spaceRemaining < MIN_BUFFER_SIZE) {
+ currentBuffer = ByteBuffer.allocate(MIN_BUFFER_SIZE);
+ currentBuffer.put(b, off + spaceRemaining, len - spaceRemaining);
+ } else {
+ final var buffer = new byte[len - spaceRemaining];
+ System.arraycopy(b, off + spaceRemaining, buffer, 0, len - spaceRemaining);
+ bufferList.add(buffer);
+ }
+ }
+ }
+
+ @Override
+ public void write(int b) throws IOException {
+ checkNotClosed();
+ if (currentBuffer == null) {
+ currentBuffer = ByteBuffer.allocate(MIN_BUFFER_SIZE);
+ }
+ if (currentBuffer.remaining() == 0) {
+ bufferList.add(currentBuffer.array());
+ currentBuffer = ByteBuffer.allocate(MIN_BUFFER_SIZE);
+ }
+ currentBuffer.put((byte) b);
+ }
+
+ @Override
+ public long size() {
+ long sum = 0;
+ for (var buffer : bufferList) {
+ sum += buffer.length;
+ }
+ return sum + (currentBuffer == null ? 0 : currentBuffer.position());
+ }
+
+ /**
+ * Returns the list of completed buffers.
+ * The list does not contain the current, partially filled buffer.
+ * Once the stream is closed, the current buffer is trimmed to its actual size and added to the list.
+ *
+ * @return the list of buffers
+ */
+ public List<byte[]> getBuffers() {
+ return bufferList;
+ }
+
+ @Override
+ public void close() throws IOException {
+ closed = true;
+ if (currentBuffer != null) {
+ // trim buffer
+ final var temp = currentBuffer.array();
+ final var buffer = new byte[currentBuffer.position()];
+ System.arraycopy(temp, 0, buffer, 0, buffer.length);
+ bufferList.add(buffer);
+ currentBuffer = null;
+ }
+ }
+
+ @Override
+ public InputStream toInputStream() {
+ try {
+ this.close();
+ } catch (IOException ignored) {} // doesn't throw
+ return new ByteArrayListInputStream(bufferList);
+ }
+}
diff --git a/src/main/java/org/aksw/iguana/commons/io/ReversibleOutputStream.java b/src/main/java/org/aksw/iguana/commons/io/ReversibleOutputStream.java
new file mode 100644
index 000000000..0a78acade
--- /dev/null
+++ b/src/main/java/org/aksw/iguana/commons/io/ReversibleOutputStream.java
@@ -0,0 +1,13 @@
+package org.aksw.iguana.commons.io;
+
+import java.io.InputStream;
+import java.io.OutputStream;
+
+/**
+ * An OutputStream that can be converted to an InputStream.
+ * The size of the data can be queried.
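Both stream implementations in this patch share the ReversibleOutputStream contract declared here: collect bytes, report size(), then flip into an InputStream for the response-body processors. A sketch that exercises the contract polymorphically (the payload is arbitrary):

```java
import org.aksw.iguana.commons.io.*;

import java.io.InputStream;
import java.nio.charset.StandardCharsets;

public class ReversibleSketch {
    // works for any ReversibleOutputStream implementation from this patch
    static void dump(ReversibleOutputStream out) throws Exception {
        out.write("abc".getBytes(StandardCharsets.UTF_8));
        System.out.println("size = " + out.size());
        InputStream in = out.toInputStream(); // flips the stream for reading
        System.out.println("first byte = " + in.read()); // 97 ('a')
    }

    public static void main(String[] args) throws Exception {
        dump(new ByteArrayListOutputStream());
        dump(new BigByteArrayOutputStream());
    }
}
```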
+ */ +public abstract class ReversibleOutputStream extends OutputStream { + public abstract InputStream toInputStream(); + public abstract long size(); +} diff --git a/src/main/java/org/aksw/iguana/commons/rdf/IGUANA_BASE.java b/src/main/java/org/aksw/iguana/commons/rdf/IGUANA_BASE.java new file mode 100644 index 000000000..7dab2cb8f --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/rdf/IGUANA_BASE.java @@ -0,0 +1,27 @@ +package org.aksw.iguana.commons.rdf; + +import java.util.Map; + +public class IGUANA_BASE { + public static final String NS = "http://iguana-benchmark.eu" + "/"; + public static final String PREFIX = "iguana"; + + private IGUANA_BASE() { + } + + /** + * The RDF-friendly version of the IGUANA namespace + * with trailing / character. + */ + public static String getURI() { + return NS; + } + + public static Map PREFIX_MAP = Map.of( + IGUANA_BASE.PREFIX, IGUANA_BASE.NS, + IONT.PREFIX, IONT.NS, + IPROP.PREFIX, IPROP.NS, + IRES.PREFIX, IRES.NS, + "lsqr", "http://lsq.aksw.org/res/" + ); +} diff --git a/src/main/java/org/aksw/iguana/commons/rdf/IONT.java b/src/main/java/org/aksw/iguana/commons/rdf/IONT.java new file mode 100644 index 000000000..aecaaee24 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/rdf/IONT.java @@ -0,0 +1,26 @@ +package org.aksw.iguana.commons.rdf; + +import org.aksw.iguana.cc.metrics.Metric; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +public class IONT { + public static final String NS = IGUANA_BASE.NS + "class" + "/"; + public static final String PREFIX = "iont"; + + public static final Resource suite = ResourceFactory.createResource(NS + "Suite"); + public static final Resource dataset = ResourceFactory.createResource(NS + "Dataset"); + public static final Resource task = ResourceFactory.createResource(NS + "Task"); + public static final Resource connection = ResourceFactory.createResource(NS + "Connection"); + public static final Resource stresstest = ResourceFactory.createResource(NS + "Stresstest"); + public static final Resource worker = ResourceFactory.createResource(NS + "Worker"); + public static final Resource executedQuery = ResourceFactory.createResource(NS + "ExecutedQuery"); + public static final Resource queryExecution = ResourceFactory.createResource(NS + "QueryExecution"); + public static final Resource responseBody = ResourceFactory.createResource(NS + "ResponseBody"); + public static final Resource query = ResourceFactory.createResource(NS + "Query"); + public static final Resource metric = ResourceFactory.createResource(NS + "Metric"); + + public static Resource getMetricClass(Metric metric) { + return ResourceFactory.createResource(NS + "metric/" + metric.getAbbreviation()); + } +} diff --git a/src/main/java/org/aksw/iguana/commons/rdf/IPROP.java b/src/main/java/org/aksw/iguana/commons/rdf/IPROP.java new file mode 100644 index 000000000..298f9b06d --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/rdf/IPROP.java @@ -0,0 +1,71 @@ +package org.aksw.iguana.commons.rdf; + +import org.aksw.iguana.cc.metrics.Metric; +import org.apache.jena.rdf.model.Property; +import org.apache.jena.rdf.model.ResourceFactory; + +public class IPROP { + public static final String NS = IGUANA_BASE.NS + "properties" + "/"; + public static final String PREFIX = "iprop"; + + /** + * The RDF-friendly version of the IPROP namespace + * with trailing / character. 
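The vocabulary classes that follow are plain Jena resource and property holders. A hedged sketch of how they might be combined when building a result model (the suite id 12345 is made up; registering PREFIX_MAP keeps serializations readable):

```java
import org.aksw.iguana.commons.rdf.IGUANA_BASE;
import org.aksw.iguana.commons.rdf.IONT;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.vocabulary.RDF;

public class VocabSketch {
    public static void main(String[] args) {
        Model model = ModelFactory.createDefaultModel();
        model.setNsPrefixes(IGUANA_BASE.PREFIX_MAP); // iguana, iont, iprop, ires, lsqr

        // type a suite resource with the IONT class
        model.add(model.createResource(IGUANA_BASE.NS + "resource/12345"), RDF.type, IONT.suite);

        model.write(System.out, "TURTLE");
    }
}
```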
+ */ + public static String getURI() { + return NS; + } + + public static Property createMetricProperty(Metric metric) { + return ResourceFactory.createProperty(NS + metric.getAbbreviation()); + } + + public static final Property succeeded = ResourceFactory.createProperty(NS, "succeeded"); + + public static final Property responseBodyHash = ResourceFactory.createProperty(NS, "responseBodyHash"); + public static final Property responseBody = ResourceFactory.createProperty(NS, "responseBody"); + public static final Property startTime = ResourceFactory.createProperty(NS, "startTime"); + public static final Property httpCode = ResourceFactory.createProperty(NS, "httpCode"); + + public static final Property dataset = ResourceFactory.createProperty(NS, "dataset"); + public static final Property task = ResourceFactory.createProperty(NS, "task"); + public static final Property connection = ResourceFactory.createProperty(NS, "connection"); + public static final Property query = ResourceFactory.createProperty(NS, "query"); + public static final Property metric = ResourceFactory.createProperty(NS, "metric"); + public static final Property workerResult = ResourceFactory.createProperty(NS, "workerResult"); + public static final Property version = ResourceFactory.createProperty(NS, "version"); + public static final Property timeLimit = ResourceFactory.createProperty(NS, "timeLimit"); + public static final Property noOfQueryMixes = ResourceFactory.createProperty(NS, "noOfQueryMixes"); + public static final Property noOfWorkers = ResourceFactory.createProperty(NS, "noOfWorkers"); + public static final Property workerID = ResourceFactory.createProperty(NS, "workerID"); + public static final Property workerType = ResourceFactory.createProperty(NS, "workerType"); + public static final Property noOfQueries = ResourceFactory.createProperty(NS, "noOfQueries"); + public static final Property timeOut = ResourceFactory.createProperty(NS, "timeOut"); + public static final Property startDate = ResourceFactory.createProperty(NS, "startDate"); + public static final Property endDate = ResourceFactory.createProperty(NS, "endDate"); + + // Language Processor + public static final Property results = ResourceFactory.createProperty(NS, "results"); + public static final Property bindings = ResourceFactory.createProperty(NS, "bindings"); + public static final Property variable = ResourceFactory.createProperty(NS, "variable"); + public static final Property exception = ResourceFactory.createProperty(NS, "exception"); + + + // Query Stats + public static final Property failed = ResourceFactory.createProperty(NS, "failed"); + public static final Property queryExecution = ResourceFactory.createProperty(NS, "queryExecution"); + public static final Property timeOuts = ResourceFactory.createProperty(NS, "timeOuts"); + public static final Property totalTime = ResourceFactory.createProperty(NS, "totalTime"); + public static final Property unknownException = ResourceFactory.createProperty(NS, "unknownException"); + public static final Property wrongCodes = ResourceFactory.createProperty(NS, "wrongCodes"); + public static final Property fullID = ResourceFactory.createProperty(NS, "fullID"); + public static final Property id = ResourceFactory.createProperty(NS, "id"); + + // Each Query Stats + public static final Property code = ResourceFactory.createProperty(NS, "code"); + public static final Property queryID = ResourceFactory.createProperty(NS, "queryID"); + public static final Property resultSize = ResourceFactory.createProperty(NS, 
"resultSize"); + public static final Property run = ResourceFactory.createProperty(NS, "run"); + public static final Property success = ResourceFactory.createProperty(NS, "success"); + public static final Property time = ResourceFactory.createProperty(NS, "time"); +} diff --git a/src/main/java/org/aksw/iguana/commons/rdf/IRES.java b/src/main/java/org/aksw/iguana/commons/rdf/IRES.java new file mode 100644 index 000000000..c24768f68 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/rdf/IRES.java @@ -0,0 +1,63 @@ +package org.aksw.iguana.commons.rdf; + +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.apache.jena.rdf.model.Resource; +import org.apache.jena.rdf.model.ResourceFactory; + +import java.math.BigInteger; + +/** + * Class containing the IRES vocabulary and methods to create RDF resources. + */ +public class IRES { + public static final String NS = IGUANA_BASE.NS + "resource" + "/"; + public static final String PREFIX = "ires"; + + public static Resource getResource(String id) { + return ResourceFactory.createResource(NS + id); + } + + public static Resource getMetricResource(Metric metric) { + return ResourceFactory.createResource(NS + metric.getAbbreviation()); + } + + public static Resource getResponsebodyResource(long hash) { + return ResourceFactory.createResource(NS + "responseBody" + "/" + hash); + } + + public static class Factory { + + private final String suiteID; + private final String taskURI; + + public Factory(String suiteID, long taskID) { + this.suiteID = suiteID; + this.taskURI = NS + suiteID + "/" + taskID; + } + + public Resource getSuiteResource() { + return ResourceFactory.createResource(NS + suiteID); + } + + public Resource getTaskResource() { + return ResourceFactory.createResource(this.taskURI); + } + + public Resource getWorkerResource(HttpWorker worker) { + return ResourceFactory.createResource(this.taskURI + "/" + worker.getWorkerID()); + } + + public Resource getTaskQueryResource(String queryID) { + return ResourceFactory.createResource(this.taskURI + "/" + queryID); + } + + public Resource getWorkerQueryResource(HttpWorker worker, int index) { + return ResourceFactory.createResource(this.taskURI + "/" + worker.getWorkerID() + "/" + worker.config().queries().getQueryId(index)); + } + + public Resource getWorkerQueryRunResource(HttpWorker worker, int index, BigInteger run) { + return ResourceFactory.createResource(this.taskURI + "/" + worker.getWorkerID() + "/" + worker.config().queries().getQueryId(index) + "/" + run); + } + } +} diff --git a/src/main/java/org/aksw/iguana/commons/time/DurationLiteral.java b/src/main/java/org/aksw/iguana/commons/time/DurationLiteral.java new file mode 100644 index 000000000..f4ce8c272 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/time/DurationLiteral.java @@ -0,0 +1,91 @@ +package org.aksw.iguana.commons.time; + +import org.apache.jena.datatypes.DatatypeFormatException; +import org.apache.jena.datatypes.RDFDatatype; +import org.apache.jena.graph.impl.LiteralLabel; +import org.apache.jena.vocabulary.XSD; + +import java.time.Duration; + +/** + * This class is used to convert a Java Duration object to a typed RDF literal. The literal is typed as + * xsd:dayTimeDuration.
+ * TODO: This class temporarily fixes an issue with Jena. + */ +public class DurationLiteral implements RDFDatatype { + + private final Duration duration; + + public DurationLiteral(Duration duration) { + this.duration = duration; + } + + public String getLexicalForm() { + return duration.toString(); + } + + @Override + public String getURI() { + return XSD.getURI() + "duration"; + } + + @Override + public String unparse(Object value) { + return ((DurationLiteral) value).getLexicalForm(); + } + + @Override + public Object parse(String lexicalForm) throws DatatypeFormatException { + return new DurationLiteral(Duration.parse(lexicalForm)); + } + + @Override + public boolean isValid(String lexicalForm) { + try { + Duration.parse(lexicalForm); + return true; + } catch (Exception e) { + return false; + } + } + + @Override + public boolean isValidValue(Object valueForm) { + return valueForm instanceof DurationLiteral; + } + + @Override + public boolean isValidLiteral(LiteralLabel lit) { + return lit.getDatatype() instanceof DurationLiteral; + } + + @Override + public boolean isEqual(LiteralLabel value1, LiteralLabel value2) { + return value1.getDatatype() == value2.getDatatype() && value1.getValue().equals(value2.getValue()); + } + + @Override + public int getHashCode(LiteralLabel lit) { + return lit.getValue().hashCode(); + } + + @Override + public Class getJavaClass() { + return DurationLiteral.class; + } + + @Override + public Object cannonicalise(Object value) { + return value; + } + + @Override + public Object extendedTypeDefinition() { + return null; + } + + @Override + public RDFDatatype normalizeSubType(Object value, RDFDatatype dt) { + return dt; + } +} diff --git a/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java b/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java new file mode 100644 index 000000000..653d7ff38 --- /dev/null +++ b/src/main/java/org/aksw/iguana/commons/time/TimeUtils.java @@ -0,0 +1,45 @@ +package org.aksw.iguana.commons.time; + +import org.apache.jena.datatypes.xsd.impl.XSDDateTimeStampType; +import org.apache.jena.rdf.model.Literal; +import org.apache.jena.rdf.model.ResourceFactory; + +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.Duration; +import java.time.Instant; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; + +/** + * Class related to the conversion of Java time objects to RDF literals. 
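The helper below, createTypedDurationLiteralInSeconds, renders a Duration as an xsd-style PT...S decimal with nanosecond precision and then trims trailing zeros. A standalone sketch of just that string manipulation, mirroring the method for illustration (not the original code):

```java
import java.math.BigDecimal;
import java.math.BigInteger;
import java.time.Duration;

public class DurationLexicalSketch {
    // mirrors the trimming logic of createTypedDurationLiteralInSeconds below
    static String lexical(Duration duration) {
        var seconds = "PT" + new BigDecimal(BigInteger.valueOf(duration.toNanos()), 9).toPlainString() + "S";
        while (seconds.lastIndexOf("0") == seconds.length() - 2) { // strip trailing zeros before the S
            seconds = seconds.substring(0, seconds.length() - 2) + "S";
        }
        if (seconds.endsWith(".S")) { // drop a now-empty fraction part
            seconds = seconds.substring(0, seconds.length() - 2) + "S";
        }
        return seconds;
    }

    public static void main(String[] args) {
        System.out.println(lexical(Duration.ofMillis(2500))); // PT2.5S
        System.out.println(lexical(Duration.ofSeconds(2)));   // PT2S
        System.out.println(lexical(Duration.ofNanos(1)));     // PT0.000000001S
    }
}
```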
+ */ +public class TimeUtils { + + public static Literal createTypedDurationLiteralInSeconds(Duration duration) { + var seconds = "PT" + new BigDecimal(BigInteger.valueOf(duration.toNanos()), 9).toPlainString() + "S"; + + // cut trailing zeros + while (seconds.lastIndexOf("0") == seconds.length() - 2 /* The last character is S */) { + seconds = seconds.substring(0, seconds.length() - 2) + "S"; + } + + if (seconds.endsWith(".S")) { + seconds = seconds.substring(0, seconds.length() - 2) + "S"; + } + + return ResourceFactory.createTypedLiteral(seconds, new DurationLiteral(duration)); + } + + public static Literal createTypedDurationLiteral(Duration duration) { + return ResourceFactory.createTypedLiteral(duration.toString(), new DurationLiteral(duration)); + } + + public static Literal createTypedInstantLiteral(Instant time) { + return ResourceFactory.createTypedLiteral(new XSDDateTimeStampType(null).parse(time.toString())); + } + + public static Literal createTypedZonedDateTimeLiteral(ZonedDateTime time) { + return ResourceFactory.createTypedLiteral(new XSDDateTimeStampType(null).parse(time.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME))); + } +} diff --git a/src/main/resources/iguana-schema.json b/src/main/resources/iguana-schema.json new file mode 100644 index 000000000..4750e59df --- /dev/null +++ b/src/main/resources/iguana-schema.json @@ -0,0 +1,391 @@ +{ + "$schema": "http://json-schema.org/draft-06/schema#", + "$ref": "#/definitions/root", + "definitions": { + "root": { + "title": "root", + "type": "object", + "additionalProperties": false, + "properties": { + "datasets": { + "type": "array", + "items": { + "$ref": "#/definitions/Dataset" + }, + "minItems": 1 + }, + "connections": { + "type": "array", + "items": { + "$ref": "#/definitions/Connection" + }, + "minItems": 1 + }, + "tasks": { + "type": "array", + "items": { + "$ref": "#/definitions/Task" + }, + "minItems": 1 + }, + "storages": { + "type": "array", + "items": { + "$ref": "#/definitions/Storage" + }, + "minItems": 1 + }, + "responseBodyProcessors": { + "type": "array", + "items": { + "$ref": "#/definitions/ResponseBodyProcessor" + } + }, + "metrics": { + "type": "array", + "items": { + "$ref": "#/definitions/Metric" + } + } + }, + "required": [ + "connections", + "datasets", + "storages", + "tasks" + ] + }, + + "Connection": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "version": { + "type": "string" + }, + "endpoint": { + "type": "string", + "format": "uri" + }, + "updateEndpoint": { + "type": "string", + "format": "uri" + }, + "authentication": { + "$ref": "#/definitions/Authentication" + }, + "updateAuthentication": { + "$ref": "#/definitions/Authentication" + }, + "dataset": { + "type": "string" + } + }, + "required": [ + "endpoint", + "name" + ], + "title": "Connection" + }, + "Authentication": { + "type": "object", + "additionalProperties": false, + "properties": { + "user": { + "type": "string" + }, + "password": { + "type": "string" + } + }, + "required": [ + "password", + "user" + ], + "title": "Authentication" + }, + "Dataset": { + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "type": "string" + }, + "file": { + "type": "string" + } + }, + "required": [ + "name" + ], + "title": "Dataset" + }, + "Metric": { + "type": "object", + "additionalProperties": false, + "properties": { + "type": { + "type": "string", + "enum": [ "AES", "AvgQPS", "EachQuery", "NoQ", "NoQPH", "PAvgQPS", "PQPS", "QMPH", "QPS" ] + }, + 
"penalty": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "type" + ], + "title": "Metric" + }, + "ResponseBodyProcessor": { + "type": "object", + "additionalProperties": false, + "properties": { + "contentType": { + "type": "string" + }, + "threads": { + "type": "integer", + "minimum": 1 + }, + "timeout" : { + "type": "string" + } + }, + "required": [ + "contentType" + ], + "title": "ResponseBodyProcessor" + }, + "Storage": { + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/CSVStorage" }, + { "$ref": "#/definitions/RDFFileStorage" }, + { "$ref": "#/definitions/TriplestoreStorage" } + ], + "title": "Storage" + }, + "CSVStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "csv file" + }, + "directory": { + "type": "string" + } + }, + "required": [ + "type", + "directory" + ], + "title": "CSVStorage" + }, + "RDFFileStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "rdf file" + }, + "path": { + "type": "string" + } + }, + "required": [ + "type", + "path" + ], + "title": "RDFFileStorage" + }, + "TriplestoreStorage": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "triplestore" + }, + "endpoint": { + "type": "string", + "format": "uri" + }, + "user": { + "type": "string" + }, + "password": { + "type": "string" + }, + "baseUri": { + "type": "string", + "format": "uri" + } + }, + "required": [ + "type", + "endpoint" + ], + "title": "TriplestoreStorage" + }, + "Task": { + "type": "object", + "oneOf": [ { "$ref": "#/definitions/Stresstest" } ], + "title": "Task" + }, + "Stresstest": { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "stresstest" + }, + "warmupWorkers": { + "type": "array", + "items": { + "$ref": "#/definitions/Worker" + } + }, + "workers": { + "type": "array", + "items": { + "$ref": "#/definitions/Worker" + }, + "minItems": 1 + } + }, + "required": [ + "type", + "workers" + ], + "title": "Stresstest" + }, + "Worker": { + "type": "object", + "oneOf": [ { "$ref": "#/definitions/SPARQLWorker" } ], + "title": "Worker" + }, + "SPARQLWorker" : { + "type": "object", + "unevaluatedProperties": false, + "properties": { + "type": { + "type": "string", + "const": "SPARQLProtocolWorker" + }, + "number": { + "type": "integer", + "minimum": 1 + }, + "requestType": { + "type": "string", + "enum": [ "post query", "get query", "post url-enc query", "post url-enc update", "post update" ] + }, + "queries": { + "$ref": "#/definitions/Queries" + }, + "timeout": { + "type": "string" + }, + "connection": { + "type": "string" + }, + "completionTarget": { + "$ref": "#/definitions/CompletionTarget" + }, + "parseResults": { + "type": "boolean" + }, + "acceptHeader": { + "type": "string" + } + }, + "required": [ + "type", + "completionTarget", + "connection", + "queries", + "timeout" + ], + "title": "SPARQLWorker" + }, + "CompletionTarget": { + "type": "object", + "oneOf": [ + { "$ref": "#/definitions/TimeLimit" }, + { "$ref": "#/definitions/QueryMixes" } + ], + "title": "CompletionTarget" + }, + "TimeLimit": { + "properties": { + "duration": { + "type": "string" + } + }, + "title": "TimeLimit", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "duration" + ] + }, + "QueryMixes": { + "properties": { + "number": { + "type": "integer", + "minimum": 1 + } + }, + "title": 
"QueryMixes", + "type": "object", + "unevaluatedProperties": false, + "required": [ + "number" + ] + }, + "Queries": { + "type": "object", + "additionalProperties": false, + "properties": { + "path": { + "type": "string" + }, + "format": { + "type": "string", + "enum": [ "one-per-line", "separator", "folder" ] + }, + "separator": { + "type": "string" + }, + "caching": { + "type": "boolean" + }, + "order": { + "type": "string", + "enum": [ "random", "linear" ] + }, + "seed": { + "type": "integer" + }, + "lang": { + "type": "string", + "enum": [ "", "SPARQL" ] + } + }, + "required": [ + "path" + + ], + "title": "Queries" + } + } +} diff --git a/src/main/resources/logback.xml b/src/main/resources/logback.xml new file mode 100644 index 000000000..d80cde084 --- /dev/null +++ b/src/main/resources/logback.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + %d{HH:mm:ss.SSS} %highlight(%-5level) [%thread] %logger{0} -- %msg%n + + + + + iguana.log + true + + %d{HH:mm:ss.SSS} %-5level [%thread] %logger{0} -- %msg%n + + + + + + + + \ No newline at end of file diff --git a/src/main/resources/start-iguana.sh b/src/main/resources/start-iguana.sh old mode 100755 new mode 100644 index 04671553f..69eff9b27 --- a/src/main/resources/start-iguana.sh +++ b/src/main/resources/start-iguana.sh @@ -1,7 +1,7 @@ #!/bin/bash if [ -z "$IGUANA_JVM" ] then - java -jar iguana-${project.version}.jar "$1" + java -jar iguana.jar "$1" else - java "$IGUANA_JVM" -jar iguana-${project.version}.jar "$1" + java "$IGUANA_JVM" -jar iguana.jar "$1" fi \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/config/elements/ConnectionConfigTest.java b/src/test/java/org/aksw/iguana/cc/config/elements/ConnectionConfigTest.java new file mode 100644 index 000000000..32034d23e --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/config/elements/ConnectionConfigTest.java @@ -0,0 +1,185 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.net.URI; +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; + +class ConnectionConfigTest { + private final ObjectMapper mapper = new ObjectMapper(); + + private static Stream testDeserializationData() { + return Stream.of( + Arguments.of(new ConnectionConfig( + "endpoint01", + "0.1", + null, + URI.create("http://example.com/sparql"), + null, null, null), + """ + { + "name":"endpoint01", + "endpoint":"http://example.com/sparql", + "version":"0.1", + "authentication": null, + "updateEndpoint":null, + "updateAuthentication":null, + "dataset":null + } + """ + ), + Arguments.of(new ConnectionConfig( + "endpoint01", + "0.1", + new DatasetConfig("MyData", "some.ttl"), + URI.create("http://example.com/sparql"), + null, null, null), + """ + {"name":"endpoint01","endpoint":"http://example.com/sparql","version":"0.1","authentication": null,"updateEndpoint":null,"dataset":{"name":"MyData","file":"some.ttl"}, "updateAuthentication": null} + """ + ), + Arguments.of(new ConnectionConfig( // test default values + "endpoint01", + null, + null, + URI.create("http://example.com/sparql"), + null, null, null), + """ + { + "name":"endpoint01", + "endpoint":"http://example.com/sparql" + } + """ + ), + Arguments.of(new ConnectionConfig( // test setting everything + "endpoint01", + "v2", + new DatasetConfig("dataset1", 
"some.ttl"), + URI.create("http://example.com/sparql"), + new ConnectionConfig.Authentication("user", "pass"), + URI.create("http://example.com/update"), + new ConnectionConfig.Authentication("user_update", "pass_update")), + """ + { + "name":"endpoint01", + "version": "v2", + "endpoint":"http://example.com/sparql", + "authentication": { + "user": "user", + "password": "pass" + }, + "updateEndpoint": "http://example.com/update", + "updateAuthentication": { + "user": "user_update", + "password": "pass_update" + }, + "dataset": { + "name": "dataset1", + "file": "some.ttl" + } + } + """ + ) + ); + } + + private static Stream testSerializationData() { + return Stream.of( + Arguments.of(new ConnectionConfig( + "endpoint01", + "0.1", + null, + URI.create("http://example.com/sparql"), + null, null, null), + """ + { + "name":"endpoint01", + "endpoint":"http://example.com/sparql", + "version":"0.1", + "authentication": null, + "updateEndpoint":null, + "updateAuthentication":null, + "dataset":null + } + """ + ), + Arguments.of(new ConnectionConfig( + "endpoint01", + "0.1", + new DatasetConfig("MyData", "some.ttl"), + URI.create("http://example.com/sparql"), + null, null, null), + """ + {"name":"endpoint01","endpoint":"http://example.com/sparql","version":"0.1","authentication": null,"updateEndpoint":null,"dataset":{"name":"MyData","file":"some.ttl"}, "updateAuthentication": null} + """ + ), + Arguments.of(new ConnectionConfig( // test default values + "endpoint01", + null, + null, + URI.create("http://example.com/sparql"), + null, null, null), + """ + { + "name":"endpoint01", + "endpoint":"http://example.com/sparql", + "version": null, + "dataset": null, + "authentication": null, + "updateAuthentication": null, + "updateEndpoint": null + } + """ + ), + Arguments.of(new ConnectionConfig( // test setting everything + "endpoint01", + "v2", + new DatasetConfig("dataset1", "some.ttl"), + URI.create("http://example.com/sparql"), + new ConnectionConfig.Authentication("user", "pass"), + URI.create("http://example.com/update"), + new ConnectionConfig.Authentication("user_update", "pass_update")), + """ + { + "name":"endpoint01", + "version": "v2", + "endpoint":"http://example.com/sparql", + "authentication": { + "user": "user", + "password": "pass" + }, + "updateEndpoint": "http://example.com/update", + "updateAuthentication": { + "user": "user_update", + "password": "pass_update" + }, + "dataset": { + "name": "dataset1", + "file": "some.ttl" + } + } + """ + ) + ); + } + + @ParameterizedTest + @MethodSource("testSerializationData") + public void testSerialization(ConnectionConfig config, String expectedJson) throws Exception { + final String actual = mapper.writeValueAsString(config); + assertEquals(mapper.readTree(expectedJson), mapper.readTree(actual)); + } + + @ParameterizedTest + @MethodSource("testDeserializationData") + public void testDeserialization(ConnectionConfig expected, String json) throws Exception { + final var actual = mapper.readValue(json, ConnectionConfig.class); + assertEquals(expected, actual); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/config/elements/DatasetConfigTest.java b/src/test/java/org/aksw/iguana/cc/config/elements/DatasetConfigTest.java new file mode 100644 index 000000000..9d09b7cb6 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/config/elements/DatasetConfigTest.java @@ -0,0 +1,39 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.databind.ObjectMapper; +import 
org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; + +class DatasetConfigTest { + + private final ObjectMapper mapper = new ObjectMapper(); + + private static Stream testData() { + return Stream.of( + Arguments.of( + new DatasetConfig("MyData", "some.ttl"), + """ + {"name":"MyData","file":"some.ttl"} + """ + ), + Arguments.of( + new DatasetConfig("MyData", null), + """ + {"name":"MyData"} + """ + ) + ); + } + + @ParameterizedTest + @MethodSource("testData") + public void testDeserialization(DatasetConfig expectedConfig, String json) throws Exception { + final var actualConfig = mapper.readValue(json, DatasetConfig.class); + assertEquals(expectedConfig, actualConfig); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/config/elements/StorageConfigTest.java b/src/test/java/org/aksw/iguana/cc/config/elements/StorageConfigTest.java new file mode 100644 index 000000000..0c99a46dd --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/config/elements/StorageConfigTest.java @@ -0,0 +1,51 @@ +package org.aksw.iguana.cc.config.elements; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.aksw.iguana.cc.storage.impl.CSVStorage; +import org.aksw.iguana.cc.storage.impl.RDFFileStorage; +import org.aksw.iguana.cc.storage.impl.TriplestoreStorage; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; + +class StorageConfigTest { + private final ObjectMapper mapper = new ObjectMapper(); + + private static Stream testData() { + return Stream.of( + Arguments.of(new RDFFileStorage.Config("some.ttl"), + """ + {"type":"rdf file","path":"some.ttl"} + """ + ), + Arguments.of(new CSVStorage.Config("csv_results/"), + """ + {"type":"csv file","directory":"csv_results/"} + """ + ), + Arguments.of(new TriplestoreStorage.Config("http://example.com/sparql", "user", "pass", "http://example.com/"), + """ + {"type":"triplestore","endpoint":"http://example.com/sparql", "user": "user", "password": "pass", "baseUri": "http://example.com/"} + """ + ) + ); + } + + @ParameterizedTest + @MethodSource("testData") + public void testSerialization(StorageConfig config, String expectedJson) throws Exception { + final String actual = mapper.writeValueAsString(config); + assertEquals(mapper.readTree(expectedJson), mapper.readTree(actual)); + } + + @ParameterizedTest + @MethodSource("testData") + public void testDeserialization(StorageConfig expectedConfig, String json) throws Exception { + final var actualConfig = mapper.readValue(json, StorageConfig.class); + assertEquals(expectedConfig, actualConfig); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/mockup/MockupConnection.java b/src/test/java/org/aksw/iguana/cc/mockup/MockupConnection.java new file mode 100644 index 000000000..3e6d7bb05 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/mockup/MockupConnection.java @@ -0,0 +1,19 @@ +package org.aksw.iguana.cc.mockup; + +import org.aksw.iguana.cc.config.elements.ConnectionConfig; + +import java.net.URI; + +public class MockupConnection { + + /** + * Creates a connection config with the given parameters + * + * @param name The name of the connection + * @param endpoint The endpoint 
of the connection + * @param datasetName The name of the dataset + */ + public static ConnectionConfig createConnectionConfig(String name, String datasetName, String endpoint) { + return new ConnectionConfig(name, "", null, URI.create(endpoint), null, null, null); + } +} diff --git a/src/test/java/org/aksw/iguana/cc/mockup/MockupQueryHandler.java b/src/test/java/org/aksw/iguana/cc/mockup/MockupQueryHandler.java new file mode 100644 index 000000000..6988f0ab9 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/mockup/MockupQueryHandler.java @@ -0,0 +1,41 @@ +package org.aksw.iguana.cc.mockup; + +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.query.selector.QuerySelector; +import org.aksw.iguana.cc.query.selector.impl.LinearQuerySelector; + + +public class MockupQueryHandler extends QueryHandler { + private final int id; + private final int queryNumber; + + public MockupQueryHandler(int id, int queryNumber) { + super(); + this.queryNumber = queryNumber; + this.id = id; + } + + @Override + public String getQueryId(int i) { + return "MockQueryHandler" + this.id + ":" + i; + } + + @Override + public String[] getAllQueryIds() { + String[] out = new String[queryNumber]; + for (int i = 0; i < queryNumber; i++) { + out[i] = getQueryId(i); + } + return out; + } + + @Override + public int getQueryCount() { + return queryNumber; + } + + @Override + public QuerySelector getQuerySelectorInstance() { + return new LinearQuerySelector(queryNumber); + } +} diff --git a/src/test/java/org/aksw/iguana/cc/mockup/MockupStorage.java b/src/test/java/org/aksw/iguana/cc/mockup/MockupStorage.java new file mode 100644 index 000000000..ef15adf9e --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/mockup/MockupStorage.java @@ -0,0 +1,18 @@ +package org.aksw.iguana.cc.mockup; + +import org.aksw.iguana.cc.storage.Storage; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; + +public class MockupStorage implements Storage { + private Model resultModel = ModelFactory.createDefaultModel(); + + @Override + public void storeResult(Model data) { + resultModel = data; + } + + public Model getResultModel() { + return resultModel; + } +} diff --git a/src/test/java/org/aksw/iguana/cc/mockup/MockupWorker.java b/src/test/java/org/aksw/iguana/cc/mockup/MockupWorker.java new file mode 100644 index 000000000..9950c9f9d --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/mockup/MockupWorker.java @@ -0,0 +1,118 @@ +package org.aksw.iguana.cc.mockup; + +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.config.elements.DatasetConfig; +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.worker.HttpWorker; + +import java.time.Duration; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Optional; +import java.util.OptionalLong; +import java.util.concurrent.CompletableFuture; + +public class MockupWorker extends HttpWorker { + public record Config( + CompletionTarget completionTarget, + String acceptHeader, + Integer number, + Boolean parseResults, + QueryHandler queries, + ConnectionConfig connection, + Duration timeout + ) implements HttpWorker.Config {} + + + /** + * All values except the workerID and queries may be null. I recommend to use the MockupQueryHandler. + * I would also recommend to set the connection, if you want to use the StresstestResultProcessor. 
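The mockup classes are meant to be combined: a MockupQueryHandler supplies query ids, createWorkers (defined below) builds workers around it, and createWorkerResults fabricates matching execution stats for the StresstestResultProcessor. A sketch of that wiring (connection and dataset names are arbitrary):

```java
import org.aksw.iguana.cc.mockup.MockupQueryHandler;
import org.aksw.iguana.cc.mockup.MockupWorker;

public class MockupSetupSketch {
    public static void main(String[] args) {
        // a handler that pretends to hold two queries
        var queries = new MockupQueryHandler(0, 2);

        // helpers shown below: two workers plus canned results for them
        var workers = MockupWorker.createWorkers(0, 2, queries, "conn1", "v1.0", "dataset1");
        var results = MockupWorker.createWorkerResults(queries, workers);

        System.out.println(workers.size() + " workers, " + results.size() + " worker results");
    }
}
```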
+     */
+    public MockupWorker(long workerID, CompletionTarget target, String acceptHeader, Integer number, Boolean parseResults, QueryHandler queries, ConnectionConfig connection, Duration timeout) {
+        super(workerID, null, new Config(
+                target,
+                acceptHeader,
+                number,
+                parseResults,
+                queries,
+                connection,
+                timeout
+            )
+        );
+    }
+
+    /**
+     * All other values will be set to null. This is the bare minimum to make it work with the StresstestResultProcessor.
+     */
+    public MockupWorker(long workerID, QueryHandler queries, String connectionName, String connectionVersion, String datasetName, Duration timeout) {
+        super(workerID, null, new Config(
+                null,
+                null,
+                null,
+                null,
+                queries,
+                new ConnectionConfig(connectionName, connectionVersion, new DatasetConfig(datasetName, null), null, null, null, null),
+                timeout
+        ));
+    }
+
+    @Override
+    public CompletableFuture<Result> start() {
+        return null;
+    }
+
+    public static List<Result> createWorkerResults(QueryHandler queries, List<HttpWorker> workers) {
+        final var startTime = ZonedDateTime.of(2023, 10, 11, 14, 14, 10, 0, ZoneId.of("UTC"));
+        final var endTime = ZonedDateTime.of(2023, 10, 12, 15, 15, 15, 0, ZoneId.of("UTC"));
+
+        final var queryNumber = queries.getQueryCount();
+
+        Instant time = Instant.parse("2023-10-21T20:48:06.399Z");
+
+        final var results = new ArrayList<Result>();
+        for (var worker : workers) {
+            final var executionStats = new ArrayList<ExecutionStats>();
+            for (int queryID = 0; queryID < queryNumber; queryID++) {
+                // successful execution
+                final var sucHttpCode = Optional.of(200);
+                final var sucDuration = Duration.ofSeconds(2);
+                final var sucLength = OptionalLong.of(1000);
+                final var responseBodyHash = OptionalLong.of(123);
+                time = time.plusSeconds(1);
+                executionStats.add(new ExecutionStats(queryID, time, sucDuration, sucHttpCode, sucLength, responseBodyHash, Optional.empty()));
+
+                // failed execution (http error)
+                var failHttpCode = Optional.of(404);
+                var failDuration = Duration.ofMillis(500);
+                var failLength = OptionalLong.empty();
+                var failResponseBodyHash = OptionalLong.empty();
+                var failException = new Exception("httperror");
+                time = time.plusSeconds(1);
+                executionStats.add(new ExecutionStats(queryID, time, failDuration, failHttpCode, failLength, failResponseBodyHash, Optional.of(failException)));
+
+                // failed execution (io exception)
+                failHttpCode = Optional.of(200);
+                failDuration = Duration.ofSeconds(5);
+                failLength = OptionalLong.empty();
+                failResponseBodyHash = OptionalLong.of(456);
+                failException = new Exception("io_exception");
+                time = time.plusSeconds(1);
+                executionStats.add(new ExecutionStats(queryID, time, failDuration, failHttpCode, failLength, failResponseBodyHash, Optional.of(failException)));
+            }
+            results.add(new Result(worker.getWorkerID(), executionStats, startTime, endTime));
+        }
+        return results;
+    }
+
+    public static List<HttpWorker> createWorkers(int idOffset, int workerNumber, QueryHandler queries, String connectionName, String connectionVersion, String datasetName) {
+        final var workers = new ArrayList<HttpWorker>();
+        for (int i = idOffset; i < workerNumber + idOffset; i++) {
+            workers.add(new MockupWorker(i, queries, connectionName, connectionVersion, datasetName, Duration.ofSeconds(2)));
+        }
+        return workers;
+    }
+
+}
diff --git a/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerConfigTest.java b/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerConfigTest.java
new file mode 100644
index 000000000..ace718d9f
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerConfigTest.java
@@ -0,0 +1,141 @@
+package
org.aksw.iguana.cc.query.handler;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.util.stream.Stream;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class QueryHandlerConfigTest {
+    private final ObjectMapper mapper = new ObjectMapper().setSerializationInclusion(JsonInclude.Include.NON_NULL);
+
+    private static Stream<Arguments> testDeserializationData() {
+        return Stream.of(
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.FOLDER,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.LINEAR,
+                                100L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","format":"folder","caching":true,"order":"linear","seed": 100, "lang":"SPARQL"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.ONE_PER_LINE,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.LINEAR,
+                                0L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.FOLDER,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.RANDOM,
+                                42L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","format":"folder","caching":true,"order":"random","seed":42,"lang":"SPARQL"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.SEPARATOR,
+                                "\n",
+                                true,
+                                QueryHandler.Config.Order.RANDOM,
+                                42L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","format":"separator", "separator": "\\n", "caching":true,"order":"random","seed":42,"lang":"SPARQL"}
+                        """
+                )
+        );
+    }
+
+    private static Stream<Arguments> testSerializationData() {
+        return Stream.of(
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.FOLDER,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.LINEAR,
+                                100L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","separator": "", "format":"folder","caching":true,"order":"linear","seed": 100, "lang":"SPARQL"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.ONE_PER_LINE,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.LINEAR,
+                                0L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries", "format":"one-per-line","separator":"","caching":true,"order":"linear","seed":0,"lang":"SPARQL"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.FOLDER,
+                                "",
+                                true,
+                                QueryHandler.Config.Order.RANDOM,
+                                42L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","format":"folder","separator":"","caching":true,"order":"random","seed":42,"lang":"SPARQL"}
+                        """
+                ),
+                Arguments.of(new QueryHandler.Config("some.queries",
+                                QueryHandler.Config.Format.SEPARATOR,
+                                "\n",
+                                true,
+                                QueryHandler.Config.Order.RANDOM,
+                                42L,
+                                QueryHandler.Config.Language.SPARQL
+                        ),
+                        """
+                        {"path":"some.queries","format":"separator", "separator": "\\n", "caching":true,"order":"random","seed":42,"lang":"SPARQL"}
+                        """
+                )
+        );
+    }
+
+    @ParameterizedTest
+    @MethodSource("testSerializationData")
+    public void testSerialization(QueryHandler.Config config, String expectedJson) throws Exception {
+        final String actual = mapper.writeValueAsString(config);
assertEquals(mapper.readTree(expectedJson), mapper.readTree(actual)); + } + + @ParameterizedTest + @MethodSource("testDeserializationData") + public void testDeserialization(QueryHandler.Config expected, String json) throws Exception { + final var actual = mapper.readValue(json, QueryHandler.Config.class); + + assertEquals(expected, actual); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerTest.java b/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerTest.java new file mode 100644 index 000000000..152fe2c7b --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/query/handler/QueryHandlerTest.java @@ -0,0 +1,185 @@ +package org.aksw.iguana.cc.query.handler; + +import com.fasterxml.jackson.databind.ObjectMapper; +import org.aksw.iguana.cc.query.selector.impl.LinearQuerySelector; +import org.aksw.iguana.cc.query.selector.impl.RandomQuerySelector; +import org.aksw.iguana.cc.query.source.QuerySource; +import org.aksw.iguana.cc.query.source.impl.FileLineQuerySource; +import org.aksw.iguana.cc.query.source.impl.FileSeparatorQuerySource; +import org.aksw.iguana.cc.query.source.impl.FolderQuerySource; +import org.aksw.iguana.cc.query.source.impl.FolderQuerySourceTest; +import org.junit.jupiter.api.*; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.*; + +import static org.junit.jupiter.api.Assertions.*; + +public class QueryHandlerTest { + + static Path parentFolder; + static Path tempDir; + static Path tempFileSep; + static Path tempFileLine; + + static List queries; + static List folderQueries; + + public static List data() { + final var out = new ArrayList(); + final var caching = List.of(true, false); + + for (var cache : caching) { + out.add(Arguments.of(String.format(""" + {"path":"%s","format":"folder","order":"linear","lang":"SPARQL", "caching": %s} + """, tempDir.toString().replaceAll("\\\\", "\\\\\\\\"), cache), + FolderQuerySource.class)); + out.add(Arguments.of(String.format(""" + {"path":"%s","format":"one-per-line","order":"linear","lang":"SPARQL", "caching": %s} + """, tempFileLine.toString().replaceAll("\\\\", "\\\\\\\\"), cache), + FileLineQuerySource.class)); + out.add(Arguments.of(String.format(""" + {"path":"%s","format":"separator", "separator": "\\n###\\n", "order":"linear","lang":"SPARQL", "caching": %s} + """, tempFileSep.toString().replaceAll("\\\\", "\\\\\\\\"), cache), + FileSeparatorQuerySource.class)); + } + + return out; + } + + @BeforeAll + public static void createFolder() throws IOException { + parentFolder = Files.createTempDirectory("iguana-query-handler-test"); + tempDir = Files.createTempDirectory(parentFolder, "folder-query-source-test-dir"); + tempFileSep = Files.createTempFile(parentFolder, "Query", ".txt"); + tempFileLine = Files.createTempFile(parentFolder, "Query", ".txt"); + + queries = new LinkedList<>(); + folderQueries = new LinkedList<>(); + + for (int i = 0; i < 10; i++) { + final Path queryFile = Files.createTempFile(tempDir, "Query", ".txt"); + final String content = UUID.randomUUID().toString(); + Files.writeString(queryFile, content); + Files.writeString(tempFileSep, content + "\n###\n", StandardCharsets.UTF_8, StandardOpenOption.APPEND); + Files.writeString(tempFileLine, content + 
"\n", StandardCharsets.UTF_8, StandardOpenOption.APPEND); + queries.add(new FolderQuerySourceTest.Query(queryFile, content)); + folderQueries.add(new FolderQuerySourceTest.Query(queryFile, content)); + } + // Queries in the folder are expected in alphabetic order of the file names. + Collections.sort(folderQueries); + } + + @AfterAll + public static void removeFolder() throws IOException { + org.apache.commons.io.FileUtils.deleteDirectory(parentFolder.toFile()); + } + + @ParameterizedTest + @MethodSource("data") + public void testDeserialization(String json, Class sourceType) throws Exception { + final var mapper = new ObjectMapper(); + QueryHandler queryHandler = assertDoesNotThrow(() -> mapper.readValue(json, QueryHandler.class)); + final var selector = queryHandler.getQuerySelectorInstance(); + assertInstanceOf(LinearQuerySelector.class, selector); + assertEquals(queries.size(), queryHandler.getQueryCount()); + assertNotEquals(0, queryHandler.hashCode()); + for (int i = 0; i < queryHandler.getQueryCount(); i++) { + final var wrapper = queryHandler.getNextQuery(selector); + assertEquals(i, selector.getCurrentIndex()); + if (FolderQuerySource.class.isAssignableFrom(sourceType)) + assertEquals(folderQueries.get(i).content(), wrapper.query()); + else + assertEquals(queries.get(i).content(), wrapper.query()); + assertEquals(i, wrapper.index()); + } + } + + @ParameterizedTest + @MethodSource("data") + public void testQueryStreamWrapper(String json, Class sourceType) throws IOException { + final var mapper = new ObjectMapper(); + QueryHandler queryHandler = assertDoesNotThrow(() -> mapper.readValue(json, QueryHandler.class)); + final var selector = queryHandler.getQuerySelectorInstance(); + assertTrue(selector instanceof LinearQuerySelector); + assertEquals(queries.size(), queryHandler.getQueryCount()); + assertNotEquals(0, queryHandler.hashCode()); + for (int i = 0; i < queryHandler.getQueryCount(); i++) { + final var wrapper = queryHandler.getNextQueryStream(selector); + assertEquals(i, selector.getCurrentIndex()); + final var acutalQuery = new String(wrapper.queryInputStreamSupplier().get().readAllBytes(), StandardCharsets.UTF_8); + if (FolderQuerySource.class.isAssignableFrom(sourceType)) + assertEquals(folderQueries.get(i).content(), acutalQuery); + else + assertEquals(queries.get(i).content(), acutalQuery); + assertEquals(i, wrapper.index()); + } + } + + @ParameterizedTest + @MethodSource("data") + public void testQueryStringWrapper(String json, Class sourceType) throws IOException { + final var mapper = new ObjectMapper(); + QueryHandler queryHandler = assertDoesNotThrow(() -> mapper.readValue(json, QueryHandler.class)); + final var selector = queryHandler.getQuerySelectorInstance(); + assertInstanceOf(LinearQuerySelector.class, selector); + assertEquals(queries.size(), queryHandler.getQueryCount()); + assertNotEquals(0, queryHandler.hashCode()); + for (int i = 0; i < queryHandler.getQueryCount(); i++) { + final var wrapper = queryHandler.getNextQuery(selector); + assertEquals(i, selector.getCurrentIndex()); + if (FolderQuerySource.class.isAssignableFrom(sourceType)) + assertEquals(folderQueries.get(i).content(), wrapper.query()); + else + assertEquals(queries.get(i).content(), wrapper.query()); + assertEquals(i, wrapper.index()); + } + } + + @ParameterizedTest + @MethodSource("data") + public void testQueryIDs(String json, Class sourceType) { + final var mapper = new ObjectMapper(); + QueryHandler queryHandler = assertDoesNotThrow(() -> mapper.readValue(json, QueryHandler.class)); + 
final var selector = queryHandler.getQuerySelectorInstance();
+        assertInstanceOf(LinearQuerySelector.class, selector);
+        assertEquals(queries.size(), queryHandler.getQueryCount());
+        assertNotEquals(0, queryHandler.hashCode());
+        final var allQueryIDs = queryHandler.getAllQueryIds();
+        for (int i = 0; i < queryHandler.getQueryCount(); i++) {
+            assertEquals(queryHandler.hashCode() + ":" + i, allQueryIDs[i]);
+            assertEquals(allQueryIDs[i], queryHandler.getQueryId(i));
+        }
+    }
+
+    @Test
+    public void testRandomQuerySelectorSeedConsistency() throws IOException {
+        String[] json = new String[2];
+        json[0] = String.format("""
+                {"path":"%s","format":"folder","order":"random", "seed": 100,"lang":"SPARQL"}
+                """, tempDir.toString().replaceAll("\\\\", "\\\\\\\\")); // escape backslashes in Windows paths
+        json[1] = String.format("""
+                {"path":"%s","format":"one-per-line","order":"random", "seed": 100,"lang":"SPARQL"}
+                """, tempFileLine.toString().replaceAll("\\\\", "\\\\\\\\")); // this test needs two different configurations, because instances of the query handler are cached
+
+        final var mapper = new ObjectMapper();
+        List<Integer>[] indices = new ArrayList[2];
+        for (int i = 0; i < 2; i++) {
+            QueryHandler queryHandler = mapper.readValue(json[i], QueryHandler.class);
+            final var selector = queryHandler.getQuerySelectorInstance();
+            assertInstanceOf(RandomQuerySelector.class, selector);
+            indices[i] = new ArrayList<>();
+            for (int j = 0; j < 100000; j++) {
+                indices[i].add(selector.getNextIndex());
+            }
+        }
+        assertEquals(indices[0], indices[1]);
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/aksw/iguana/cc/query/list/QueryListTest.java b/src/test/java/org/aksw/iguana/cc/query/list/QueryListTest.java
new file mode 100644
index 000000000..92c5ad6d2
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/query/list/QueryListTest.java
@@ -0,0 +1,145 @@
+package org.aksw.iguana.cc.query.list;
+
+import org.aksw.iguana.cc.query.list.impl.FileBasedQueryList;
+import org.aksw.iguana.cc.query.list.impl.InMemQueryList;
+import org.aksw.iguana.cc.query.source.QuerySource;
+import org.aksw.iguana.cc.query.source.impl.FileLineQuerySource;
+import org.aksw.iguana.cc.query.source.impl.FileSeparatorQuerySource;
+import org.aksw.iguana.cc.query.source.impl.FolderQuerySource;
+import org.apache.commons.io.FileUtils;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Named;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.io.IOException;
+import java.lang.reflect.InvocationTargetException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.UUID;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+class QueryListTest {
+    private enum QuerySourceType {
+        FILE_LINE,
+        FILE_SEPARATOR,
+        FOLDER,
+    }
+
+    static Path tempDir;
+    static List<Arguments> cachedArguments = null;
+
+    private static QueryList createQueryList(Class<?> queryListClass, QuerySource querySource) {
+        try {
+            return (QueryList) queryListClass.getConstructor(QuerySource.class).newInstance(querySource);
+        } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    @BeforeAll
+    public static void createFolder() throws IOException {
+        tempDir =
Files.createTempDirectory("iguana-folder-query-source-test-dir"); + } + + @AfterAll + public static void deleteFolder() throws IOException { + FileUtils.deleteDirectory(tempDir.toFile()); + } + + public static List data() throws IOException { + if (cachedArguments != null) + return cachedArguments; + + final var queryListClasses = List.of(InMemQueryList.class, FileBasedQueryList.class); + final var querySources = List.of(QuerySourceType.FILE_SEPARATOR, QuerySourceType.FILE_LINE, QuerySourceType.FOLDER); + final var sizes = List.of(1, 2, 10, 100, 1000); + + final var out = new ArrayList(); + for (var size : sizes) { + for (var querySourceType : querySources) { + for (var queryListClass : queryListClasses) { + final var queries = new ArrayList(); + for (int i = 0; i < size; i++) { + final String queryString = UUID.randomUUID().toString(); + queries.add(queryString); + } + QuerySource querySource = null; + switch (querySourceType) { + case FOLDER -> { + final var queryDir = Files.createTempDirectory(tempDir, "query-dir"); + for (int i = 0; i < size; i++) { + String filePrefix = String.format("Query-%09d.txt", i); // to ensure that the order from the queries List is the same as the order of the files in the folder + final Path queryFile = Files.createTempFile(queryDir, filePrefix, ".txt"); + Files.write(queryFile, queries.get(i).getBytes()); + } + querySource = new FolderQuerySource(queryDir); + } + case FILE_LINE -> { + final var queryFile = Files.createTempFile(tempDir, "Query", ".txt"); + Files.write(queryFile, String.join("\n", queries).getBytes()); + querySource = new FileLineQuerySource(queryFile); + } + case FILE_SEPARATOR -> { + final var queryFile = Files.createTempFile(tempDir, "Query", ".txt"); + Files.write(queryFile, String.join("\n###\n", queries).getBytes()); + querySource = new FileSeparatorQuerySource(queryFile, "\n###\n"); + } + } + String querySourceConfigString = String.format("[ type=%s, size=%d ]", querySourceType, size); + out.add(Arguments.of(Named.of(queryListClass.getSimpleName(), queryListClass), Named.of(querySourceConfigString, querySource), queries)); + } + } + } + cachedArguments = out; + return out; + } + + @Test + public void testIllegalArguments() { + assertThrows(IllegalArgumentException.class, () -> new InMemQueryList(null)); + assertThrows(IllegalArgumentException.class, () -> new FileBasedQueryList(null)); + } + + @ParameterizedTest(name = "[{index}] queryListClass={0}, querySourceConfig={1}") + @MethodSource("data") + public void testGetQuery(Class queryListClass, QuerySource querySource, List expectedQueries) throws IOException { + final var queryList = createQueryList(queryListClass, querySource); + for (int i = 0; i < expectedQueries.size(); i++) { + final var expectedQuery = expectedQueries.get(i); + assertEquals(expectedQuery, queryList.getQuery(i)); + } + } + + @ParameterizedTest(name = "[{index}] queryListClass={0}, querySourceConfig={1}") + @MethodSource("data") + public void testGetQueryStream(Class queryListClass, QuerySource querySource, List expectedQueries) throws IOException { + final var queryList = createQueryList(queryListClass, querySource); + for (int i = 0; i < expectedQueries.size(); i++) { + final var expectedQuery = expectedQueries.get(i); + final var queryString = new String(queryList.getQueryStream(i).readAllBytes(), StandardCharsets.UTF_8); + assertEquals(expectedQuery, queryString); + } + } + + @ParameterizedTest(name = "[{index}] queryListClass={0}, querySourceConfig={1}") + @MethodSource("data") + public void testSize(Class 
queryListClass, QuerySource querySource, List<String> expectedQueries) {
+        final var queryList = createQueryList(queryListClass, querySource);
+        assertEquals(expectedQueries.size(), queryList.size());
+    }
+
+    @ParameterizedTest(name = "[{index}] queryListClass={0}, querySourceConfig={1}")
+    @MethodSource("data")
+    public void testHashcode(Class<?> queryListClass, QuerySource querySource, List<String> expectedQueries) {
+        final var queryList = createQueryList(queryListClass, querySource);
+        assertTrue(queryList.hashCode() != 0);
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelectorTest.java b/src/test/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelectorTest.java
new file mode 100644
index 000000000..ca508685b
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/query/selector/impl/LinearQuerySelectorTest.java
@@ -0,0 +1,29 @@
+package org.aksw.iguana.cc.query.selector.impl;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+public class LinearQuerySelectorTest {
+
+    @ParameterizedTest()
+    @ValueSource(ints = {1, 2, 3, 4})
+    public void getNextIndexTest(int size) {
+        final var linearQuerySelector = new LinearQuerySelector(size);
+        assertEquals(-1, linearQuerySelector.getCurrentIndex());
+        for (int i = 0; i < 10; i++) {
+            int currentIndex = linearQuerySelector.getNextIndex();
+            assertEquals(i % size, currentIndex);
+            assertEquals(currentIndex, linearQuerySelector.getCurrentIndex());
+        }
+    }
+
+    @Test
+    public void throwOnLinearQuerySelectorSizeZero() {
+        final var size = 0;
+        assertThrows(IllegalArgumentException.class, () -> new LinearQuerySelector(size));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelectorTest.java b/src/test/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelectorTest.java
new file mode 100644
index 000000000..2353d983c
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/query/selector/impl/RandomQuerySelectorTest.java
@@ -0,0 +1,39 @@
+package org.aksw.iguana.cc.query.selector.impl;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.ValueSource;
+
+import static org.junit.jupiter.api.Assertions.*;
+
+public class RandomQuerySelectorTest {
+
+    @Test
+    public void testGetIndex() {
+        final var selector = new RandomQuerySelector(100, 10);
+        for (int i = 0; i < 10000; i++) {
+            int currentIndex = selector.getNextIndex();
+            assertTrue(0 <= currentIndex && currentIndex < 100);
+            assertEquals(currentIndex, selector.getCurrentIndex());
+        }
+    }
+
+    @ParameterizedTest
+    @ValueSource(ints = {-1, 0})
+    public void testThrowingOnIllegalSize(int size) {
+        // use the parameterized size instead of hard-coded values, otherwise the parameter has no effect
+        assertThrows(IllegalArgumentException.class, () -> new RandomQuerySelector(size, 0));
+    }
+
+    @ParameterizedTest
+    @ValueSource(ints = {1, 2, 3, 100000})
+    public void testSeedConsistency(int size) {
+        final var selector = new RandomQuerySelector(size, 0);
+        final var selector2 = new RandomQuerySelector(size, 0);
+        for (int i = 0; i < 100000; i++) {
+            // use a JUnit assertion rather than the `assert` keyword, which is a no-op unless the JVM runs with -ea
+            assertEquals(selector.getNextIndex(), selector2.getNextIndex());
+        }
+    }
+}
diff --git
a/src/test/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySourceTest.java b/src/test/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySourceTest.java new file mode 100644 index 000000000..521ec2df4 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/query/source/impl/FileLineQuerySourceTest.java @@ -0,0 +1,114 @@ +package org.aksw.iguana.cc.query.source.impl; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.text.StringEscapeUtils; +import org.junit.jupiter.api.*; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.*; + + +public class FileLineQuerySourceTest { + private record SourceConfig(int size, String lineEnding, boolean overshoot) { + @Override + public String toString() { + return "{ size: " + size + ", overshoot: " + overshoot + ", line_ending: " + StringEscapeUtils.escapeJava(lineEnding) + " }"; + } + } + + private final static Function queryTemplate = (i) -> "Query " + i + " {still query " + i + "}"; + + private static Path directory; + private static List cachedArguments = null; + + private static String createFileContent(int size, String lineEnding, boolean overshoot) { + final var stringBuilder = new StringBuilder(); + int limit = overshoot ? size : size - 1; + for (int i = 0; i < limit; i++) { + stringBuilder.append(queryTemplate.apply(i)).append(lineEnding); + } + if (!overshoot) { + stringBuilder.append(queryTemplate.apply(size - 1)); + } + return stringBuilder.toString(); + } + + public static List createTestSource() throws IOException { + if (cachedArguments != null) { + return cachedArguments; + } + List output = new ArrayList<>(); + int[] sizes = { 1, 1000 }; + String[] lineEndings = { "\n", "\r\n", "\r" }; + boolean[] overshoots = { false, true }; + for (int size : sizes) { + for (String lineEnding : lineEndings) { + for (boolean overshoot : overshoots) { + final var fileContent = createFileContent(size, lineEnding, overshoot); + final var filePath = Files.createTempFile(directory, "Query", ".txt"); + Files.writeString(filePath, fileContent); + final var querySource = new FileLineQuerySource(filePath); + output.add(Arguments.of(querySource, new SourceConfig(size, lineEnding, overshoot))); + } + } + } + cachedArguments = output; + return output; + } + + @BeforeAll + public static void createTempDirectory() throws IOException { + directory = Files.createTempDirectory("iguana-file-line-query-source-test-dir"); + } + + @AfterAll + public static void deleteTempDirectory() throws IOException { + FileUtils.deleteDirectory(directory.toFile()); + } + + @Test + public void testInitialization() throws IOException { + assertThrows(IllegalArgumentException.class, () -> new FileLineQuerySource(null)); + assertDoesNotThrow(() -> new FileLineQuerySource(Files.createTempFile(directory, "Query", ".txt"))); + final var notEmptyFile = Files.createTempFile(directory, "Query", ".txt"); + Files.writeString(notEmptyFile, "not empty"); + assertDoesNotThrow(() -> new FileLineQuerySource(notEmptyFile)); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void sizeTest(FileLineQuerySource querySource, SourceConfig config) throws IOException { 
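+        // One query per line is expected, independent of the line ending used ("\n", "\r\n", "\r") and of a trailing line ending (overshoot).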
+ assertEquals(config.size, querySource.size()); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getQueryTest(FileLineQuerySource querySource, SourceConfig config) throws IOException { + for (int i = 0; i < config.size; i++) { + assertEquals(queryTemplate.apply(i), querySource.getQuery(i)); + } + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getAllQueriesTest(FileLineQuerySource querySource, SourceConfig config) throws IOException { + List expected = IntStream.range(0, config.size).mapToObj(i -> queryTemplate.apply(i)).toList(); + assertEquals(expected, querySource.getAllQueries()); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getHashcodeTest(FileLineQuerySource querySource, SourceConfig config) { + assertTrue(querySource.hashCode() != 0); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySourceTest.java b/src/test/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySourceTest.java new file mode 100644 index 000000000..dca076593 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/query/source/impl/FileSeparatorQuerySourceTest.java @@ -0,0 +1,129 @@ +package org.aksw.iguana.cc.query.source.impl; + +import org.apache.commons.io.FileUtils; +import org.apache.commons.text.StringEscapeUtils; +import org.junit.jupiter.api.*; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiFunction; +import java.util.stream.IntStream; + +import static org.junit.jupiter.api.Assertions.*; + + +public class FileSeparatorQuerySourceTest { + private record SourceConfig(int size, String lineEnding, boolean overshoot, String separator) { + @Override + public String toString() { + return "{ size: " + size + + ", overshoot: " + overshoot + + ", line_ending: " + StringEscapeUtils.escapeJava(lineEnding) + + ", separator: " + StringEscapeUtils.escapeJava(separator) + " }"; + } + } + + private final static BiFunction queryTemplate = (i, le) -> "Query " + i + " {" + le + "still query " + i + le + "}"; + + private static Path directory; + private static List cachedArguments = null; + + private static String createFileContent(int size, String lineEnding, boolean overshoot, String separator) { + final var stringBuilder = new StringBuilder(); + int limit = overshoot ? 
size : size - 1; + for (int i = 0; i < limit; i++) { + stringBuilder.append(queryTemplate.apply(i, lineEnding)).append(separator); + } + if (!overshoot) { + stringBuilder.append(queryTemplate.apply(size - 1, lineEnding)); + } + return stringBuilder.toString(); + } + + public static List createTestSource() throws IOException { + if (cachedArguments != null) { + return cachedArguments; + } + List output = new ArrayList<>(); + int[] sizes = { 1, 1000 }; + String[] lineEndings = { "\n", "\r\n", "\r" }; + boolean[] overshoots = { false, true }; // tests if there is no empty query at the end + String[] separators = { "\n\t\t", "\n###\n", "###", ""}; + for (int size : sizes) { + for (String lineEnding : lineEndings) { + for (boolean overshoot : overshoots) { + for (String separator : separators) { + String fileContent; + if (separator.isEmpty()) + fileContent = createFileContent(size, lineEnding, overshoot, lineEnding + lineEnding); // make empty lines + else + fileContent = createFileContent(size, lineEnding, overshoot, separator); + final var filePath = Files.createTempFile(directory, "Query", ".txt"); + Files.writeString(filePath, fileContent); + FileSeparatorQuerySource querySource; + if (separator.equals("###")) + querySource = new FileSeparatorQuerySource(filePath); // test default separator + else + querySource = new FileSeparatorQuerySource(filePath, separator); + output.add(Arguments.of(querySource, new SourceConfig(size, lineEnding, overshoot, separator))); + } + } + } + } + cachedArguments = output; + return output; + } + + @BeforeAll + public static void createTempDirectory() throws IOException { + directory = Files.createTempDirectory("iguana-file-line-query-source-test-dir"); + } + + @AfterAll + public static void deleteTempDirectory() throws IOException { + FileUtils.deleteDirectory(directory.toFile()); + } + + @Test + public void testInitialization() throws IOException { + assertThrows(IllegalArgumentException.class, () -> new FileSeparatorQuerySource(null)); + assertDoesNotThrow(() -> new FileSeparatorQuerySource(Files.createTempFile(directory, "Query", ".txt"), "###")); + final var notEmptyFile = Files.createTempFile(directory, "Query", ".txt"); + Files.writeString(notEmptyFile, "not empty"); + assertDoesNotThrow(() -> new FileSeparatorQuerySource(notEmptyFile)); + assertDoesNotThrow(() -> new FileSeparatorQuerySource(notEmptyFile, "\n\n\n")); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void sizeTest(FileSeparatorQuerySource querySource, SourceConfig config) throws IOException { + assertEquals(config.size, querySource.size()); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getQueryTest(FileSeparatorQuerySource querySource, SourceConfig config) throws IOException { + for (int i = 0; i < config.size; i++) { + assertEquals(queryTemplate.apply(i, config.lineEnding), querySource.getQuery(i)); + } + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getAllQueriesTest(FileSeparatorQuerySource querySource, SourceConfig config) throws IOException { + List expected = IntStream.range(0, config.size).mapToObj(i -> queryTemplate.apply(i, config.lineEnding)).toList(); + assertEquals(expected, querySource.getAllQueries()); + } + + @ParameterizedTest(name = "[{index}] config = {1}") + @MethodSource("createTestSource") + public void getHashcodeTest(FileSeparatorQuerySource querySource, SourceConfig config) { + 
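+        // Sanity check only: 0 would hint at an uninitialized hash; the exact value (presumably derived from the file content) is not asserted here.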
assertTrue(querySource.hashCode() != 0); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySourceTest.java b/src/test/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySourceTest.java new file mode 100644 index 000000000..811cd26c5 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/query/source/impl/FolderQuerySourceTest.java @@ -0,0 +1,72 @@ +package org.aksw.iguana.cc.query.source.impl; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.*; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class FolderQuerySourceTest { + static Path tempDir; + + public record Query(Path queryFile, String content) implements Comparable { + + @Override + public int compareTo(Query other) { + return this.queryFile.compareTo(other.queryFile); + } + } + + public static List data() throws IOException { + final var sizes = List.of(1, 2, 10, 100, 1000); + final var out = new ArrayList(); + for (int size : sizes) { + final var queries = new LinkedList(); + final var queryDir = Files.createTempDirectory(tempDir, "query-dir"); + for (int i = 0; i < size; i++) { + final Path queryFile = Files.createTempFile(queryDir, "Query", ".txt"); + final String content = UUID.randomUUID().toString(); + Files.write(queryFile, content.getBytes(StandardCharsets.UTF_8)); + queries.add(new Query(queryFile, content)); + } + // Queries in the folder are expected in alphabetic order of the file names. 
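+            // The Query record implements Comparable over its file path, so sorting reproduces exactly that order.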
+ Collections.sort(queries); + out.add(Arguments.of(queryDir, queries)); + } + return out; + } + + @BeforeAll + public static void createFolder() throws IOException { + tempDir = Files.createTempDirectory("iguana-folder-query-source-test-dir"); + } + + + @AfterAll + public static void removeFolder() throws IOException { + org.apache.commons.io.FileUtils.deleteDirectory(tempDir.toFile()); + } + + @ParameterizedTest + @MethodSource("data") + public void testFolderQuerySource(Path tempDir, List expectedQueries) throws IOException { + FolderQuerySource querySource = new FolderQuerySource(tempDir); + + assertEquals(expectedQueries.size(), querySource.size()); + + for (int i = 0; i < querySource.size(); i++) { + assertEquals(expectedQueries.get(i).content, querySource.getQuery(i)); + } + + assertEquals(expectedQueries.stream().map(q -> q.content).collect(Collectors.toList()), querySource.getAllQueries()); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/cc/query/source/impl/QuerySourceTest.java b/src/test/java/org/aksw/iguana/cc/query/source/impl/QuerySourceTest.java new file mode 100644 index 000000000..b5baefe8c --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/query/source/impl/QuerySourceTest.java @@ -0,0 +1,17 @@ +package org.aksw.iguana.cc.query.source.impl; + +import org.junit.jupiter.api.Test; + +import java.nio.file.Path; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +public class QuerySourceTest { + + @Test + public void testIllegalArguments() { + assertThrows(IllegalArgumentException.class, () -> new FileLineQuerySource(null)); + assertThrows(IllegalArgumentException.class, () -> new FileSeparatorQuerySource(null, "\n")); + assertThrows(IllegalArgumentException.class, () -> new FolderQuerySource(null)); + } +} diff --git a/src/test/java/org/aksw/iguana/cc/storage/impl/CSVStorageTest.java b/src/test/java/org/aksw/iguana/cc/storage/impl/CSVStorageTest.java new file mode 100644 index 000000000..a77333377 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/storage/impl/CSVStorageTest.java @@ -0,0 +1,141 @@ +package org.aksw.iguana.cc.storage.impl; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import org.aksw.iguana.cc.mockup.MockupQueryHandler; +import org.aksw.iguana.cc.mockup.MockupWorker; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.FileReader; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.stream.Collectors; + +import static org.junit.jupiter.api.Assertions.*; + +public class CSVStorageTest extends StorageTest { + private static final String EXPECTED_FILES_DIR = "src/test/resources/test-data/csv-storage-test/"; + + public static List data() { + resetDate(); + final var workersTask1 = List.of( + MockupWorker.createWorkers(0, 2, new MockupQueryHandler(0, 10), "test-connection-1", "v1.0.0", "test-dataset-1"), + MockupWorker.createWorkers(2, 2, new MockupQueryHandler(1, 10), "test-connection-2", "v1.1.0", "test-dataset-2") + ); + + final var workersTask2 = List.of( + MockupWorker.createWorkers(0, 2, new MockupQueryHandler(2, 5), "test-connection-3", "v1.2.0", "test-dataset-3"), + MockupWorker.createWorkers(2, 2, new MockupQueryHandler(3, 5), "test-connection-4", "v1.3.0", "test-dataset-4") + ); + + return 
List.of(Arguments.of(List.of(createTaskResult(workersTask1, 0, "123"), createTaskResult(workersTask2, 1, "123")))); + } + + @ParameterizedTest + @MethodSource("data") + protected void testCSVStorage(List results) throws IOException { + final var storage = new CSVStorage(tempDir.toString(), getMetrics(), "123"); + for (var result : results) + storage.storeResult(result.resultModel()); + + final var expectedFiles = Path.of(EXPECTED_FILES_DIR); + final var actualFiles = tempDir; + + try (var files = Files.list(expectedFiles)) { + files.forEach( + x -> { + try { + compareFile(x, actualFiles.resolve(x.getFileName())); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + ); + } + + storage.storeData(new TestStorable()); + final var path = tempDir.resolve("suite-123").resolve("task-1").resolve("csv-folder").toFile(); + assertTrue(path.exists()); + assertTrue(path.isDirectory()); + assertEquals(2, path.listFiles().length); + for (var file : path.listFiles()) { + if (file.getName().equals("csv-file-1.csv")) + assertEquals(2, Files.readAllLines(file.toPath()).size()); + else if (file.getName().equals("csv-file-2.csv")) + assertEquals(3, Files.readAllLines(file.toPath()).size()); + else + throw new RuntimeException("Unexpected file name: " + file.getName()); + } + } + + private void compareFile(Path expected, Path actual) throws IOException { + if (Files.isDirectory(expected)) { + assertTrue(Files.isDirectory(actual), String.format("Expected directory %s but found file %s", expected, actual)); + assertEquals(expected.getFileName(), actual.getFileName()); + try (var files = Files.list(expected)) { + files.forEach(x -> { + try { + compareFile(x, actual.resolve(x.getFileName())); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + } + } else if (Files.isRegularFile(expected)) { + assertTrue(Files.isRegularFile(actual), String.format("Expected file %s but found directory %s", expected, actual)); + assertEquals(expected.getFileName(), actual.getFileName()); + compareCSVFiles(expected, actual); + } else { + throw new RuntimeException(String.format("Expected file or directory %s but found nothing", expected)); + } + } + + private void compareCSVFiles(Path expected, Path actual) throws IOException { + try (CSVReader readerExpected = new CSVReader(new FileReader(expected.toFile())); + CSVReader readerActual = new CSVReader(new FileReader(actual.toFile()))) { + + String[] headerExpected; + String[] headerActual; + try { + headerExpected = readerExpected.readNext(); + headerActual = readerActual.readNext(); + } catch (CsvException e) { + throw new RuntimeException(String.format("CSV format in the header of file %s is malformed.", expected), e); + } + + assertEquals(headerExpected.length, headerActual.length, String.format("Headers don't match. Actual: %s, Expected: %s", Arrays.toString(headerActual), Arrays.toString(headerExpected))); + for (int i = 0; i < headerExpected.length; i++) { + assertEquals(headerExpected[i], headerActual[i], String.format("Headers don't match. 
Actual: %s, Expected: %s", Arrays.toString(headerActual), Arrays.toString(headerExpected))); + } + + List expectedValues; + List actualValues; + + try { + expectedValues = new ArrayList<>(readerExpected.readAll()); + actualValues = new ArrayList<>(readerActual.readAll()); + } catch (CsvException e) { + throw new RuntimeException(String.format("CSV format in file %s is malformed.", expected), e); + } + + for (String[] expectedLine : expectedValues) { + List sameLines = actualValues.stream().filter(x -> { + for (int i = 0; i < expectedLine.length; i++) { + if (!expectedLine[i].equals(x[i])) return false; + } + return true; + }).toList(); + + assertFalse(sameLines.isEmpty(), String.format("Line (%s) not found in actual file", Arrays.toString(expectedLine))); + actualValues.remove(sameLines.get(0)); + } + assertTrue(actualValues.isEmpty(), String.format("Actual file contains more lines than expected. Lines: %s", actualValues.stream().map(x -> "[" + String.join(", ", x) + "]").collect(Collectors.joining("\n")))); + } + } +} diff --git a/src/test/java/org/aksw/iguana/cc/storage/impl/RDFFileStorageTest.java b/src/test/java/org/aksw/iguana/cc/storage/impl/RDFFileStorageTest.java new file mode 100644 index 000000000..e251b55b0 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/storage/impl/RDFFileStorageTest.java @@ -0,0 +1,76 @@ +package org.aksw.iguana.cc.storage.impl; + +import org.aksw.iguana.cc.mockup.MockupQueryHandler; +import org.aksw.iguana.cc.mockup.MockupWorker; +import org.apache.jena.rdf.model.*; +import org.apache.jena.riot.RDFDataMgr; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.util.ArrayList; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +/** + * This Test class extends the StorageTest class and tests the RDFFileStorage class. 
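+ * It covers both fresh file creation (including an empty path, which presumably falls back to a generated default name, hence the getFileName() lookup after storing) and appending a second task result to an existing file.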
+ */ +public class RDFFileStorageTest extends StorageTest { + public static List data() { + resetDate(); + final var arguments = new ArrayList(); + + final var paths = new ArrayList<>(List.of("rdf-file-storage-test1.ttl", "rdf-file-storage-test1.nt", "rdf-file-storage-test1.nt", "")); + + final var queryHandler1 = new MockupQueryHandler(0, 10); + final var queryHandler2 = new MockupQueryHandler(1, 10); + + final var workers = List.of( + MockupWorker.createWorkers(0, 2, queryHandler1, "test-connection-1", "v1.0.0", "test-dataset-1"), + MockupWorker.createWorkers(2, 2, queryHandler2, "test-connection-1", "v1.0.0", "test-dataset-1") + ); + final var task1 = createTaskResult(workers, 0, "0"); + final var task2 = createTaskResult(workers, 1, "0"); + + // test file creation + for (String path : paths) { + arguments.add(Arguments.of(tempDir.resolve(path).toString(), List.of(task1), task1.resultModel())); + } + + // test file appending + Model concatenatedModel = ModelFactory.createDefaultModel().add(task1.resultModel()).add(task2.resultModel()); + arguments.add(Arguments.of(tempDir.resolve("rdf-file-storage-test2.ttl").toString(), List.of(task1, task2), concatenatedModel)); + return arguments; + } + + @ParameterizedTest + @MethodSource("data") + public void testRDFFileStorage(String path, List results, Model expectedModel) { + final var rdfStorage = new RDFFileStorage(path); + for (var result : results) { + rdfStorage.storeResult(result.resultModel()); + } + path = rdfStorage.getFileName(); + Model actualModel = RDFDataMgr.loadModel(path); + calculateModelDifference(expectedModel, actualModel); + // TODO: This test probably fails, because the expected model uses java's Duration objects for duration literals, + // while the actual model uses XSDDuration objects for duration literals. 
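+        // As a workaround, calculateModelDifference (defined below) compares statements by their string form instead of relying on model isomorphism.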
+ // assertTrue(actualModel.isIsomorphicWith(expectedModel)); + } + + private void calculateModelDifference(Model expectedModel, Model actualModel) { + List expectedStmts = new ArrayList<>(); + List actualStmts = new ArrayList<>(); + expectedModel.listStatements().forEach(s -> expectedStmts.add(s.toString())); + actualModel.listStatements().forEach(s -> actualStmts.add(s.toString())); + + for (String stmt : expectedStmts) { + if (!actualStmts.contains(stmt)) { + System.out.println("Expected but not found: " + stmt); + } + actualStmts.remove(stmt); + } + assertTrue(actualStmts.isEmpty()); + } +} diff --git a/src/test/java/org/aksw/iguana/cc/storage/impl/StorageTest.java b/src/test/java/org/aksw/iguana/cc/storage/impl/StorageTest.java new file mode 100644 index 000000000..c38586a4c --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/storage/impl/StorageTest.java @@ -0,0 +1,123 @@ +package org.aksw.iguana.cc.storage.impl; + +import org.aksw.iguana.cc.lang.LanguageProcessor; +import org.aksw.iguana.cc.metrics.Metric; +import org.aksw.iguana.cc.metrics.impl.*; +import org.aksw.iguana.cc.mockup.MockupWorker; +import org.aksw.iguana.cc.storage.Storable; +import org.aksw.iguana.cc.storage.Storage; +import org.aksw.iguana.cc.mockup.MockupStorage; +import org.aksw.iguana.cc.tasks.impl.StresstestResultProcessor; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.apache.commons.io.FileUtils; +import org.apache.jena.rdf.model.Model; +import org.apache.jena.rdf.model.ModelFactory; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Instant; +import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.*; +import java.util.function.Supplier; + + +public abstract class StorageTest { + @BeforeAll + public static void createFolder() throws IOException { + tempDir = Files.createTempDirectory("iguana-storage-test-dir"); + } + + @AfterAll + public static void deleteFolder() throws IOException { + FileUtils.deleteDirectory(tempDir.toFile()); + } + + public static class TestStorable implements Storable.AsCSV, Storable.AsRDF { + + @Override + public Storable.CSVData toCSV() { + final var data = new Storable.CSVData("csv-folder", List.of( + new Storable.CSVData.CSVFileData("csv-file-1", new String[][]{{"header-1", "header-2"}, {"randomString", "100"}}), + new Storable.CSVData.CSVFileData("csv-file-2", new String[][]{{"header-1", "header-2"}, {"randomString-2", "200"}, {"randomString-3", "300"}}) + )); + return data; + } + + @Override + public Model toRDF() { + Model m = ModelFactory.createDefaultModel(); + m.add(m.createResource("http://example.org/subject"), m.createProperty("http://example.org/predicate"), m.createResource("http://example.org/object")); + return m; + } + } + + /** + * This method resets the date to a fixed date. + * This is necessary to ensure that the tests are deterministic. + * The method needs to be called manually before retrieving the test data. 
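+     * <p>
+     * Call sequence as used by the data() providers of the subclasses (sketch):
+     * <pre>{@code
+     * resetDate();                                    // pin the clock first
+     * var task = createTaskResult(workers, 0, "123"); // then build deterministic results
+     * }</pre>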
+ */ + public static void resetDate() { + someDateTime = GregorianCalendar.from(ZonedDateTime.ofInstant(Instant.parse("2023-10-21T20:48:06.399Z"), ZoneId.of("Europe/Berlin"))); + } + + public record TaskResult(Model resultModel, List workerResults) {} + + protected static Path tempDir; + + private static Calendar someDateTime = GregorianCalendar.from(ZonedDateTime.ofInstant(Instant.parse("2023-10-21T20:48:06.399Z"), ZoneId.of("Europe/Berlin"))); + + private static Calendar getDateTime() { + someDateTime.add(Calendar.MINUTE, 1); + someDateTime.add(Calendar.SECOND, 18); + return someDateTime; + } + + public static List getMetrics() { + return List.of( + new AggregatedExecutionStatistics(), + new AvgQPS(), + new EachExecutionStatistic(), + new NoQ(), + new NoQPH(), + new PAvgQPS(1000), + new PQPS(1000), + new QMPH(), + new QPS() + ); + } + + // Argument is a List that contains lists of workers with the same configuration. + protected static TaskResult createTaskResult(List> workers, int taskID, String suiteID) { + final var queryIDs = new ArrayList(); + for (var list : workers) { + queryIDs.addAll(List.of(list.get(0).config().queries().getAllQueryIds())); + } + + final var metrics = getMetrics(); + final var storages = new ArrayList(); + final Supplier>> supplier = HashMap::new; + + final var ls = new MockupStorage(); + storages.add(ls); + + final var flatWorkerList = workers.stream().flatMap(Collection::stream).toList(); + + final var srp = new StresstestResultProcessor(suiteID, taskID, flatWorkerList, queryIDs, metrics, storages, supplier); + + final var workerResults = new ArrayList(); + for (var list : workers) { + workerResults.addAll(MockupWorker.createWorkerResults(list.get(0).config().queries(), list)); + } + + srp.process(workerResults); + Calendar startTime = (Calendar) getDateTime().clone(); + srp.calculateAndSaveMetrics(startTime, getDateTime()); + + return new TaskResult(ls.getResultModel(), workerResults); + } + +} diff --git a/src/test/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorageTest.java b/src/test/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorageTest.java new file mode 100644 index 000000000..7dc0694d3 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/storage/impl/TriplestoreStorageTest.java @@ -0,0 +1,73 @@ +package org.aksw.iguana.cc.storage.impl; + +import com.github.tomakehurst.wiremock.common.Slf4jNotifier; +import com.github.tomakehurst.wiremock.core.Options; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import com.github.tomakehurst.wiremock.junit5.WireMockExtension; +import com.github.tomakehurst.wiremock.stubbing.ServeEvent; +import org.aksw.iguana.cc.mockup.MockupQueryHandler; +import org.aksw.iguana.cc.mockup.MockupWorker; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.apache.jena.riot.RDFDataMgr; +import org.apache.jena.update.UpdateFactory; +import org.apache.jena.update.UpdateRequest; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.DisabledInNativeImage; +import org.junit.jupiter.api.extension.RegisterExtension; + +import java.io.StringWriter; +import java.net.URI; +import java.net.URISyntaxException; +import java.time.Duration; +import java.util.List; +import java.util.UUID; + +import static com.github.tomakehurst.wiremock.client.WireMock.*; +import static org.junit.jupiter.api.Assertions.assertTrue; + +@DisabledInNativeImage // WireMock is not supported in native image +public class TriplestoreStorageTest extends StorageTest { + + @RegisterExtension + public static 
WireMockExtension wm = WireMockExtension.newInstance()
+            .options(new WireMockConfiguration().useChunkedTransferEncoding(Options.ChunkedEncodingPolicy.NEVER).dynamicPort().notifier(new Slf4jNotifier(true)))
+            .failOnUnmatchedRequests(true)
+            .build();
+
+    @Test
+    public void dataTest() throws URISyntaxException {
+        resetDate();
+        final var uuid = UUID.randomUUID();
+        wm.stubFor(post(urlEqualTo("/ds/sparql"))
+                .willReturn(aResponse()
+                        .withStatus(200)))
+                .setId(uuid);
+
+        final List<List<HttpWorker>> worker = List.of(List.of(
+                new MockupWorker(
+                        0,
+                        new MockupQueryHandler(1, 2),
+                        "connection",
+                        "v2",
+                        "wikidata",
+                        Duration.ofSeconds(2))
+        ));
+        final var testData = createTaskResult(worker, 0, "1");
+
+        final var uri = new URI("http://localhost:" + wm.getPort() + "/ds/sparql");
+        final var storage = new TriplestoreStorage(String.valueOf(uri));
+        storage.storeResult(testData.resultModel());
+
+        List<ServeEvent> allServeEvents = wm.getAllServeEvents();
+        ServeEvent request = allServeEvents.get(0);
+        String body = request.getRequest().getBodyAsString();
+
+        StringWriter nt = new StringWriter();
+        RDFDataMgr.write(nt, testData.resultModel(), org.apache.jena.riot.Lang.NTRIPLES);
+
+        UpdateRequest updateRequestActual = UpdateFactory.create(body);
+        UpdateRequest updateRequestExpected = UpdateFactory.create().add("INSERT DATA { " + nt + " }");
+
+        assertTrue(updateRequestExpected.iterator().next().equalTo(updateRequestActual.iterator().next(), null));
+    }
+}
diff --git a/src/test/java/org/aksw/iguana/cc/suite/IguanaSuiteParserTest.java b/src/test/java/org/aksw/iguana/cc/suite/IguanaSuiteParserTest.java
new file mode 100644
index 000000000..9ab39b009
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/suite/IguanaSuiteParserTest.java
@@ -0,0 +1,36 @@
+package org.aksw.iguana.cc.suite;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.stream.Stream;
+
+class IguanaSuiteParserTest {
+
+    public static Stream<Arguments> validData() throws IOException {
+        final var dir = Path.of("./src/test/resources/suite-configs/valid/");
+        return Files.list(dir).map(Arguments::of);
+    }
+
+    public static Stream<Arguments> invalidData() throws IOException {
+        final var dir = Path.of("./src/test/resources/suite-configs/invalid/");
+        return Files.list(dir).map(Arguments::of);
+    }
+
+    @ParameterizedTest
+    @MethodSource("validData")
+    public void testValidDeserialization(Path config) throws IOException {
+        Assertions.assertTrue(IguanaSuiteParser.validateConfig(config));
+    }
+
+    @ParameterizedTest
+    @MethodSource("invalidData")
+    public void testInvalidDeserialization(Path config) throws IOException {
+        Assertions.assertFalse(IguanaSuiteParser.validateConfig(config));
+    }
+}
\ No newline at end of file
diff --git a/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java b/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java
new file mode 100644
index 000000000..b18d52704
--- /dev/null
+++ b/src/test/java/org/aksw/iguana/cc/utils/FileUtilsTest.java
@@ -0,0 +1,86 @@
+package org.aksw.iguana.cc.utils;
+
+import org.aksw.iguana.cc.utils.files.FileUtils;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.MethodSource;
+import org.junit.jupiter.params.provider.ValueSource;
+
+import java.io.*;
+import
java.nio.charset.StandardCharsets; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.UUID; + +import static java.nio.file.Files.createTempFile; +import static org.apache.commons.io.FileUtils.writeStringToFile; +import static org.junit.jupiter.api.Assertions.*; + +public class FileUtilsTest { + public static Path createTestFileWithLines(List content, String lineEnding) throws IOException { + final var file = createTempFile("getHashTest", ".txt"); + for (String s : content) { + writeStringToFile(file.toFile(), s + lineEnding, StandardCharsets.UTF_8, true); + } + file.toFile().deleteOnExit(); + return file; + } + + public static Path createTestFileWithContent(String content) throws IOException { + final var file = createTempFile("getHashTest", ".txt"); + writeStringToFile(file.toFile(), content, StandardCharsets.UTF_8, false); + file.toFile().deleteOnExit(); + return file; + } + + @ParameterizedTest + @ValueSource(strings = {"\n", "\r", "\r\n"}) + public void testGetLineEndings(String ending) throws IOException { + final var file = createTestFileWithLines(List.of("a", "b"), ending); + assertEquals(ending, FileUtils.getLineEnding(file)); + } + + public record IndexTestData( + String content, // String to be separated + String separator, + List indices // List of [offset, length] arrays + ) {} + + public static Collection data() { + return List.of( + new IndexTestData("", "a", Arrays.asList(new long[]{0, 0})), + new IndexTestData("a", "a", Arrays.asList(new long[]{0, 0}, new long[]{1, 0})), + new IndexTestData("abc", "b", Arrays.asList(new long[]{0, 1}, new long[]{2, 1})), + new IndexTestData("1\n2", "\n", Arrays.asList(new long[]{0, 1}, new long[]{2, 1})), + new IndexTestData("1\t2", "\t", Arrays.asList(new long[]{0, 1}, new long[]{2, 1})), + new IndexTestData("abcbd", "b", Arrays.asList(new long[]{0, 1}, new long[]{2, 1}, new long[]{4, 1})), + new IndexTestData("aab", "ab", Arrays.asList(new long[]{0, 1}, new long[]{3, 0})), + new IndexTestData("aaaabaabaa", "ab", Arrays.asList(new long[]{0, 3}, new long[]{5, 1}, new long[]{8, 2})), + new IndexTestData("1\n\t\n2", "\n\t\n", Arrays.asList(new long[]{0, 1}, new long[]{4, 1})) + ); + } + + @ParameterizedTest + @MethodSource("data") + public void testIndexingStrings(IndexTestData data) throws IOException { + List index = FileUtils.indexStream(data.separator, new ByteArrayInputStream(data.content.getBytes())); + assertEquals(data.indices.size(), index.size()); + for (int i = 0; i < index.size(); i++) { + assertArrayEquals(data.indices.get(i), index.get(i)); + } + } + + @Test + public void getHashTest() throws IOException { + for (int i = 0; i < 10; i++) { + String content = UUID.randomUUID().toString(); + final var file = createTestFileWithContent(content); + final int expected = Math.abs(content.hashCode()); + final int actual = FileUtils.getHashcodeFromFileContent(file); + assertTrue(actual >= 0); + assertEquals(expected, actual); + } + } +}
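
The IndexTestData cases above fully determine the indexing semantics: each [offset, length] tuple addresses the bytes between two separator occurrences, so "aab" split on "ab" yields [0,1] ("a") and [3,0] (the empty chunk after the separator). As a reading aid, here is a toy, in-memory sketch of that contract; the class and method names are illustrative, and the real FileUtils.indexStream operates on an InputStream and may be implemented differently.

import java.util.ArrayList;
import java.util.List;

class IndexSketch {
    static List<long[]> index(String content, String separator) {
        List<long[]> out = new ArrayList<>();
        int from = 0;
        while (true) {
            int hit = content.indexOf(separator, from);
            if (hit < 0) { // last chunk runs to the end of the input
                out.add(new long[]{from, content.length() - from});
                return out;
            }
            out.add(new long[]{from, hit - from});
            from = hit + separator.length(); // skip the separator itself
        }
    }

    public static void main(String[] args) {
        for (long[] i : index("aaaabaabaa", "ab"))
            System.out.println(i[0] + "," + i[1]); // 0,3  5,1  8,2 as in the test data
    }
}

diff --git a/src/test/java/org/aksw/iguana/cc/utils/IndexedQueryReaderTest.java b/src/test/java/org/aksw/iguana/cc/utils/IndexedQueryReaderTest.java new file mode 100644 index 000000000..2bafe0ddf --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/utils/IndexedQueryReaderTest.java @@ -0,0 +1,170 @@ +package org.aksw.iguana.cc.utils; + +import org.aksw.iguana.cc.utils.files.IndexedQueryReader; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.params.ParameterizedTest; +import 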
org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.io.StringWriter; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class IndexedQueryReaderTest { + + private static Path tempDir; + + @BeforeAll + public static void createTestFolder() throws IOException { + tempDir = Files.createTempDirectory("iguana-indexed-query-reader-test"); + } + + @AfterAll + public static void removeData() throws IOException { + org.apache.commons.io.FileUtils.deleteDirectory(tempDir.toFile()); + } + + private record TestData ( + Path filepath, + String separator, + List expectedStrings + ) {} + + private static TestData createTestFile(String content, String separator, boolean emptyBegin, boolean leadingEmptyLine, int number, int spacing) throws IOException { + final var file = Files.createTempFile(tempDir, "line", "queries.txt"); + final var writer = new StringWriter(); + final var lines = new ArrayList(); + for (int i = (emptyBegin ? -1 : 0); i < (number * spacing) + 1; i++) { + if (i % spacing == 0) { + writer.append(content + i); + lines.add(content + i); + } + if (leadingEmptyLine || i != number * spacing) { + writer.append(separator); + } + } + Files.writeString(file, writer.toString()); + return new TestData(file, separator, lines); + } + + public static List indexWithLineEndingData() throws IOException { + final var out = new ArrayList(); + + final var numbers = List.of(1, 5, 10); + final var spacings = List.of(1, 2, 5, 10, 100, 1000000); + final var separators = List.of("\n", "\r\n", "\r"); + final var emptyBegins = List.of(true, false); + final var leadingEmptyLines = List.of(true, false); + + // cartesian product + for (var number : numbers) { + for (var spacing : spacings) { + for (var separator : separators) { + for (var emptyBegin : emptyBegins) { + for (var leadingEmptyLine : leadingEmptyLines) { + out.add(Arguments.of(createTestFile("line: ", separator, emptyBegin, leadingEmptyLine, number, spacing))); + } + } + } + } + } + + return out; + } + + public static List indexWithBlankLinesData() throws IOException { + final var out = new ArrayList(); + + final var numbers = List.of(1, 5, 10, 100, 10000); + final var spacings = List.of(2); + final var separators = List.of("\n", "\r\n", "\r"); + final var emptyBegins = List.of(false); + final var leadingEmptyLines = List.of(false); + + // cartesian product + for (var number : numbers) { + for (var spacing : spacings) { + for (var separator : separators) { + for (var emptyBegin : emptyBegins) { + for (var leadingEmptyLine : leadingEmptyLines) { + out.add(Arguments.of(createTestFile(String.format("this is %s line: ", separator), separator, emptyBegin, leadingEmptyLine, number, spacing))); + out.add(Arguments.of(createTestFile("line: ", separator, emptyBegin, leadingEmptyLine, number, spacing))); + out.add(Arguments.of(createTestFile(String.format("make this %s three lines %s long: ", separator, separator), separator, emptyBegin, leadingEmptyLine, number, spacing))); + } + } + } + } + } + + return out; + } + + public static List indexWithCustomSeparatorData() throws IOException { + final var out = new ArrayList(); + + final var numbers = List.of(1, 5, 10, 100, 10000); + final var spacings = List.of(1); + final var separators = List.of("\n", "\r\n", "\r", "\n+++\n", "\t\t\t", "test", "###$"); + final var emptyBegins = List.of(false); + 
final var leadingEmptyLines = List.of(false); + + // cartesian product + for (var number : numbers) { + for (var spacing : spacings) { + for (var separator : separators) { + for (var emptyBegin : emptyBegins) { + for (var leadingEmptyLine : leadingEmptyLines) { + out.add(Arguments.of(createTestFile("line: ", separator, emptyBegin, leadingEmptyLine, number, spacing))); + } + } + } + } + } + + final var file1 = Files.createTempFile(tempDir, "iguana", "queries.txt"); + final var file2 = Files.createTempFile(tempDir, "iguana", "queries.txt"); + Files.writeString(file1, "a####$b"); + Files.writeString(file2, "a21212111b"); + + out.add(Arguments.of(new TestData(file1, "###$", List.of("a#", "b")))); + out.add(Arguments.of(new TestData(file2, "211", List.of("a2121", "1b")))); + + return out; + } + + @ParameterizedTest + @MethodSource("indexWithLineEndingData") + public void testIndexingWithLineEndings(TestData data) throws IOException { + var reader = IndexedQueryReader.make(data.filepath); + for (int i = 0; i < data.expectedStrings.size(); i++) { + assertEquals(data.expectedStrings.get(i), reader.readQuery(i)); + } + assertEquals(data.expectedStrings.size(), reader.size()); + } + + @ParameterizedTest + @MethodSource("indexWithBlankLinesData") + public void testIndexingWithBlankLines(TestData data) throws IOException { + IndexedQueryReader reader = IndexedQueryReader.makeWithEmptyLines(data.filepath); + for (int i = 0; i < data.expectedStrings.size(); i++) { + assertEquals(data.expectedStrings.get(i), reader.readQuery(i)); + } + assertEquals(data.expectedStrings.size(), reader.size()); + } + + @ParameterizedTest + @MethodSource("indexWithCustomSeparatorData") + public void testIndexingWithCustomSeparator(TestData data) throws IOException { + IndexedQueryReader reader = IndexedQueryReader.makeWithStringSeparator(data.filepath, data.separator); + for (int i = 0; i < data.expectedStrings.size(); i++) { + assertEquals(data.expectedStrings.get(i), reader.readQuery(i)); + } + assertEquals(data.expectedStrings.size(), reader.size()); + } +} diff --git a/src/test/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorkerTest.java b/src/test/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorkerTest.java new file mode 100644 index 000000000..f7955b947 --- /dev/null +++ b/src/test/java/org/aksw/iguana/cc/worker/impl/SPARQLProtocolWorkerTest.java @@ -0,0 +1,311 @@ +package org.aksw.iguana.cc.worker.impl; + +import com.github.tomakehurst.wiremock.client.MappingBuilder; +import com.github.tomakehurst.wiremock.common.ConsoleNotifier; +import com.github.tomakehurst.wiremock.core.Options; +import com.github.tomakehurst.wiremock.core.WireMockConfiguration; +import com.github.tomakehurst.wiremock.http.Fault; +import com.github.tomakehurst.wiremock.junit5.WireMockExtension; +import org.aksw.iguana.cc.config.elements.ConnectionConfig; +import org.aksw.iguana.cc.config.elements.DatasetConfig; +import org.aksw.iguana.cc.query.handler.QueryHandler; +import org.aksw.iguana.cc.utils.http.RequestFactory; +import org.aksw.iguana.cc.worker.HttpWorker; +import org.aksw.iguana.cc.worker.ResponseBodyProcessor; +import org.junit.jupiter.api.*; +import org.junit.jupiter.api.condition.DisabledInNativeImage; +import org.junit.jupiter.api.extension.RegisterExtension; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.EnumSource; +import org.junit.jupiter.params.provider.MethodSource; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.nio.file.Path; +import java.time.Duration; +import java.time.temporal.ChronoUnit; +import java.util.ArrayDeque; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiFunction; +import java.util.function.Function; +import java.util.stream.Stream; + +import static com.github.tomakehurst.wiremock.client.WireMock.*; +import static org.junit.jupiter.api.Assertions.*; + +@DisabledInNativeImage // WireMock is not supported in native image +public class SPARQLProtocolWorkerTest { + + @RegisterExtension + public static WireMockExtension wm = WireMockExtension.newInstance() + .options(new WireMockConfiguration() + .useChunkedTransferEncoding(Options.ChunkedEncodingPolicy.NEVER) + .dynamicPort() + .notifier(new ConsoleNotifier(false)) + .jettyIdleTimeout(2000L) + .jettyStopTimeout(2000L) + .timeout(2000)) + .failOnUnmatchedRequests(true) + .build(); + + private final static String QUERY = "SELECT * WHERE { ?s ?p ?o }"; + private final static int QUERY_MIXES = 1; + private static Path queryFile; + + private static final Logger LOGGER = LoggerFactory.getLogger(SPARQLProtocolWorker.class); + + @BeforeAll + public static void setup() throws IOException { + queryFile = Files.createTempFile("iguana-test-queries", ".tmp"); + Files.writeString(queryFile, QUERY, StandardCharsets.UTF_8); + } + + @BeforeEach + public void reset() { + SPARQLProtocolWorker.initHttpClient(1); + wm.resetMappings(); // reset stubbing maps before each test + } + + @AfterAll + public static void cleanup() throws IOException { + Files.deleteIfExists(queryFile); + SPARQLProtocolWorker.closeHttpClient(); + } + + @AfterEach + public void verify() { + wm.resetAll(); + SPARQLProtocolWorker.closeHttpClient(); + } + + public static Stream requestFactoryData() throws URISyntaxException { + final var uri = new URI("http://localhost:" + wm.getPort() + "/ds/query"); + + final var processor = new ResponseBodyProcessor("application/sparql-results+json"); + final var format = QueryHandler.Config.Format.SEPARATOR; + Function queryHandlerSupplier = (cached) -> { + try { + return new QueryHandler(new QueryHandler.Config(queryFile.toAbsolutePath().toString(), format, null, cached, QueryHandler.Config.Order.LINEAR, 0L, QueryHandler.Config.Language.SPARQL)); + } catch (IOException e) { + throw new RuntimeException(e); + } + }; + final var datasetConfig = new DatasetConfig("TestDS", null); + final var connection = new ConnectionConfig("TestConn", "1", datasetConfig, uri, new ConnectionConfig.Authentication("testUser", "password"), null, null); + final var workers = new ArrayDeque(); + int i = 0; + for (var requestType : RequestFactory.RequestType.values()) { + for (var cached : List.of(true, false)) { + final var config = new SPARQLProtocolWorker.Config( + 1, + queryHandlerSupplier.apply(cached), + new HttpWorker.QueryMixes(QUERY_MIXES), + connection, + Duration.parse("PT6S"), + "application/sparql-results+json", + requestType, + true + ); + workers.add(Arguments.of(Named.of(requestType.name(), new SPARQLProtocolWorker(i++, processor, config)), Named.of(String.valueOf(cached), cached))); + } + } + return workers.stream(); + }
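
// Aside: requestFactoryData above builds one worker per RequestType and cached/uncached
// combination; the cached flag later decides whether the stubs in testRequestFactory
// expect a Content-Length header (cached) or chunked transfer encoding (uncached).

 + + public static List completionTargets() { + final var out = new ArrayList(); + final var queryMixesAmount = List.of(1, 2, 5, 10, 100, 200); + final var timeDurations = 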
List.of(Duration.of(1, ChronoUnit.SECONDS), Duration.of(5, ChronoUnit.SECONDS)); + + for (var queryMixes : queryMixesAmount) { + out.add(Arguments.of(new HttpWorker.QueryMixes(queryMixes))); + } + + for (var duration : timeDurations) { + out.add(Arguments.of(new HttpWorker.TimeLimit(duration))); + } + + return out; + } + + @ParameterizedTest(name = "[{index}] requestType = {0}, cached = {1}") + @MethodSource("requestFactoryData") + @DisplayName("Test Request Factory") + public void testRequestFactory(SPARQLProtocolWorker worker, boolean cached) { + BiFunction encoding = (builder, size) -> { + if (!cached) { + return builder.withHeader("Transfer-Encoding", equalTo("chunked")); + } else { + return builder.withHeader("Content-Length", equalTo(String.valueOf(size))); + } + }; + + MappingBuilder temp; + switch (worker.config().requestType()) { + case GET_QUERY -> + wm.stubFor(get(urlPathEqualTo("/ds/query")) + .withQueryParam("query", equalTo(QUERY)) + .withBasicAuth("testUser", "password") + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body"))); + case POST_QUERY -> { + temp = post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/sparql-query")) + .withBasicAuth("testUser", "password") + .withRequestBody(equalTo(QUERY)) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body")); + encoding.apply(temp, QUERY.length()); + wm.stubFor(temp); + } + case POST_UPDATE -> { + temp = post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/sparql-update")) + .withBasicAuth("testUser", "password") + .withRequestBody(equalTo(QUERY)) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body")); + encoding.apply(temp, QUERY.length()); + wm.stubFor(temp); + } + + case POST_URL_ENC_QUERY -> { + temp = post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withBasicAuth("testUser", "password") + .withRequestBody(equalTo("query=" + URLEncoder.encode(QUERY, StandardCharsets.UTF_8))) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body")); + encoding.apply(temp, 43); + wm.stubFor(temp); + } + case POST_URL_ENC_UPDATE -> { + temp = post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withBasicAuth("testUser", "password") + .withRequestBody(equalTo("update=" + URLEncoder.encode(QUERY, StandardCharsets.UTF_8))) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body")); + encoding.apply(temp, 44); + wm.stubFor(temp); + } + } + + final HttpWorker.Result result = worker.start().join(); + + assertEquals(QUERY_MIXES, result.executionStats().size(), "Worker should have executed exactly one query mix"); + assertNull(result.executionStats().get(0).error().orElse(null), "Worker threw an exception during execution"); + assertEquals(200, result.executionStats().get(0).httpStatusCode().get(), "Worker returned wrong status code"); + assertNotEquals(0, result.executionStats().get(0).responseBodyHash().getAsLong(), "Worker didn't return a response body hash"); + assertEquals("Non-Empty-Body".getBytes(StandardCharsets.UTF_8).length, result.executionStats().get(0).contentLength().getAsLong(), "Worker returned wrong content length"); + assertNotEquals(Duration.ZERO, result.executionStats().get(0).duration(), "Worker returned zero duration"); + }
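
// Aside: the hard-coded 43 and 44 above are the url-encoded body lengths.
// URLEncoder.encode("SELECT * WHERE { ?s ?p ?o }", UTF_8) yields 37 bytes,
// so "query=" plus the encoded query is 43 bytes and "update=" plus it is 44 bytes.

 + + @DisplayName("Test Malformed Response Processing") + @ParameterizedTest(name = "[{index}] fault = {0}") + @EnumSource(Fault.class) + public void 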
testMalformedResponseProcessing(Fault fault) throws IOException, URISyntaxException { + SPARQLProtocolWorker worker = (SPARQLProtocolWorker) ((Named)requestFactoryData().toList().get(0).get()[0]).getPayload(); + wm.stubFor(get(urlPathEqualTo("/ds/query")) + .willReturn(aResponse().withFault(fault))); + final HttpWorker.Result result = worker.start().join(); + assertEquals(1, result.executionStats().size()); + assertNotNull(result.executionStats().get(0).error().orElse(null)); + } + + @Test + public void testBadHttpCodeResponse() throws IOException, URISyntaxException { + SPARQLProtocolWorker worker = (SPARQLProtocolWorker) ((Named)requestFactoryData().toList().get(0).get()[0]).getPayload(); + wm.stubFor(get(urlPathEqualTo("/ds/query")) + .willReturn(aResponse().withStatus(404))); + final HttpWorker.Result result = worker.start().join(); + assertEquals(1, result.executionStats().size()); + assertTrue(result.executionStats().get(0).httpError()); + } + + @ParameterizedTest + @MethodSource("completionTargets") + public void testCompletionTargets(HttpWorker.CompletionTarget target) throws URISyntaxException, IOException { + final var uri = new URI("http://localhost:" + wm.getPort() + "/ds/query"); + final var processor = new ResponseBodyProcessor("application/sparql-results+json"); + final var queryHandler = new QueryHandler(new QueryHandler.Config(queryFile.toAbsolutePath().toString(), QueryHandler.Config.Format.SEPARATOR, null, true, QueryHandler.Config.Order.LINEAR, 0L, QueryHandler.Config.Language.SPARQL)); + final var datasetConfig = new DatasetConfig("TestDS", null); + final var connection = new ConnectionConfig("TestConn", "1", datasetConfig, uri, new ConnectionConfig.Authentication("testUser", "password"), null, null); + + final var config = new SPARQLProtocolWorker.Config( + 1, + queryHandler, + target, + connection, + Duration.parse("PT5S"), + "application/sparql-results+json", + RequestFactory.RequestType.POST_URL_ENC_QUERY, + false + ); + + SPARQLProtocolWorker worker = new SPARQLProtocolWorker(0, processor, config); + wm.stubFor(post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + // .withBasicAuth("testUser", "password") + .withRequestBody(equalTo("query=" + URLEncoder.encode(QUERY, StandardCharsets.UTF_8))) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body"))); + + final HttpWorker.Result result = worker.start().join(); + + for (var stat : result.executionStats()) { + if (stat.httpStatusCode().orElse(0) == 500) + continue; // ignore server errors + stat.error().ifPresent(ex -> LOGGER.error(ex.getMessage(), ex)); + assertTrue(stat.successful()); + assertTrue(stat.error().isEmpty()); + assertEquals(200, stat.httpStatusCode().orElseThrow()); + assertTrue(stat.contentLength().orElseThrow() > 0); + assertTrue(stat.duration().compareTo(Duration.ZERO) > 0); + } + + if (target instanceof HttpWorker.TimeLimit) { + Duration totalDuration = result.executionStats().stream() + .map(HttpWorker.ExecutionStats::duration) + .reduce(Duration::plus) + .get(); + + assertTrue(totalDuration.compareTo(((HttpWorker.TimeLimit) target).duration()) <= 0); + } else { + assertEquals(((HttpWorker.QueryMixes) target).number(), result.executionStats().size()); + } + } + + @Test + public void testTimeLimitExecutionCutoff() throws URISyntaxException, IOException { + final var uri = new URI("http://localhost:" + wm.getPort() + "/ds/query"); + + final var processor = new ResponseBodyProcessor("application/sparql-results+json"); + final var 
queryHandler = new QueryHandler(new QueryHandler.Config(queryFile.toAbsolutePath().toString(), QueryHandler.Config.Format.SEPARATOR, null, true, QueryHandler.Config.Order.LINEAR, 0L, QueryHandler.Config.Language.SPARQL)); + final var datasetConfig = new DatasetConfig("TestDS", null); + final var connection = new ConnectionConfig("TestConn", "1", datasetConfig, uri, new ConnectionConfig.Authentication("testUser", "password"), null, null); + + final var config = new SPARQLProtocolWorker.Config( + 1, + queryHandler, + new HttpWorker.TimeLimit(Duration.of(2, ChronoUnit.SECONDS)), + connection, + Duration.parse("PT2S"), + "application/sparql-results+json", + RequestFactory.RequestType.POST_URL_ENC_QUERY, + false + ); + + SPARQLProtocolWorker worker = new SPARQLProtocolWorker(0, processor, config); + wm.stubFor(post(urlPathEqualTo("/ds/query")) + .withHeader("Content-Type", equalTo("application/x-www-form-urlencoded")) + .withBasicAuth("testUser", "password") + .withRequestBody(equalTo("query=" + URLEncoder.encode(QUERY, StandardCharsets.UTF_8))) + .willReturn(aResponse().withStatus(200).withBody("Non-Empty-Body").withFixedDelay(1000))); + + final HttpWorker.Result result = worker.start().join(); + assertEquals(1, result.executionStats().size()); // because of the delay, only one query should be executed + } +} \ No newline at end of file
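
Aside: the stubs in this test class fix the exact wire shape of each request type. The following is a hedged sketch of the two most common shapes, written against the JDK's java.net.http purely for illustration; it is not the project's actual RequestFactory, and the class name RequestShapes is made up.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpRequest;
import java.nio.charset.StandardCharsets;

class RequestShapes {
    // GET_QUERY: the query travels url-encoded in the query string.
    static HttpRequest getQuery(URI endpoint, String query) {
        String url = endpoint + "?query=" + URLEncoder.encode(query, StandardCharsets.UTF_8);
        return HttpRequest.newBuilder(URI.create(url)).GET().build();
    }

    // POST_URL_ENC_QUERY: the query travels as an url-encoded form body.
    static HttpRequest postUrlEncQuery(URI endpoint, String query) {
        String body = "query=" + URLEncoder.encode(query, StandardCharsets.UTF_8);
        return HttpRequest.newBuilder(endpoint)
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(body))
                .build();
    }
}

diff --git a/src/test/java/org/aksw/iguana/commons/io/BigByteArrayInputStreamTest.java b/src/test/java/org/aksw/iguana/commons/io/BigByteArrayInputStreamTest.java new file mode 100644 index 000000000..939328b75 --- /dev/null +++ b/src/test/java/org/aksw/iguana/commons/io/BigByteArrayInputStreamTest.java @@ -0,0 +1,224 @@ +package org.aksw.iguana.commons.io; + +import com.google.common.primitives.Bytes; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Random; + +import static org.junit.jupiter.api.Assertions.*; + +@EnabledIfEnvironmentVariable(named = "RUN_LARGE_TESTS", matches = "true") +class BigByteArrayInputStreamTest { + + private static final int MAX_SINGLE_BUFFER_SIZE = Integer.MAX_VALUE - 8; + private static Random rng = new Random(); + + /** + * Creates a random 2d-array buffer with the given size. + * + * @param size number of bytes + * @param maxSingleBufferSize maximum size of a single array + * @return 2d-array buffer + */ + private static byte[][] getBigRandomBuffer(long size, int maxSingleBufferSize) { + if (size < 1) + return new byte[0][0]; + final var bufferField = new byte[(int) ((size - 1) / maxSingleBufferSize) + 1][]; + for (int i = 0; i < bufferField.length; i++) { + final var bufferSize = (size > maxSingleBufferSize) ? 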
maxSingleBufferSize : (int) size; + bufferField[i] = new byte[bufferSize]; + rng.nextBytes(bufferField[i]); + size -= bufferSize; + } + return bufferField; + } + + @Test + @DisplayName("Test illegal arguments") + public void testIllegalArguments() throws IOException { + final var bbaos = new BigByteArrayOutputStream(100); + final var data = 1; + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + + assertThrows(NullPointerException.class, () -> bbais.readNBytes(null, 0, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.readNBytes(new byte[1], -1, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.readNBytes(new byte[1], 0, -1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.readNBytes(new byte[1], 0, 2)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.readNBytes(new byte[1], 1, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.readNBytes(new byte[1], 2, 0)); + assertThrows(NullPointerException.class, () -> bbais.read(null, 0, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.read(new byte[1], -1, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.read(new byte[1], 0, -1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.read(new byte[1], 0, 2)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.read(new byte[1], 1, 1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbais.read(new byte[1], 2, 0)); + + assertThrows(NullPointerException.class, () -> new BigByteArrayInputStream((byte[]) null)); + assertThrows(NullPointerException.class, () -> new BigByteArrayInputStream((BigByteArrayOutputStream) null)); + } + + @Test + @DisplayName("Test read method with big data") + public void testBigRead() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var buffer = getBigRandomBuffer(((long) MAX_SINGLE_BUFFER_SIZE) + 1000L, MAX_SINGLE_BUFFER_SIZE - 1); + bbaos.write(buffer); + final var bbais = new BigByteArrayInputStream(bbaos); + + assertArrayEquals(buffer[0], bbais.readNBytes(MAX_SINGLE_BUFFER_SIZE - 1)); + assertArrayEquals(buffer[1], bbais.readNBytes(MAX_SINGLE_BUFFER_SIZE - 1)); + } + + @Test + @DisplayName("Test read method with small data") + public void testSmallRead() throws IOException { + final var bbaos = new BigByteArrayOutputStream(100); + final var data = 1; + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + assertEquals(data, bbais.read()); + assertEquals(-1, bbais.read()); + } + + @Test + @DisplayName("Test readAllBytes() method throws exception") + public void testReadAllBytesException() throws IOException { + final var bbais = new BigByteArrayInputStream(new byte[]{ 1,2,3,4 }); + assertThrows(IOException.class, () -> bbais.readAllBytes()); + } + + @Test + @DisplayName("Test readNBytes(len) method") + public void testReadMethods1() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var buffer = getBigRandomBuffer(1000, MAX_SINGLE_BUFFER_SIZE); + bbaos.write(buffer); + final var bbais = new BigByteArrayInputStream(bbaos); + + assertArrayEquals(Arrays.copyOfRange(buffer[0], 0, 500), bbais.readNBytes(500)); + assertArrayEquals(Arrays.copyOfRange(buffer[0], 500, 1000), bbais.readNBytes(510)); + assertArrayEquals(new byte[0], bbais.readNBytes(1)); + assertEquals(-1, bbais.read()); + }
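
// Aside: together with the next two tests, this pins down the stream's EOF contract,
// mirroring java.io.InputStream: a drained stream answers 0 from
// readNBytes(buffer, off, len), but -1 from read() and read(buffer, off, len).

 + + @Test + @DisplayName("Test readNBytes(buffer, off, len) method") + public void testReadMethods2() throws IOException { + final var bbaos = new 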
BigByteArrayOutputStream(); + final var data = getBigRandomBuffer(210, MAX_SINGLE_BUFFER_SIZE); + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + + final var buffer = new byte[100]; + assertEquals(100, bbais.readNBytes(buffer, 0, 100)); + assertArrayEquals(Arrays.copyOfRange(data[0], 0, 100), buffer); + assertEquals(50, bbais.readNBytes(buffer, 0, 50)); + assertEquals(50, bbais.readNBytes(buffer, 50, 50)); + assertArrayEquals(Arrays.copyOfRange(data[0], 100, 200), buffer); + assertEquals(10, bbais.readNBytes(buffer, 0, 100)); + assertArrayEquals(Arrays.copyOfRange(data[0], 200, 210), Arrays.copyOfRange(buffer, 0, 10)); + assertEquals(0, bbais.readNBytes(buffer, 0, 100)); + } + + @Test + @DisplayName("Test read(buffer, off, len) method") + public void testReadMethods3() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var data = getBigRandomBuffer(210, MAX_SINGLE_BUFFER_SIZE); + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + + final var buffer = new byte[100]; + assertEquals(100, bbais.read(buffer, 0, 100)); + assertArrayEquals(Arrays.copyOfRange(data[0], 0, 100), buffer); + assertEquals(50, bbais.read(buffer, 0, 50)); + assertEquals(50, bbais.read(buffer, 50, 50)); + assertArrayEquals(Arrays.copyOfRange(data[0], 100, 200), buffer); + assertEquals(10, bbais.read(buffer, 0, 100)); + assertArrayEquals(Arrays.copyOfRange(data[0], 200, 210), Arrays.copyOfRange(buffer, 0, 10)); + assertEquals(-1, bbais.read(buffer, 0, 100)); + } + + @Test + @DisplayName("Test read(buffer) method") + public void testReadMethods4() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var data = getBigRandomBuffer(110, MAX_SINGLE_BUFFER_SIZE); + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + + assertEquals(0, bbais.read(new byte[0])); + final var buffer = new byte[100]; + assertEquals(100, bbais.read(buffer)); + assertArrayEquals(Arrays.copyOfRange(data[0], 0, 100), buffer); + assertEquals(10, bbais.read(buffer)); + assertArrayEquals(Arrays.copyOfRange(data[0], 100, 110), Arrays.copyOfRange(buffer, 0 , 10)); + assertEquals(-1, bbais.read(buffer)); + } + + @Test + @DisplayName("Test read() method") + public void testReadMethods5() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var data = "test".getBytes(StandardCharsets.UTF_8); + bbaos.write(data); + final var bbais = new BigByteArrayInputStream(bbaos); + + List buffer = new ArrayList<>(); + byte currentByte; + while ((currentByte = (byte) bbais.read()) != -1) { + buffer.add(currentByte); + } + assertEquals("test", new String(Bytes.toArray(buffer), StandardCharsets.UTF_8)); + } + + + @Test + @DisplayName("Test bbaos is closed after reading") + public void testBbaosIsClosed() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + bbaos.write(new byte[] { 1, 2, 3, 4 }); + final var bbais = new BigByteArrayInputStream(bbaos); + assertEquals(1, bbais.read()); + assertEquals(2, bbais.read()); + assertEquals(3, bbais.read()); + assertEquals(4, bbais.read()); + assertEquals(-1, bbais.read()); + assertThrows(IOException.class, () -> bbaos.write("test".getBytes())); + } + + @Test + @DisplayName("Test skip() method with small data") + public void testSmallSkip() throws IOException { + final var bigBuffer = getBigRandomBuffer(400, MAX_SINGLE_BUFFER_SIZE); + final var bbaos = new BigByteArrayOutputStream(); + bbaos.write(bigBuffer); + final var bbais = new 
BigByteArrayInputStream(bbaos); + assertEquals(100, bbais.skip(100)); + assertArrayEquals(Arrays.copyOfRange(bigBuffer[0], 100, 200), bbais.readNBytes(100)); + assertEquals(200, bbais.skip(200)); + assertEquals(-1, bbais.read()); + assertEquals(0, bbais.skip(100)); + } + + @Test + @DisplayName("Test skip() method with big data") + public void testBigSkip() throws IOException { + final var bigBuffer = getBigRandomBuffer(((long) MAX_SINGLE_BUFFER_SIZE) * 2L, MAX_SINGLE_BUFFER_SIZE); + final var bbaos = new BigByteArrayOutputStream(); + bbaos.write(bigBuffer); + final var bbais = new BigByteArrayInputStream(bbaos); + assertEquals((MAX_SINGLE_BUFFER_SIZE * 2L) - 4, bbais.skip((MAX_SINGLE_BUFFER_SIZE * 2L) - 4)); + assertArrayEquals(Arrays.copyOfRange(bigBuffer[1], MAX_SINGLE_BUFFER_SIZE - 4, MAX_SINGLE_BUFFER_SIZE - 2), bbais.readNBytes(2)); + assertEquals(2, bbais.skip(200)); + assertEquals(-1, bbais.read()); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/commons/io/BigByteArrayOutputStreamTest.java b/src/test/java/org/aksw/iguana/commons/io/BigByteArrayOutputStreamTest.java new file mode 100644 index 000000000..21104d80c --- /dev/null +++ b/src/test/java/org/aksw/iguana/commons/io/BigByteArrayOutputStreamTest.java @@ -0,0 +1,310 @@ +package org.aksw.iguana.commons.io; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Named; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.util.List; +import java.util.Random; +import java.util.function.Supplier; + +import static org.junit.jupiter.api.Assertions.*; + +@EnabledIfEnvironmentVariable(named = "RUN_LARGE_TESTS", matches = "true") +class BigByteArrayOutputStreamTest { + final static Random rng = new Random(0); + + public static List data() { + final long maxSize = Integer.MAX_VALUE - 8; + + final Supplier sup1 = () -> getBigRandomBuffer(10L, (int) maxSize); + final Supplier sup2 = () -> getBigRandomBuffer(maxSize * 2L, (int) maxSize); + + return List.of( + Arguments.of(Named.of(String.valueOf(10), sup1), 10, new int[] { 10 }), + Arguments.of(Named.of(String.valueOf(10), sup1), maxSize * 2L, new int[] {(int) maxSize, (int) maxSize}), // small data, high initial capacity + Arguments.of(Named.of(String.valueOf(maxSize * 2L), sup2), maxSize * 2L, new int[] {(int) maxSize, (int) maxSize}), + Arguments.of(Named.of(String.valueOf(maxSize * 2L), sup2), 0, new int[] {(int) maxSize, (int) maxSize}) + ); + } + + /** + * Creates a random 2d-array buffer with the given size. + * + * @param size number of bytes + * @param maxSingleBufferSize maximum size of a single array + * @return 2d-array buffer + */ + public static byte[][] getBigRandomBuffer(long size, int maxSingleBufferSize) { + if (size < 1) + return new byte[0][0]; + final var bufferField = new byte[(int) ((size - 1) / maxSingleBufferSize) + 1][]; + for (int i = 0; i < bufferField.length; i++) { + final var bufferSize = (size > maxSingleBufferSize) ? 
maxSingleBufferSize : (int) size; + bufferField[i] = new byte[bufferSize]; + rng.nextBytes(bufferField[i]); + size -= bufferSize; + } + return bufferField; + } + + @Test + public void testClose() throws IOException { + final var bbaos = new BigByteArrayOutputStream(); + final var testData = "test123".getBytes(StandardCharsets.UTF_8); + bbaos.write(testData); + bbaos.close(); + assertThrows(IOException.class, () -> bbaos.clear()); + assertThrows(IOException.class, () -> bbaos.reset()); + assertThrows(IOException.class, () -> bbaos.write(1)); + assertThrows(IOException.class, () -> bbaos.write((byte) 1)); + assertThrows(IOException.class, () -> bbaos.write(new byte[][] {{1}}) ); + assertThrows(IOException.class, () -> bbaos.write(new byte[] {1}, 0, 1)); + assertThrows(IOException.class, () -> bbaos.write(new byte[] {1})); + assertThrows(IOException.class, () -> bbaos.write((new BigByteArrayOutputStream(10)))); + assertEquals(testData.length, bbaos.size()); + assertArrayEquals(new byte[][] {testData} , bbaos.toByteArray()); + assertEquals(1, bbaos.getBaos().size()); + assertArrayEquals(testData, bbaos.getBaos().get(0).toByteArray()); + } + + @Test + @DisplayName("Test basic write operations") + public void testOtherWriteMethods() throws IOException { + final byte[] buffer = getBigRandomBuffer(10, 10)[0]; + + final var b2 = new byte[] { 0, 1, 2, 3 }; + int i = ByteBuffer.wrap(b2).getInt(); + + try (final var bbaos = new BigByteArrayOutputStream()) { + assertDoesNotThrow(() -> bbaos.write(buffer[0])); + assertEquals(1, bbaos.size()); + assertEquals(buffer[0], bbaos.toByteArray()[0][0]); + + assertDoesNotThrow(() -> bbaos.write(buffer, 1, 9)); + assertEquals(10, bbaos.size()); + assertArrayEquals(buffer, bbaos.toByteArray()[0]); + + final var bbaos2 = new BigByteArrayOutputStream(1); + assertDoesNotThrow(() -> bbaos2.write(bbaos)); + assertEquals(10, bbaos2.size()); + assertArrayEquals(buffer, bbaos2.toByteArray()[0]); + + assertDoesNotThrow(() -> bbaos2.write(i)); + assertEquals(11, bbaos2.size()); + assertEquals(b2[3], bbaos2.toByteArray()[0][10]); // low order byte + } + } + + @Test + @DisplayName("Test illegal capacity arguments") + public void testNegativeCapacity() { + assertThrows(IllegalArgumentException.class, () -> new BigByteArrayOutputStream(-1)); + assertThrows(IllegalArgumentException.class, () -> new BigByteArrayOutputStream(-1L)); + }
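
// Aside on testOtherWriteMethods above: write(int) follows the OutputStream contract
// and stores only the low-order byte, (byte) i. Since ByteBuffer.wrap(b2).getInt()
// reads big-endian, that low-order byte is exactly b2[3].

 + + @Test + @DisplayName("Test illegal write arguments") + public void testIndexOutOfBounds() throws IOException { + try (final var bbaos = new BigByteArrayOutputStream()) { + final byte[] nullBuffer = null; + final var buffer = new byte[10]; + assertThrows(IndexOutOfBoundsException.class, () -> bbaos.write(buffer, -1, 10)); + assertThrows(IndexOutOfBoundsException.class, () -> bbaos.write(buffer, 0, -1)); + assertThrows(IndexOutOfBoundsException.class, () -> bbaos.write(buffer, 0, 11)); + assertThrows(NullPointerException.class, () -> bbaos.write(nullBuffer)); + } + } + + + @Test + @DisplayName("Test default constructor") + void testDefaultConstructor() throws IOException { + try (final var bbaos = new BigByteArrayOutputStream()) { + assertEquals(0, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(0, bbaos.getBaos().get(0).size()); + assertDoesNotThrow(() -> bbaos.write("test".getBytes(StandardCharsets.UTF_8))); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(4, bbaos.getBaos().get(0).size()); + assertEquals(4, bbaos.size()); + } + } + + @Test + @DisplayName("Test constructor with capacity argument") + void 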
testConstructorWithInt() throws IOException { + try (final var bbaos = new BigByteArrayOutputStream(100)) { + assertEquals(0, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(0, bbaos.getBaos().get(0).size()); + assertEquals(100, bbaos.getBaos().get(0).getBuffer().length); + assertDoesNotThrow(() -> bbaos.write("test".getBytes(StandardCharsets.UTF_8))); + assertEquals(4, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(4, bbaos.getBaos().get(0).size()); + assertEquals(100, bbaos.getBaos().get(0).getBuffer().length); + } + } + + @Test + @DisplayName("Test constructor with big capacity argument") + void testConstructorWithBigLong() throws IOException { + try (final var bbaos = new BigByteArrayOutputStream(((long) Integer.MAX_VALUE) + 10)) { + assertEquals(0, bbaos.size()); + assertEquals(2, bbaos.getBaos().size()); + assertEquals(0, bbaos.getBaos().get(0).size()); + assertEquals(0, bbaos.getBaos().get(1).size()); + assertNotEquals(0, bbaos.getBaos().get(0).getBuffer().length); // rough comparison + assertNotEquals(0, bbaos.getBaos().get(1).getBuffer().length); + assertDoesNotThrow(() -> bbaos.write("test".getBytes(StandardCharsets.UTF_8))); + assertEquals(4, bbaos.size()); + assertEquals(2, bbaos.getBaos().size()); + assertEquals(4, bbaos.getBaos().get(0).size()); + assertEquals(0, bbaos.getBaos().get(1).size()); + } + } + + @Test + @DisplayName("Test write method with big byte arrays") + void testBaosOverflow() throws IOException { + final var maxArraySize = Integer.MAX_VALUE - 8; + final var firstBufferSize = maxArraySize - 1; + final var secondBufferSize = 2; + try (final var bbaos = new BigByteArrayOutputStream(maxArraySize)) { + final var firstBuffer = getBigRandomBuffer(firstBufferSize, maxArraySize); + final var secondBuffer = getBigRandomBuffer(secondBufferSize, maxArraySize); + + assertEquals(0, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(0, bbaos.getBaos().get(0).size()); + assertEquals(maxArraySize, bbaos.getBaos().get(0).getBuffer().length); + assertDoesNotThrow(() -> bbaos.write(firstBuffer)); + for (int i = 0; i < firstBufferSize; i++) { + assertEquals(firstBuffer[0][i], bbaos.getBaos().get(0).getBuffer()[i]); // save memory during execution of this test with this loop + } + assertEquals(firstBufferSize, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); + assertEquals(firstBufferSize, bbaos.getBaos().get(0).size()); + assertArrayEquals(firstBuffer, bbaos.toByteArray()); + + // overflow first baos + assertDoesNotThrow(() -> bbaos.write(secondBuffer)); + assertEquals(maxArraySize, bbaos.getBaos().get(1).getBuffer().length); + assertEquals(firstBufferSize + secondBufferSize, bbaos.size()); + assertEquals(2, bbaos.getBaos().size()); + assertEquals(maxArraySize, bbaos.getBaos().get(0).size()); + assertEquals(secondBufferSize - (maxArraySize - firstBufferSize), bbaos.getBaos().get(1).size()); + + // test content of first baos + for (int i = 0; i < firstBufferSize; i++) + assertEquals(firstBuffer[0][i], bbaos.getBaos().get(0).getBuffer()[i]); + for (int i = firstBufferSize; i < maxArraySize; i++) + assertEquals(secondBuffer[0][i - firstBufferSize], bbaos.getBaos().get(0).getBuffer()[i]); + + // test content of second baos + assertArrayEquals(Arrays.copyOfRange(secondBuffer[0], secondBufferSize - (maxArraySize - firstBufferSize), secondBufferSize), bbaos.getBaos().get(1).toByteArray()); + + // reset + bbaos.reset(); + assertEquals(2, bbaos.getBaos().size()); // baos won't be removed with reset + 
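// Aside: reset() only rewinds the sizes; both backing arrays and their grown
// capacities survive, which is why the identical second write below must
// reproduce the exact same layout.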
assertEquals(0, bbaos.size()); + assertEquals(0, bbaos.getBaos().get(0).size()); + assertEquals(0, bbaos.getBaos().get(1).size()); + assertEquals(maxArraySize, bbaos.getBaos().get(0).getBuffer().length); + assertEquals(maxArraySize, bbaos.getBaos().get(1).getBuffer().length); + + assertDoesNotThrow(() -> bbaos.write(firstBuffer)); + assertEquals(firstBufferSize, bbaos.size()); + assertEquals(firstBufferSize, bbaos.getBaos().get(0).size()); + for (int i = 0; i < firstBufferSize; i++) { + assertEquals(firstBuffer[0][i], bbaos.getBaos().get(0).getBuffer()[i]); + } + + assertDoesNotThrow(() -> bbaos.write(secondBuffer)); + assertEquals(2, bbaos.getBaos().size()); + assertEquals(maxArraySize, bbaos.getBaos().get(1).getBuffer().length); + assertEquals(firstBufferSize + secondBufferSize, bbaos.size()); + assertEquals(maxArraySize, bbaos.getBaos().get(0).size()); + assertEquals(secondBufferSize - (maxArraySize - firstBufferSize), bbaos.getBaos().get(1).size()); + for (int i = 0; i < firstBufferSize; i++) + assertEquals(firstBuffer[0][i], bbaos.getBaos().get(0).getBuffer()[i]); + for (int i = firstBufferSize; i < maxArraySize; i++) + assertEquals(secondBuffer[0][i - firstBufferSize], bbaos.getBaos().get(0).getBuffer()[i]); + + assertArrayEquals(Arrays.copyOfRange(secondBuffer[0], secondBufferSize - (maxArraySize - firstBufferSize), secondBufferSize), bbaos.getBaos().get(1).toByteArray()); + } + } + + @ParameterizedTest(name = "[{index}] randomBufferSize={0}, initialCapacity={1}, baosSizes={2}") + @MethodSource("data") + @DisplayName("Test reset method") + void testReset(Supplier bufferSup, long initialCapacity, int[] baosSizes) throws IOException { + final var buffer = bufferSup.get(); + try (final var bbaos = new BigByteArrayOutputStream(initialCapacity)) { + bbaos.write(buffer); + assertEquals(baosSizes.length, bbaos.getBaos().size()); // expected amount of baos + for (int i = 0; i < buffer.length; i++) { + assertArrayEquals(buffer[i], bbaos.getBaos().get(i).toByteArray()); // expected content + assertEquals(baosSizes[i], bbaos.getBaos().get(i).getBuffer().length); // expected baos sizes + } + assertEquals(Arrays.stream(buffer).mapToLong(x -> x.length).sum(), bbaos.size()); + + bbaos.reset(); + + assertEquals(0, bbaos.size()); + assertEquals(baosSizes.length, bbaos.getBaos().size()); // same amount of baos + for (int i = 0; i < buffer.length; i++) { + assertEquals(baosSizes[i], bbaos.getBaos().get(i).getBuffer().length); // baos sizes should be same + } + + // after reset, a new write should produce the same content and state + bbaos.write(buffer); + assertEquals(Arrays.stream(buffer).mapToLong(x -> x.length).sum(), bbaos.size()); + for (int i = 0; i < buffer.length; i++) { + assertArrayEquals(buffer[i], bbaos.getBaos().get(i).toByteArray()); // expected content + } + + // check baos sizes again after write + for (int i = 0; i < baosSizes.length; i++) { + assertEquals(baosSizes[i], bbaos.getBaos().get(i).getBuffer().length); + } + } + }
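
// Aside: the lifecycle difference pinned down by testReset above and testClear below:
// reset() keeps every backing baos and its capacity, while clear() drops all backing
// baos except the first.

 + + @ParameterizedTest(name = "[{index}] randomBufferSize={0}, initialCapacity={1}, baosSizes={2}") + @MethodSource("data") + @DisplayName("Test clear method") + void testClear(Supplier bufferSup, long initialCapacity, int[] baosSizes) throws IOException { + final var buffer = bufferSup.get(); + try (final var bbaos = new BigByteArrayOutputStream(initialCapacity)) { + bbaos.write(buffer); + assertEquals(baosSizes.length, bbaos.getBaos().size()); // expected amount of baos + for (int i = 0; i < buffer.length; i++) { + assertArrayEquals(buffer[i],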
bbaos.getBaos().get(i).toByteArray()); // expected content + assertEquals(baosSizes[i], bbaos.getBaos().get(i).getBuffer().length); // expected baos sizes + } + assertEquals(Arrays.stream(buffer).mapToLong(x -> x.length).sum(), bbaos.size()); + + bbaos.clear(); + assertEquals(0, bbaos.size()); + assertEquals(1, bbaos.getBaos().size()); // deleted all baos except first one + assertEquals(baosSizes[0], bbaos.getBaos().get(0).getBuffer().length); // first baos maintained previous buffer size + + // after clear, a new write should produce the same content + bbaos.write(buffer); + for (int i = 0; i < buffer.length; i++) { + assertArrayEquals(buffer[i], bbaos.getBaos().get(i).toByteArray()); // expected content + } + assertEquals(Arrays.stream(buffer).mapToLong(x -> x.length).sum(), bbaos.size()); + } + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/commons/io/ByteArrayListInputStreamTest.java b/src/test/java/org/aksw/iguana/commons/io/ByteArrayListInputStreamTest.java new file mode 100644 index 000000000..bf841d0db --- /dev/null +++ b/src/test/java/org/aksw/iguana/commons/io/ByteArrayListInputStreamTest.java @@ -0,0 +1,174 @@ +package org.aksw.iguana.commons.io; + +import org.junit.jupiter.api.Test; + +import java.io.EOFException; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Random; + +import static org.junit.jupiter.api.Assertions.*; + +class ByteArrayListInputStreamTest { + + private final static int BUFFER_SIZE = 1024; + private final static int NUM_BUFFERS = 10; + + private static final Random rng = new Random(); + + private static List createByteArrayListInputStream(int arraySize, int numArrays) { + + List data = new ArrayList<>(numArrays); + for (int i = 0; i < numArrays; i++) { + final var temp = new byte[arraySize]; + rng.nextBytes(temp); + data.add(temp); + } + return data; + } + + + @Test + void testReadSingle() throws IOException { + final var data = createByteArrayListInputStream(1024, 10); + final var stream = new ByteArrayListInputStream(data); + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i++) { + assertEquals(data.get(i / BUFFER_SIZE)[i % BUFFER_SIZE], (byte) stream.read(), String.format("Failed at index %d", i)); + } + assertEquals(-1, stream.read()); + } + + @Test + void testReadAllBytes() throws IOException { + final var data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + final var stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + assertThrows(UnsupportedOperationException.class, stream::readAllBytes); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + } + + @Test + void testReadMultiple() throws IOException { + // readNBytes + // test full read + var data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + var stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + byte[] buffer = new byte[BUFFER_SIZE * NUM_BUFFERS + 1]; + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.readNBytes(buffer, 0, BUFFER_SIZE * NUM_BUFFERS + 1)); + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i++) { + assertEquals(data.get(i / BUFFER_SIZE)[i % BUFFER_SIZE], buffer[i], String.format("Failed at index %d", i)); + } + assertEquals(0, stream.availableLong()); + assertEquals(0, stream.readNBytes(buffer, 0, 1)); + + // test partial read with 3 bytes + data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + stream = new 
ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + buffer = new byte[3]; + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i += 3) { + assertEquals(Math.min(BUFFER_SIZE * NUM_BUFFERS - i, 3), stream.readNBytes(buffer, 0, 3)); + for (int j = 0; j < Math.min(BUFFER_SIZE * NUM_BUFFERS - i, 3); j++) { + assertEquals(data.get((i + j) / BUFFER_SIZE)[(i + j) % BUFFER_SIZE], buffer[j], String.format("Failed at index %d", i + j)); + } + } + assertEquals(0, stream.availableLong()); + + // read + // test full read + data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + buffer = new byte[BUFFER_SIZE * NUM_BUFFERS + 1]; + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.read(buffer, 0, BUFFER_SIZE * NUM_BUFFERS + 1)); + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i++) { + assertEquals(data.get(i / BUFFER_SIZE)[i % BUFFER_SIZE], buffer[i], String.format("Failed at index %d", i)); + } + assertEquals(0, stream.availableLong()); + assertEquals(-1, stream.read(buffer, 0, 1)); + + // test partial read with 3 bytes + data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + buffer = new byte[3]; + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i += 3) { + assertEquals(Math.min(BUFFER_SIZE * NUM_BUFFERS - i, 3), stream.read(buffer, 0, 3)); + for (int j = 0; j < Math.min(BUFFER_SIZE * NUM_BUFFERS - i, 3); j++) { + assertEquals(data.get((i + j) / BUFFER_SIZE)[(i + j) % BUFFER_SIZE], buffer[j], String.format("Failed at index %d", i + j)); + } + } + assertEquals(0, stream.availableLong()); + assertEquals(-1, stream.read(buffer, 0, 1)); + } + + @Test + void testSkip() throws IOException { + // skip + final var data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + final var stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i += 3) { + final var skip = stream.skip(3); + assertEquals(Math.min(3, BUFFER_SIZE * NUM_BUFFERS - i), skip); + assertEquals(BUFFER_SIZE * NUM_BUFFERS - i - skip, stream.availableLong()); + } + assertEquals(0, stream.availableLong()); + assertEquals(0, stream.skip(1)); + + // skipNBytes + final var data2 = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + final var stream2 = new ByteArrayListInputStream(data2); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream2.availableLong()); + for (int i = 0; i < BUFFER_SIZE * NUM_BUFFERS; i += 3) { + try { + stream2.skipNBytes(3); + } catch (EOFException e) { + if (i <= BUFFER_SIZE * NUM_BUFFERS - 3) { + fail("EOFException thrown too early"); + } else { + break; + } + } + assertEquals(BUFFER_SIZE * NUM_BUFFERS - i - 3, stream2.availableLong()); + } + assertEquals(0, stream2.availableLong()); + assertThrows(EOFException.class, () -> stream2.skipNBytes(1)); + } + + @Test + void testAvailable() throws IOException { + final var data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + final var stream = new ByteArrayListInputStream(data); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.availableLong()); + assertEquals(BUFFER_SIZE * NUM_BUFFERS, stream.available()); + } + + @Test + void testClose() { + final var data = createByteArrayListInputStream(BUFFER_SIZE, NUM_BUFFERS); + final var stream = new 
ByteArrayListInputStream(data); + final var buffer = new byte[BUFFER_SIZE * NUM_BUFFERS]; + assertDoesNotThrow(stream::close); + assertThrows(IOException.class, stream::read); + assertThrows(IOException.class, () -> stream.read(buffer, 0, BUFFER_SIZE * NUM_BUFFERS)); + assertThrows(IOException.class, () -> stream.readNBytes(buffer, 0, BUFFER_SIZE * NUM_BUFFERS)); + assertThrows(IOException.class, () -> stream.skip(1)); + assertThrows(IOException.class, () -> stream.skipNBytes(1)); + assertThrows(IOException.class, stream::availableLong); + + } + + @Test + void testAvailableLong() throws IOException { + final var data1 = createByteArrayListInputStream(Integer.MAX_VALUE - 8, 1); + final var data2 = createByteArrayListInputStream(BUFFER_SIZE, 1); + final var combined = new ArrayList<>(data1); + combined.addAll(data2); + final var stream = new ByteArrayListInputStream(combined); + assertEquals(Integer.MAX_VALUE - 8 + (long) BUFFER_SIZE, stream.availableLong()); + assertEquals(Integer.MAX_VALUE, stream.available()); + } +} \ No newline at end of file diff --git a/src/test/java/org/aksw/iguana/commons/io/ByteArrayListOutputStreamTest.java b/src/test/java/org/aksw/iguana/commons/io/ByteArrayListOutputStreamTest.java new file mode 100644 index 000000000..007468cfc --- /dev/null +++ b/src/test/java/org/aksw/iguana/commons/io/ByteArrayListOutputStreamTest.java @@ -0,0 +1,96 @@ +package org.aksw.iguana.commons.io; + +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Random; + +import static org.junit.jupiter.api.Assertions.*; + +class ByteArrayListOutputStreamTest { + + private static final Random random = new Random(); + + private static byte[] getRandomData(int size) { + final var buffer = new byte[size]; + random.nextBytes(buffer); + return buffer; + } + + @Test + void testSingleWrite() throws IOException { + final var data = getRandomData(1024); + final var out = new ByteArrayListOutputStream(); + assertDoesNotThrow(() -> out.write(data)); + assertDoesNotThrow(out::close); + assertArrayEquals(data, out.getBuffers().get(0)); + assertEquals(1024, out.size()); + + final var out2 = new ByteArrayListOutputStream(1024 / 4); + assertDoesNotThrow(() -> out2.write(data)); + assertDoesNotThrow(out2::close); + assertArrayEquals(data, out2.getBuffers().get(0)); + assertEquals(1024, out2.size()); + } + + @Test + void testMultipleWrite() { + final var data = getRandomData(1024); + final var out = new ByteArrayListOutputStream(); + assertDoesNotThrow(() -> out.write(data)); + assertDoesNotThrow(() -> out.write(data)); + assertDoesNotThrow(out::close); + assertArrayEquals(data, Arrays.copyOfRange(out.getBuffers().get(0), 0, 1024)); + assertArrayEquals(data, Arrays.copyOfRange(out.getBuffers().get(0), 1024, 2048)); + assertEquals(2048, out.size()); + + final var out2 = new ByteArrayListOutputStream(1024 / 4); + assertDoesNotThrow(() -> out2.write(data)); + assertDoesNotThrow(() -> out2.write(data)); + assertDoesNotThrow(out2::close); + assertArrayEquals(data, out2.getBuffers().get(0)); + assertArrayEquals(data, out2.getBuffers().get(1)); + assertEquals(2048, out2.size()); + + final var out3 = new ByteArrayListOutputStream(1024 / 4); + for (int i = 0; i < 1024; i++) { + int finalI = i; + assertDoesNotThrow(() -> out3.write(data[finalI])); + } + assertDoesNotThrow(out3::close); + assertArrayEquals(Arrays.copyOfRange(data, 0, 256), out3.getBuffers().get(0)); + assertArrayEquals(Arrays.copyOfRange(data, 256, 512), out3.getBuffers().get(1)); + 
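// Aside: contrast with out2 above: a bulk write(byte[]) stores each written array
// whole even when the chunk size is smaller, while the byte-wise write(int) calls
// fill fixed chunks of 1024 / 4 = 256 bytes, verified chunk by chunk here.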
assertArrayEquals(Arrays.copyOfRange(data, 512, 768), out3.getBuffers().get(2)); + assertArrayEquals(Arrays.copyOfRange(data, 768, 1024), out3.getBuffers().get(3)); + assertEquals(1024, out3.size()); + } + + @Test + void testClose() { + final var out = new ByteArrayListOutputStream(); + final var data = getRandomData(1024); + assertDoesNotThrow(out::close); + assertDoesNotThrow(out::close); + assertThrows(IOException.class, () -> out.write(data)); + assertThrows(IOException.class, () -> out.write(data[0])); + } + + @Test + void testToInputStream() throws IOException { + final var data = getRandomData(1024); + final var out = new ByteArrayListOutputStream(); + assertDoesNotThrow(() -> out.write(data)); + final var in = out.toInputStream(); + + // stream should be closed + assertThrows(IOException.class, () -> out.write(data)); + + assertEquals(ByteArrayListInputStream.class, in.getClass()); + final var typedIn = (ByteArrayListInputStream) in; + final var buffer = new byte[1024]; + assertEquals(1024, typedIn.availableLong()); + assertEquals(1024, typedIn.read(buffer)); + assertArrayEquals(data, buffer); + } +} diff --git a/iguana.corecontroller/src/test/resources/cli/echoinput.sh b/src/test/resources/cli/echoinput.sh similarity index 100% rename from iguana.corecontroller/src/test/resources/cli/echoinput.sh rename to src/test/resources/cli/echoinput.sh diff --git a/iguana.commons/src/test/resources/complex-script-example-issue108.sh b/src/test/resources/complex-script-example-issue108.sh similarity index 100% rename from iguana.commons/src/test/resources/complex-script-example-issue108.sh rename to src/test/resources/complex-script-example-issue108.sh diff --git a/iguana.corecontroller/src/test/resources/config/mockupworkflow-default.yml b/src/test/resources/config/mockupworkflow-default.yml similarity index 100% rename from iguana.corecontroller/src/test/resources/config/mockupworkflow-default.yml rename to src/test/resources/config/mockupworkflow-default.yml diff --git a/iguana.corecontroller/src/test/resources/config/mockupworkflow-no-default.yml b/src/test/resources/config/mockupworkflow-no-default.yml similarity index 80% rename from iguana.corecontroller/src/test/resources/config/mockupworkflow-no-default.yml rename to src/test/resources/config/mockupworkflow-no-default.yml index 2a348a7f1..ca20335c6 100644 --- a/iguana.corecontroller/src/test/resources/config/mockupworkflow-no-default.yml +++ b/src/test/resources/config/mockupworkflow-no-default.yml @@ -20,8 +20,8 @@ preScriptHook: "src/test/resources/config/pre.sh {{connection}} {{dataset.name}} postScriptHook: "src/test/resources/config/post.sh {{dataset.file}} {{dataset.name}} {{connection}}" metrics: - - className: "org.aksw.iguana.rp.metrics.impl.QMPHMetric" - - className: "org.aksw.iguana.rp.metrics.impl.QPSMetric" + - className: "org.aksw.iguana.cc.tasks.stresstest.metrics.impl.QMPH" + - className: "org.aksw.iguana.cc.tasks.stresstest.metrics.impl.QPS" storages: - - className: "org.aksw.iguana.cc.tasks.MockupStorage" \ No newline at end of file + - className: "org.aksw.iguana.cc.mockup.MockupStorage" \ No newline at end of file diff --git a/iguana.corecontroller/src/test/resources/config/mockupworkflow.yml b/src/test/resources/config/mockupworkflow.yml similarity index 94% rename from iguana.corecontroller/src/test/resources/config/mockupworkflow.yml rename to src/test/resources/config/mockupworkflow.yml index 271c41a42..caea7e0f0 100644 --- a/iguana.corecontroller/src/test/resources/config/mockupworkflow.yml +++ 
b/src/test/resources/config/mockupworkflow.yml @@ -24,4 +24,4 @@ metrics: - className: "org.aksw.iguana.rp.metrics.impl.NoQMetric" storages: - - className: "org.aksw.iguana.cc.tasks.MockupStorage" \ No newline at end of file + - className: "org.aksw.iguana.cc.mockup.MockupStorage" \ No newline at end of file diff --git a/iguana.corecontroller/src/test/resources/config/post.sh b/src/test/resources/config/post.sh similarity index 100% rename from iguana.corecontroller/src/test/resources/config/post.sh rename to src/test/resources/config/post.sh diff --git a/iguana.corecontroller/src/test/resources/config/pre.sh b/src/test/resources/config/pre.sh similarity index 100% rename from iguana.corecontroller/src/test/resources/config/pre.sh rename to src/test/resources/config/pre.sh diff --git a/iguana.corecontroller/src/test/resources/config/workflow-expected.nt b/src/test/resources/config/workflow-expected.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/config/workflow-expected.nt rename to src/test/resources/config/workflow-expected.nt diff --git a/iguana.resultprocessor/src/test/resources/controller_test.properties b/src/test/resources/controller_test.properties similarity index 95% rename from iguana.resultprocessor/src/test/resources/controller_test.properties rename to src/test/resources/controller_test.properties index ffb11acc7..2a6ba73b6 100644 --- a/iguana.resultprocessor/src/test/resources/controller_test.properties +++ b/src/test/resources/controller_test.properties @@ -27,7 +27,7 @@ metric4.class=org.aksw.iguana.rp.metrics.impl.NoQPHMetric ## Storages to use ## ################################## store1.class=org.aksw.iguana.rp.storage.imp.PropertiesSenderStorage -store2.class=org.aksw.iguana.rp.storage.impl.NTFileStorage +store2.class=org.aksw.iguana.rp.storage.impl.RDFFileStorage store2.constructorArgs=results_test.nt store3.class=org.aksw.iguana.rp.storage.impl.TriplestoreStorage store3.constructorArgs=http://localhost:3030/das/sparql,http://localhost:3030/das/update diff --git a/iguana.corecontroller/src/test/resources/fileUtils.txt b/src/test/resources/fileUtils.txt similarity index 100% rename from iguana.corecontroller/src/test/resources/fileUtils.txt rename to src/test/resources/fileUtils.txt diff --git a/iguana.corecontroller/src/test/resources/iguana-valid.json b/src/test/resources/iguana-valid.json similarity index 90% rename from iguana.corecontroller/src/test/resources/iguana-valid.json rename to src/test/resources/iguana-valid.json index 487a420a2..bcd8d7ba5 100644 --- a/iguana.corecontroller/src/test/resources/iguana-valid.json +++ b/src/test/resources/iguana-valid.json @@ -37,20 +37,21 @@ "className": "Stresstest", "configuration": { "timeLimit": 360000, - "queryHandler": { - "className": "InstancesQueryHandler" - }, "workers": [ { "threads": 16, "className": "SPARQLWorker", - "queriesFile": "queries_easy.txt", + "queries": { + "location": "queries_easy.txt" + }, "timeOut": 180000 }, { "threads": 4, "className": "SPARQLWorker", - "queriesFile": "queries_complex.txt", + "queries": { + "location": "queries_complex.txt" + }, "fixedLatency": 100, "gaussianLatency": 50, "parameterName": "query", diff --git a/iguana.corecontroller/src/test/resources/iguana-valid.yml b/src/test/resources/iguana-valid.yml similarity index 90% rename from iguana.corecontroller/src/test/resources/iguana-valid.yml rename to src/test/resources/iguana-valid.yml index a8a570954..a8842463b 100644 --- a/iguana.corecontroller/src/test/resources/iguana-valid.yml +++ 
b/src/test/resources/iguana-valid.yml @@ -23,16 +23,16 @@ tasks: - className: "Stresstest" configuration: timeLimit: 360000 - queryHandler: - className: "InstancesQueryHandler" workers: - threads: 16 className: "SPARQLWorker" - queriesFile: "queries_easy.txt" + queries: + location: "queries_easy.txt" timeOut: 180000 - threads: 4 className: "SPARQLWorker" - queriesFile: "queries_complex.txt" + queries: + location: "queries_complex.txt" fixedLatency: 100 gaussianLatency: 50 parameterName: "query" diff --git a/iguana.corecontroller/src/test/resources/iguana.json b/src/test/resources/iguana.json similarity index 100% rename from iguana.corecontroller/src/test/resources/iguana.json rename to src/test/resources/iguana.json diff --git a/iguana.corecontroller/src/test/resources/iguana.yml b/src/test/resources/iguana.yml similarity index 100% rename from iguana.corecontroller/src/test/resources/iguana.yml rename to src/test/resources/iguana.yml diff --git a/iguana.corecontroller/src/test/resources/mockupq.txt b/src/test/resources/mockupq.txt similarity index 100% rename from iguana.corecontroller/src/test/resources/mockupq.txt rename to src/test/resources/mockupq.txt diff --git a/iguana.resultprocessor/src/test/resources/nt/avgqpstest.nt b/src/test/resources/nt/avgqpstest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/avgqpstest.nt rename to src/test/resources/nt/avgqpstest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/eqtest.nt b/src/test/resources/nt/eqtest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/eqtest.nt rename to src/test/resources/nt/eqtest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/f1test.nt b/src/test/resources/nt/f1test.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/f1test.nt rename to src/test/resources/nt/f1test.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/noqphtest.nt b/src/test/resources/nt/noqphtest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/noqphtest.nt rename to src/test/resources/nt/noqphtest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/noqtest.nt b/src/test/resources/nt/noqtest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/noqtest.nt rename to src/test/resources/nt/noqtest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/nt_results_wMeta.nt b/src/test/resources/nt/nt_results_wMeta.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/nt_results_wMeta.nt rename to src/test/resources/nt/nt_results_wMeta.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/nt_results_woMeta.nt b/src/test/resources/nt/nt_results_woMeta.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/nt_results_woMeta.nt rename to src/test/resources/nt/nt_results_woMeta.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/penaltyavgqpstest.nt b/src/test/resources/nt/penaltyavgqpstest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/penaltyavgqpstest.nt rename to src/test/resources/nt/penaltyavgqpstest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/qmphtest.nt b/src/test/resources/nt/qmphtest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/qmphtest.nt rename to src/test/resources/nt/qmphtest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/qpspenaltytest.nt 
b/src/test/resources/nt/qpspenaltytest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/qpspenaltytest.nt rename to src/test/resources/nt/qpspenaltytest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/qpspenaltytest2.nt b/src/test/resources/nt/qpspenaltytest2.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/qpspenaltytest2.nt rename to src/test/resources/nt/qpspenaltytest2.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/qpstest.nt b/src/test/resources/nt/qpstest.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/qpstest.nt rename to src/test/resources/nt/qpstest.nt diff --git a/iguana.resultprocessor/src/test/resources/nt/results_test1.nt b/src/test/resources/nt/results_test1.nt similarity index 100% rename from iguana.resultprocessor/src/test/resources/nt/results_test1.nt rename to src/test/resources/nt/results_test1.nt diff --git a/src/test/resources/query/pattern-query.txt b/src/test/resources/query/pattern-query.txt new file mode 100644 index 000000000..612618757 --- /dev/null +++ b/src/test/resources/query/pattern-query.txt @@ -0,0 +1 @@ +SELECT ?book {?book %%var0%% ?o} \ No newline at end of file diff --git a/src/test/resources/query/source/queries.txt b/src/test/resources/query/source/queries.txt new file mode 100644 index 000000000..c62f4a847 --- /dev/null +++ b/src/test/resources/query/source/queries.txt @@ -0,0 +1,3 @@ +QUERY 1 {still query 1} +QUERY 2 {still query 2} +QUERY 3 {still query 3} \ No newline at end of file diff --git a/src/test/resources/query/source/query-folder/query1.txt b/src/test/resources/query/source/query-folder/query1.txt new file mode 100644 index 000000000..fdef9bb9d --- /dev/null +++ b/src/test/resources/query/source/query-folder/query1.txt @@ -0,0 +1,3 @@ +QUERY 1 { +still query 1 +} \ No newline at end of file diff --git a/src/test/resources/query/source/query-folder/query2.txt b/src/test/resources/query/source/query-folder/query2.txt new file mode 100644 index 000000000..976f82c51 --- /dev/null +++ b/src/test/resources/query/source/query-folder/query2.txt @@ -0,0 +1,3 @@ +QUERY 2 { +still query 2 +} \ No newline at end of file diff --git a/src/test/resources/query/source/query-folder/query3.txt b/src/test/resources/query/source/query-folder/query3.txt new file mode 100644 index 000000000..e34d54dad --- /dev/null +++ b/src/test/resources/query/source/query-folder/query3.txt @@ -0,0 +1,3 @@ +QUERY 3 { +still query 3 +} \ No newline at end of file diff --git a/src/test/resources/query/source/separated-queries-default.txt b/src/test/resources/query/source/separated-queries-default.txt new file mode 100644 index 000000000..33f467c47 --- /dev/null +++ b/src/test/resources/query/source/separated-queries-default.txt @@ -0,0 +1,7 @@ +QUERY 1 { +still query 1 +}###QUERY 2 { +still query 2 +}###QUERY 3 { +still query 3 +} \ No newline at end of file diff --git a/src/test/resources/query/source/separated-queries-space.txt b/src/test/resources/query/source/separated-queries-space.txt new file mode 100644 index 000000000..9b948028b --- /dev/null +++ b/src/test/resources/query/source/separated-queries-space.txt @@ -0,0 +1,11 @@ +QUERY 1 { +still query 1 +} + +QUERY 2 { +still query 2 +} + +QUERY 3 { +still query 3 +} \ No newline at end of file diff --git a/src/test/resources/querystats.nt b/src/test/resources/querystats.nt new file mode 100644 index 000000000..df176f427 --- /dev/null +++ b/src/test/resources/querystats.nt @@ -0,0 +1,13 @@ + 
"false"^^. + "true"^^ . + "true"^^ . + "false"^^. + "2"^^. + "false"^^. + "false"^^. + "false"^^. + "false"^^. + . + "0"^^. + "SELECT *\nWHERE\n { ?s ?p ?o .\n ?o ?q ?t\n FILTER ( ?t = \"abc\" )\n }\nGROUP BY ?s\n" . + . \ No newline at end of file diff --git a/src/test/resources/readLineTestFile1.txt b/src/test/resources/readLineTestFile1.txt new file mode 100644 index 000000000..ec0512b87 --- /dev/null +++ b/src/test/resources/readLineTestFile1.txt @@ -0,0 +1,40001 @@ + +line 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +line 2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +line 3 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
[... several thousand "+" (blank added) lines elided ...]
+line 4
[... 7 blank added lines elided ...]
diff --git a/src/test/resources/readLineTestFile2.txt b/src/test/resources/readLineTestFile2.txt
new file mode 100644
index 000000000..34cbb661e
--- /dev/null
+++ b/src/test/resources/readLineTestFile2.txt
@@ -0,0 +1 @@
+ line 1 line 2 line 3 line 4
\ No newline at end of file
diff --git a/src/test/resources/readLineTestFile3.txt b/src/test/resources/readLineTestFile3.txt
new file mode 100644
index 000000000..a72a501c8
--- /dev/null
+++ b/src/test/resources/readLineTestFile3.txt
@@ -0,0 +1,20000 @@
[... 10 blank added lines elided ...]
+line 1
[... blank added lines elided ...]
+line 2
[... blank added lines elided ...]
+line 3
[... blank added lines elided ...]
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+line 4

diff --git a/iguana.corecontroller/src/test/resources/sparql-json-response.json b/src/test/resources/sparql-json-response.json
similarity index 100%
rename from iguana.corecontroller/src/test/resources/sparql-json-response.json
rename to src/test/resources/sparql-json-response.json

diff --git a/src/test/resources/suite-configs/invalid/invalid-number.yaml b/src/test/resources/suite-configs/invalid/invalid-number.yaml
new file mode 100644
index 000000000..992c62e90
--- /dev/null
+++ b/src/test/resources/suite-configs/invalid/invalid-number.yaml
@@ -0,0 +1,82 @@
+datasets:
+  - name: "sp2b"
+    file: "testFile"
+
+connections:
+  - name: "fuseki"
+    version: "v2"
+    endpoint: "http://localhost:3030/sp2b"
+    updateEndpoint: "http://localhost:3030/sp2b/update"
+    authentication:
+      user: "test"
+      password: "test"
+    updateAuthentication:
+      user: "test"
+      password: "test"
+    dataset: "sp2b"
+  - name: "fuseki2"
+    endpoint: "http://localhost:3030/sp2b"
+    dataset: "sp2b"
+
+tasks:
+  - type: stresstest
+    warmupWorkers:
+      - type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+        completionTarget:
+          duration: "2S"
+        connection: "fuseki"
+        timeout: "2S"
+    workers:
+      - number: 1
+        requestType: "post query"
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+          format: "folder"
+          separator: " "
+          caching: true
+          order: "random"
+          seed: 1
+          lang: "SPARQL"
+        timeout: "2S"
+        connection: "fuseki"
+        completionTarget:
+          duration: "2S"
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+      - number: 1
+        requestType: post query
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./example/suite/queries/"
+          format: "folder"
+          order: "random"
+        timeout: 2S
+        connection: fuseki
+        completionTarget:
+          duration: 5S
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+
+storages:
+  - type: "csv file"
+    directory: "result/"
+  - type: rdf file
+    path: "result/result.nt"
+  - type: "triplestore"
+    endpoint: "http://localhost:3030/ds/sparql"
+    user: "test"
+    password: "123"
+    baseUri: "http://example.org/"
+
+responseBodyProcessors:
+  - contentType: "application/sparql-results+json"
+    threads: -1 # invalid number
+
+metrics:
+  - type: "PQPS"
+    penalty: 1000
+  - type: "QPS"
+  - type: "QMPH"
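Taken together, the two invalid fixtures (threads: -1 above, the unknown task type in wrong-task.yaml below) and the valid config-full.yaml read as inputs for a parse-accept/parse-reject test. A minimal JUnit 5 sketch of such a test follows; IguanaSuiteParser.parse is a hypothetical stand-in for whatever the project's actual configuration loader is called:

import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertThrows;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.stream.Stream;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;

class SuiteConfigFixtureTest {
    // Every file under valid/ must parse; every file under invalid/ must be rejected.
    static Stream<Path> valid() throws IOException {
        return Files.list(Path.of("src/test/resources/suite-configs/valid"));
    }

    static Stream<Path> invalid() throws IOException {
        return Files.list(Path.of("src/test/resources/suite-configs/invalid"));
    }

    @ParameterizedTest
    @MethodSource("valid")
    void validConfigsParse(Path config) {
        assertDoesNotThrow(() -> IguanaSuiteParser.parse(config)); // hypothetical API
    }

    @ParameterizedTest
    @MethodSource("invalid")
    void invalidConfigsAreRejected(Path config) {
        assertThrows(Exception.class, () -> IguanaSuiteParser.parse(config)); // hypothetical API
    }
}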
diff --git a/src/test/resources/suite-configs/invalid/wrong-task.yaml b/src/test/resources/suite-configs/invalid/wrong-task.yaml
new file mode 100644
index 000000000..84cfb7762
--- /dev/null
+++ b/src/test/resources/suite-configs/invalid/wrong-task.yaml
@@ -0,0 +1,82 @@
+datasets:
+  - name: "sp2b"
+    file: "testFile"
+
+connections:
+  - name: "fuseki"
+    version: "v2"
+    endpoint: "http://localhost:3030/sp2b"
+    updateEndpoint: "http://localhost:3030/sp2b/update"
+    authentication:
+      user: "test"
+      password: "test"
+    updateAuthentication:
+      user: "test"
+      password: "test"
+    dataset: "sp2b"
+  - name: "fuseki2"
+    endpoint: "http://localhost:3030/sp2b"
+    dataset: "sp2b"
+
+tasks:
+  - type: test # invalid type
+    warmupWorkers:
+      - type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+        completionTarget:
+          duration: "2S"
+        connection: "fuseki"
+        timeout: "2S"
+    workers:
+      - number: 1
+        requestType: "post query"
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+          format: "folder"
+          separator: " "
+          caching: true
+          order: "random"
+          seed: 1
+          lang: "SPARQL"
+        timeout: "2S"
+        connection: "fuseki"
+        completionTarget:
+          duration: "2S"
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+      - number: 1
+        requestType: post query
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./example/suite/queries/"
+          format: "folder"
+          order: "random"
+        timeout: 2S
+        connection: fuseki
+        completionTarget:
+          duration: 5S
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+
+storages:
+  - type: "csv file"
+    directory: "result/"
+  - type: rdf file
+    path: "result/result.nt"
+  - type: "triplestore"
+    endpoint: "http://localhost:3030/ds/sparql"
+    user: "test"
+    password: "123"
+    baseUri: "http://example.org/"
+
+responseBodyProcessors:
+  - contentType: "application/sparql-results+json"
+    threads: 2
+
+metrics:
+  - type: "PQPS"
+    penalty: 1000
+  - type: "QPS"
+  - type: "QMPH"

diff --git a/src/test/resources/suite-configs/valid/config-full.yaml b/src/test/resources/suite-configs/valid/config-full.yaml
new file mode 100644
index 000000000..5b4c5fb21
--- /dev/null
+++ b/src/test/resources/suite-configs/valid/config-full.yaml
@@ -0,0 +1,82 @@
+datasets:
+  - name: "sp2b"
+    file: "testFile"
+
+connections:
+  - name: "fuseki"
+    version: "v2"
+    endpoint: "http://localhost:3030/sp2b"
+    updateEndpoint: "http://localhost:3030/sp2b/update"
+    authentication:
+      user: "test"
+      password: "test"
+    updateAuthentication:
+      user: "test"
+      password: "test"
+    dataset: "sp2b"
+  - name: "fuseki2"
+    endpoint: "http://localhost:3030/sp2b"
+    dataset: "sp2b"
+
+tasks:
+  - type: stresstest
+    warmupWorkers:
+      - type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+        completionTarget:
+          duration: "2S"
+        connection: "fuseki"
+        timeout: "2S"
+    workers:
+      - number: 1
+        requestType: "post query"
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./"
+          format: "folder"
+          separator: " "
+          caching: true
+          order: "random"
+          seed: 1
+          lang: "SPARQL"
+        timeout: "2S"
+        connection: "fuseki"
+        completionTarget:
+          duration: "2S"
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+      - number: 1
+        requestType: post query
+        type: "SPARQLProtocolWorker"
+        queries:
+          path: "./example/suite/queries/"
+          format: "folder"
+          order: "random"
+        timeout: 2S
+        connection: fuseki
+        completionTarget:
+          duration: 5S
+        parseResults: true
+        acceptHeader: "application/sparql-results+json"
+
+storages:
+  - type: "csv file"
+    directory: "result/"
+  - type: rdf file
+    path: "result/result.nt"
+  - type: "triplestore"
+    endpoint: "http://localhost:3030/ds/sparql"
+    user: "test"
+    password: "123"
+    baseUri: "http://example.org/"
+
+responseBodyProcessors:
+  - contentType: "application/sparql-results+json"
+    threads: 2
+
+metrics:
+  - type: "PQPS"
+    penalty: 1000
+  - type: "QPS"
+  - type: "QMPH"

diff --git a/iguana.corecontroller/src/test/resources/test-DatasetName.sh b/src/test/resources/test-DatasetName.sh
old mode 100644
new mode 100755
similarity index 100%
rename from iguana.corecontroller/src/test/resources/test-DatasetName.sh
rename to src/test/resources/test-DatasetName.sh

diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/suite-summary.csv b/src/test/resources/test-data/csv-storage-test/suite-123/suite-summary.csv
new file mode 100644
index 000000000..2549f7ed2
--- /dev/null
+++ b/src/test/resources/test-data/csv-storage-test/suite-123/suite-summary.csv
@@ -0,0 +1,3 @@
+"taskID","startDate","endDate","noOfWorkers","AvgQPS","NoQ","NoQPH","PAvgQPS","QMPH"
+"http://iguana-benchmark.eu/resource/123/0","2023-10-21T20:49:24.399Z","2023-10-21T20:50:42.399Z","4","0.5","40","7200","0.75","720"
+"http://iguana-benchmark.eu/resource/123/1","2023-10-21T20:52:00.399Z","2023-10-21T20:53:18.399Z","4","0.5","20","7200","0.75","1440" diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-0.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-0.csv new file mode 100644 index 000000000..add02a800 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-0.csv @@ -0,0 +1,31 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +6,1,true,2023-10-21T20:48:25.399Z,PT2S,1000,0,200,,123 +6,3,false,2023-10-21T20:48:27.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +6,2,false,2023-10-21T20:48:26.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,3,false,2023-10-21T20:48:18.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,1,true,2023-10-21T20:48:16.399Z,PT2S,1000,0,200,,123 +3,2,false,2023-10-21T20:48:17.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:21.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,1,true,2023-10-21T20:48:19.399Z,PT2S,1000,0,200,,123 +4,2,false,2023-10-21T20:48:20.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,3,false,2023-10-21T20:48:36.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +9,2,false,2023-10-21T20:48:35.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,1,true,2023-10-21T20:48:34.399Z,PT2S,1000,0,200,,123 +7,3,false,2023-10-21T20:48:30.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +7,2,false,2023-10-21T20:48:29.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +7,1,true,2023-10-21T20:48:28.399Z,PT2S,1000,0,200,,123 +1,3,false,2023-10-21T20:48:12.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,2,false,2023-10-21T20:48:11.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,1,true,2023-10-21T20:48:10.399Z,PT2S,1000,0,200,,123 +8,1,true,2023-10-21T20:48:31.399Z,PT2S,1000,0,200,,123 +8,2,false,2023-10-21T20:48:32.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +8,3,false,2023-10-21T20:48:33.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,1,true,2023-10-21T20:48:13.399Z,PT2S,1000,0,200,,123 +2,3,false,2023-10-21T20:48:15.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,2,false,2023-10-21T20:48:14.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +0,3,false,2023-10-21T20:48:09.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,2,false,2023-10-21T20:48:08.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +0,1,true,2023-10-21T20:48:07.399Z,PT2S,1000,0,200,,123 +5,1,true,2023-10-21T20:48:22.399Z,PT2S,1000,0,200,,123 +5,2,false,2023-10-21T20:48:23.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +5,3,false,2023-10-21T20:48:24.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-1.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-1.csv new file mode 100644 index 000000000..1cf280155 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-1.csv @@ -0,0 +1,31 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +6,1,true,2023-10-21T20:48:55.399Z,PT2S,1000,0,200,,123 +6,2,false,2023-10-21T20:48:56.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, 
+6,3,false,2023-10-21T20:48:57.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,1,true,2023-10-21T20:48:46.399Z,PT2S,1000,0,200,,123 +3,3,false,2023-10-21T20:48:48.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,2,false,2023-10-21T20:48:47.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:51.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,2,false,2023-10-21T20:48:50.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,1,true,2023-10-21T20:48:49.399Z,PT2S,1000,0,200,,123 +0,2,false,2023-10-21T20:48:38.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +0,1,true,2023-10-21T20:48:37.399Z,PT2S,1000,0,200,,123 +0,3,false,2023-10-21T20:48:39.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +7,3,false,2023-10-21T20:49:00.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +7,1,true,2023-10-21T20:48:58.399Z,PT2S,1000,0,200,,123 +7,2,false,2023-10-21T20:48:59.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +8,3,false,2023-10-21T20:49:03.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +8,1,true,2023-10-21T20:49:01.399Z,PT2S,1000,0,200,,123 +8,2,false,2023-10-21T20:49:02.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +5,1,true,2023-10-21T20:48:52.399Z,PT2S,1000,0,200,,123 +5,3,false,2023-10-21T20:48:54.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +5,2,false,2023-10-21T20:48:53.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,2,false,2023-10-21T20:48:41.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,3,false,2023-10-21T20:48:42.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:40.399Z,PT2S,1000,0,200,,123 +9,2,false,2023-10-21T20:49:05.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,3,false,2023-10-21T20:49:06.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +9,1,true,2023-10-21T20:49:04.399Z,PT2S,1000,0,200,,123 +2,1,true,2023-10-21T20:48:43.399Z,PT2S,1000,0,200,,123 +2,3,false,2023-10-21T20:48:45.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,2,false,2023-10-21T20:48:44.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-2.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-2.csv new file mode 100644 index 000000000..7cdace8c0 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-2.csv @@ -0,0 +1,31 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +9,1,true,2023-10-21T20:48:34.399Z,PT2S,1000,0,200,,123 +9,2,false,2023-10-21T20:48:35.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,3,false,2023-10-21T20:48:36.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +6,1,true,2023-10-21T20:48:25.399Z,PT2S,1000,0,200,,123 +6,3,false,2023-10-21T20:48:27.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +6,2,false,2023-10-21T20:48:26.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +8,2,false,2023-10-21T20:48:32.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +8,1,true,2023-10-21T20:48:31.399Z,PT2S,1000,0,200,,123 +8,3,false,2023-10-21T20:48:33.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,3,false,2023-10-21T20:48:12.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:10.399Z,PT2S,1000,0,200,,123 +1,2,false,2023-10-21T20:48:11.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, 
+0,3,false,2023-10-21T20:48:09.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,1,true,2023-10-21T20:48:07.399Z,PT2S,1000,0,200,,123 +0,2,false,2023-10-21T20:48:08.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,2,false,2023-10-21T20:48:17.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,3,false,2023-10-21T20:48:18.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,1,true,2023-10-21T20:48:16.399Z,PT2S,1000,0,200,,123 +5,1,true,2023-10-21T20:48:22.399Z,PT2S,1000,0,200,,123 +5,2,false,2023-10-21T20:48:23.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +5,3,false,2023-10-21T20:48:24.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,3,false,2023-10-21T20:48:15.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,1,true,2023-10-21T20:48:13.399Z,PT2S,1000,0,200,,123 +2,2,false,2023-10-21T20:48:14.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:21.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,2,false,2023-10-21T20:48:20.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,1,true,2023-10-21T20:48:19.399Z,PT2S,1000,0,200,,123 +7,1,true,2023-10-21T20:48:28.399Z,PT2S,1000,0,200,,123 +7,3,false,2023-10-21T20:48:30.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +7,2,false,2023-10-21T20:48:29.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-3.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-3.csv new file mode 100644 index 000000000..2d4f3b2fd --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/each-execution-worker-3.csv @@ -0,0 +1,31 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +0,3,false,2023-10-21T20:48:39.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,1,true,2023-10-21T20:48:37.399Z,PT2S,1000,0,200,,123 +0,2,false,2023-10-21T20:48:38.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,1,true,2023-10-21T20:49:04.399Z,PT2S,1000,0,200,,123 +9,2,false,2023-10-21T20:49:05.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +9,3,false,2023-10-21T20:49:06.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,2,false,2023-10-21T20:48:44.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +2,3,false,2023-10-21T20:48:45.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,1,true,2023-10-21T20:48:43.399Z,PT2S,1000,0,200,,123 +8,3,false,2023-10-21T20:49:03.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +8,2,false,2023-10-21T20:49:02.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +8,1,true,2023-10-21T20:49:01.399Z,PT2S,1000,0,200,,123 +4,3,false,2023-10-21T20:48:51.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,1,true,2023-10-21T20:48:49.399Z,PT2S,1000,0,200,,123 +4,2,false,2023-10-21T20:48:50.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,3,false,2023-10-21T20:48:42.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:40.399Z,PT2S,1000,0,200,,123 +1,2,false,2023-10-21T20:48:41.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +7,3,false,2023-10-21T20:49:00.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +7,2,false,2023-10-21T20:48:59.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +7,1,true,2023-10-21T20:48:58.399Z,PT2S,1000,0,200,,123 +3,3,false,2023-10-21T20:48:48.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 
+3,2,false,2023-10-21T20:48:47.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,1,true,2023-10-21T20:48:46.399Z,PT2S,1000,0,200,,123 +6,2,false,2023-10-21T20:48:56.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +6,1,true,2023-10-21T20:48:55.399Z,PT2S,1000,0,200,,123 +6,3,false,2023-10-21T20:48:57.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +5,1,true,2023-10-21T20:48:52.399Z,PT2S,1000,0,200,,123 +5,3,false,2023-10-21T20:48:54.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +5,2,false,2023-10-21T20:48:53.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-task.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-task.csv new file mode 100644 index 000000000..083ac3beb --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-task.csv @@ -0,0 +1,21 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +MockQueryHandler1:1,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:7,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:8,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:8,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:5,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:5,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:2,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:2,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:3,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:0,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:9,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:0,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:9,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:6,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:6,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:3,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:7,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler1:4,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:4,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler0:1,2,4,PT15S,1000,2,0,2,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-0.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-0.csv new file mode 100644 index 000000000..af664d816 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-0.csv @@ -0,0 +1,11 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +6,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +9,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +7,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +8,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +5,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-1.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-1.csv new file mode 100644 index 000000000..0e5b47c06 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-1.csv @@ -0,0 +1,11 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +6,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +7,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +8,1,2,PT7.5S,1000,1,0,1,0.75,0.5 
+5,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +9,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-2.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-2.csv new file mode 100644 index 000000000..209f63b7b --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-2.csv @@ -0,0 +1,11 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +9,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +6,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +8,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +5,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +7,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-3.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-3.csv new file mode 100644 index 000000000..7ae9ee643 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/query-summary-worker-3.csv @@ -0,0 +1,11 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +9,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +8,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +7,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +6,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +5,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-0/worker-summary.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/worker-summary.csv new file mode 100644 index 000000000..308fec7ae --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-0/worker-summary.csv @@ -0,0 +1,5 @@ +workerID,workerType,noOfQueries,timeOut,startDate,endDate,AvgQPS,NoQ,NoQPH,PAvgQPS,QMPH +0,MockupWorker,10,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,10,1800,0.75,180 +2,MockupWorker,10,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,10,1800,0.75,180 +3,MockupWorker,10,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,10,1800,0.75,180 +1,MockupWorker,10,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,10,1800,0.75,180 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-0.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-0.csv new file mode 100644 index 000000000..ba71cef6e --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-0.csv @@ -0,0 +1,16 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +1,1,true,2023-10-21T20:48:10.399Z,PT2S,1000,0,200,,123 +1,2,false,2023-10-21T20:48:11.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,3,false,2023-10-21T20:48:12.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,3,false,2023-10-21T20:48:15.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,1,true,2023-10-21T20:48:13.399Z,PT2S,1000,0,200,,123 +2,2,false,2023-10-21T20:48:14.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:21.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 
+4,2,false,2023-10-21T20:48:20.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,1,true,2023-10-21T20:48:19.399Z,PT2S,1000,0,200,,123 +3,2,false,2023-10-21T20:48:17.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,3,false,2023-10-21T20:48:18.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,1,true,2023-10-21T20:48:16.399Z,PT2S,1000,0,200,,123 +0,1,true,2023-10-21T20:48:07.399Z,PT2S,1000,0,200,,123 +0,2,false,2023-10-21T20:48:08.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +0,3,false,2023-10-21T20:48:09.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-1.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-1.csv new file mode 100644 index 000000000..076bd5e33 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-1.csv @@ -0,0 +1,16 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +2,1,true,2023-10-21T20:48:28.399Z,PT2S,1000,0,200,,123 +2,2,false,2023-10-21T20:48:29.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +2,3,false,2023-10-21T20:48:30.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,1,true,2023-10-21T20:48:34.399Z,PT2S,1000,0,200,,123 +4,2,false,2023-10-21T20:48:35.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:36.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,2,false,2023-10-21T20:48:26.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +1,3,false,2023-10-21T20:48:27.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:25.399Z,PT2S,1000,0,200,,123 +3,3,false,2023-10-21T20:48:33.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,2,false,2023-10-21T20:48:32.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,1,true,2023-10-21T20:48:31.399Z,PT2S,1000,0,200,,123 +0,3,false,2023-10-21T20:48:24.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,1,true,2023-10-21T20:48:22.399Z,PT2S,1000,0,200,,123 +0,2,false,2023-10-21T20:48:23.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-2.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-2.csv new file mode 100644 index 000000000..56c1a4914 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-2.csv @@ -0,0 +1,16 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +1,3,false,2023-10-21T20:48:12.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:10.399Z,PT2S,1000,0,200,,123 +1,2,false,2023-10-21T20:48:11.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,2,false,2023-10-21T20:48:20.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:21.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +4,1,true,2023-10-21T20:48:19.399Z,PT2S,1000,0,200,,123 +3,1,true,2023-10-21T20:48:16.399Z,PT2S,1000,0,200,,123 +3,2,false,2023-10-21T20:48:17.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,3,false,2023-10-21T20:48:18.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,1,true,2023-10-21T20:48:07.399Z,PT2S,1000,0,200,,123 +0,3,false,2023-10-21T20:48:09.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 
+0,2,false,2023-10-21T20:48:08.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +2,1,true,2023-10-21T20:48:13.399Z,PT2S,1000,0,200,,123 +2,2,false,2023-10-21T20:48:14.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +2,3,false,2023-10-21T20:48:15.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-3.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-3.csv new file mode 100644 index 000000000..91cd36184 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/each-execution-worker-3.csv @@ -0,0 +1,16 @@ +queryID,run,success,startTime,time,resultSize,code,httpCode,exception,responseBodyHash +0,3,false,2023-10-21T20:48:24.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +0,2,false,2023-10-21T20:48:23.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +0,1,true,2023-10-21T20:48:22.399Z,PT2S,1000,0,200,,123 +3,3,false,2023-10-21T20:48:33.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +3,2,false,2023-10-21T20:48:32.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +3,1,true,2023-10-21T20:48:31.399Z,PT2S,1000,0,200,,123 +2,3,false,2023-10-21T20:48:30.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +2,2,false,2023-10-21T20:48:29.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +2,1,true,2023-10-21T20:48:28.399Z,PT2S,1000,0,200,,123 +4,1,true,2023-10-21T20:48:34.399Z,PT2S,1000,0,200,,123 +4,2,false,2023-10-21T20:48:35.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, +4,3,false,2023-10-21T20:48:36.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,1,true,2023-10-21T20:48:25.399Z,PT2S,1000,0,200,,123 +1,3,false,2023-10-21T20:48:27.399Z,PT5S,-1,1,200,java.lang.Exception: io_exception,456 +1,2,false,2023-10-21T20:48:26.399Z,PT0.5S,-1,111,404,java.lang.Exception: httperror, diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-task.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-task.csv new file mode 100644 index 000000000..660e2654d --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-task.csv @@ -0,0 +1,11 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +MockQueryHandler3:3,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler2:3,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler3:4,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler3:1,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler2:1,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler2:4,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler2:0,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler2:2,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler3:2,2,4,PT15S,1000,2,0,2,0.75,0.5 +MockQueryHandler3:0,2,4,PT15S,1000,2,0,2,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-0.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-0.csv new file mode 100644 index 000000000..2e657e3a9 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-0.csv @@ -0,0 +1,6 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git 
a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-1.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-1.csv new file mode 100644 index 000000000..e55d38a18 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-1.csv @@ -0,0 +1,6 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-2.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-2.csv new file mode 100644 index 000000000..e244ab700 --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-2.csv @@ -0,0 +1,6 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-3.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-3.csv new file mode 100644 index 000000000..fd294d37c --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/query-summary-worker-3.csv @@ -0,0 +1,6 @@ +queryID,succeeded,failed,totalTime,resultSize,wrongCodes,timeOuts,unknownException,PQPS,QPS +0,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +3,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +2,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +4,1,2,PT7.5S,1000,1,0,1,0.75,0.5 +1,1,2,PT7.5S,1000,1,0,1,0.75,0.5 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-1/worker-summary.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/worker-summary.csv new file mode 100644 index 000000000..3d862b39a --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-1/worker-summary.csv @@ -0,0 +1,5 @@ +workerID,workerType,noOfQueries,timeOut,startDate,endDate,AvgQPS,NoQ,NoQPH,PAvgQPS,QMPH +0,MockupWorker,5,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,5,1800,0.75,360 +2,MockupWorker,5,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,5,1800,0.75,360 +3,MockupWorker,5,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,5,1800,0.75,360 +1,MockupWorker,5,PT2S,2023-10-11T14:14:10Z,2023-10-12T15:15:15Z,0.5,5,1800,0.75,360 diff --git a/src/test/resources/test-data/csv-storage-test/suite-123/task-configuration.csv b/src/test/resources/test-data/csv-storage-test/suite-123/task-configuration.csv new file mode 100644 index 000000000..36af1071b --- /dev/null +++ b/src/test/resources/test-data/csv-storage-test/suite-123/task-configuration.csv @@ -0,0 +1,5 @@ +"taskID","connection","version","dataset" +"http://iguana-benchmark.eu/resource/123/0","test-connection-1","v1.0.0","http://iguana-benchmark.eu/resource/test-dataset-1" +"http://iguana-benchmark.eu/resource/123/0","test-connection-2","v1.1.0","http://iguana-benchmark.eu/resource/test-dataset-2" +"http://iguana-benchmark.eu/resource/123/1","test-connection-4","v1.3.0","http://iguana-benchmark.eu/resource/test-dataset-4" +"http://iguana-benchmark.eu/resource/123/1","test-connection-3","v1.2.0","http://iguana-benchmark.eu/resource/test-dataset-3" diff 
--git a/iguana.corecontroller/src/test/resources/updates/empty.nt b/src/test/resources/updates/empty.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/updates/empty.nt rename to src/test/resources/updates/empty.nt diff --git a/iguana.corecontroller/src/test/resources/updates/test1.nt b/src/test/resources/updates/test1.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/updates/test1.nt rename to src/test/resources/updates/test1.nt diff --git a/src/test/resources/utils/indexingtestfile1.txt b/src/test/resources/utils/indexingtestfile1.txt new file mode 100644 index 000000000..6ee359ced --- /dev/null +++ b/src/test/resources/utils/indexingtestfile1.txt @@ -0,0 +1,7 @@ +line 1 +##### +##### +##### +##### + +line 2 diff --git a/src/test/resources/utils/indexingtestfile2.txt b/src/test/resources/utils/indexingtestfile2.txt new file mode 100644 index 000000000..062104e86 --- /dev/null +++ b/src/test/resources/utils/indexingtestfile2.txt @@ -0,0 +1,5 @@ +##### +line 0 +##### +line 1 +##### \ No newline at end of file diff --git a/src/test/resources/utils/indexingtestfile3.txt b/src/test/resources/utils/indexingtestfile3.txt new file mode 100644 index 000000000..7e16533c2 --- /dev/null +++ b/src/test/resources/utils/indexingtestfile3.txt @@ -0,0 +1,9 @@ + line 1 +line 2 + +line 3 + + + +line 4 +line 5 \ No newline at end of file diff --git a/src/test/resources/utils/indexingtestfile4.txt b/src/test/resources/utils/indexingtestfile4.txt new file mode 100644 index 000000000..1477ce7ae --- /dev/null +++ b/src/test/resources/utils/indexingtestfile4.txt @@ -0,0 +1 @@ +a####$b \ No newline at end of file diff --git a/src/test/resources/utils/indexingtestfile5.txt b/src/test/resources/utils/indexingtestfile5.txt new file mode 100644 index 000000000..2d9293513 --- /dev/null +++ b/src/test/resources/utils/indexingtestfile5.txt @@ -0,0 +1 @@ +a21211b \ No newline at end of file diff --git a/iguana.commons/src/test/resources/wait5.sh b/src/test/resources/wait5.sh similarity index 100% rename from iguana.commons/src/test/resources/wait5.sh rename to src/test/resources/wait5.sh diff --git a/iguana.corecontroller/src/test/resources/workers/single-query.txt b/src/test/resources/workers/single-query.txt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/single-query.txt rename to src/test/resources/workers/single-query.txt diff --git a/iguana.corecontroller/src/test/resources/workers/updates.txt b/src/test/resources/workers/updates.txt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/updates.txt rename to src/test/resources/workers/updates.txt diff --git a/iguana.corecontroller/src/test/resources/workers/updates/test1.nt b/src/test/resources/workers/updates/test1.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/updates/test1.nt rename to src/test/resources/workers/updates/test1.nt diff --git a/iguana.corecontroller/src/test/resources/workers/updates/test2.nt b/src/test/resources/workers/updates/test2.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/updates/test2.nt rename to src/test/resources/workers/updates/test2.nt diff --git a/iguana.corecontroller/src/test/resources/workers/updates/test3.nt b/src/test/resources/workers/updates/test3.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/updates/test3.nt rename to src/test/resources/workers/updates/test3.nt diff --git 
a/iguana.corecontroller/src/test/resources/workers/updates/test4.nt b/src/test/resources/workers/updates/test4.nt similarity index 100% rename from iguana.corecontroller/src/test/resources/workers/updates/test4.nt rename to src/test/resources/workers/updates/test4.nt
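A plausibility check on the CSV fixtures above: in each each-execution-worker-*.csv, every query has one successful run (PT2S) and two failed runs (PT0.5S and PT5S), and the matching query-summary rows report QPS 0.5 and PQPS 0.75. Those numbers are consistent if QPS divides successful runs by their accumulated runtime, while PQPS divides all runs by that runtime plus the configured penalty (penalty: 1000, i.e. one second) per failed run. A small sketch of that arithmetic; the formulas are inferred from the fixture values, not quoted from Iguana's implementation:

// Re-derives the per-query QPS/PQPS values found in query-summary-*.csv.
// The metric definitions below are inferred from the fixtures, not from Iguana's code.
public class MetricPlausibilityCheck {
    public static void main(String[] args) {
        int successes = 1, failures = 2;   // runs per query in each-execution-worker-*.csv
        double successSeconds = 2.0;       // the one successful run took PT2S
        double penaltySeconds = 1.0;       // metrics config: penalty: 1000 (milliseconds)

        double qps = successes / successSeconds;
        double pqps = (successes + failures) / (successSeconds + failures * penaltySeconds);

        System.out.printf("QPS=%.2f PQPS=%.2f%n", qps, pqps); // prints QPS=0.50 PQPS=0.75
    }
}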
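The indexingtestfile*.txt fixtures (blocks split by blank lines, by ##### separator lines, and by inline markers such as ####$ and 21211) point at a utility that records the positions of content blocks instead of holding whole files in memory. The following is an illustrative sketch of the line-separator case only; BlockIndexer is a made-up name, not the project's actual class, and the inline-marker fixtures would need a byte-level search instead:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;

final class BlockIndexer {
    /**
     * Returns (offset, length) pairs, in bytes, for each content block between
     * separator lines. Assumes '\n' line endings and single-byte characters,
     * which holds for the fixtures above but not in general.
     */
    static List<long[]> indexBlocks(Path file, String separator) throws IOException {
        List<long[]> ranges = new ArrayList<>();
        long offset = 0;
        long blockStart = 0;
        boolean inBlock = false;
        for (String line : Files.readAllLines(file)) {
            if (line.equals(separator)) {
                if (inBlock) {                 // separator closes the current block
                    ranges.add(new long[]{blockStart, offset - blockStart});
                    inBlock = false;
                }
            } else if (!inBlock) {             // first content line opens a block
                blockStart = offset;
                inBlock = true;
            }
            offset += line.length() + 1;       // advance past the line and its '\n'
        }
        if (inBlock) {                         // file may end inside a block
            ranges.add(new long[]{blockStart, offset - blockStart});
        }
        return ranges;
    }
}

Run against indexingtestfile2.txt with separator "#####", this yields two ranges, covering "line 0" and "line 1" with their trailing newlines.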