From 797359e6c91455775506d2aad4b55c368e723209 Mon Sep 17 00:00:00 2001 From: Grigory Pomadchin Date: Sun, 12 May 2024 22:55:21 -0400 Subject: [PATCH] Update build and deps --- .circleci/config.yml | 104 --- .github/dependabot.yml | 6 + .github/release-drafter.yml | 22 + .github/workflows/ci.yml | 61 ++ .github/workflows/release-drafter.yml | 22 + .gitignore | 94 +-- .scalafmt.conf | 14 +- README.md | 2 +- build.sbt | 97 +-- .../tile/LazyMultibandRasterOperations.scala | 142 ++-- .../main/scala/dsl/tile/LazyTileMethods.scala | 240 +++---- jvm/src/main/scala/dsl/tile/package.scala | 1 - jvm/src/main/scala/error/MamlException.scala | 2 - .../scala/eval/BufferingInterpreter.scala | 17 +- .../scala/eval/ConcurrentInterpreter.scala | 29 +- jvm/src/main/scala/eval/Interpreter.scala | 2 - .../main/scala/eval/NaiveInterpreter.scala | 14 +- .../main/scala/eval/ParallelInterpreter.scala | 38 +- jvm/src/main/scala/eval/Result.scala | 20 +- .../main/scala/eval/ScopedInterpreter.scala | 8 +- .../main/scala/eval/directive/Directive.scala | 2 - .../eval/directive/FocalDirectives.scala | 89 +-- .../scala/eval/directive/OpDirectives.scala | 623 +++++++++++------- .../eval/directive/ScopedDirective.scala | 12 +- .../eval/directive/SourceDirectives.scala | 6 +- .../eval/directive/UnaryDirectives.scala | 76 ++- jvm/src/main/scala/eval/package.scala | 1 - jvm/src/main/scala/eval/tile/Classify.scala | 1 - .../scala/eval/tile/LazyMultibandRaster.scala | 29 +- jvm/src/main/scala/eval/tile/LazyRaster.scala | 32 +- jvm/src/main/scala/eval/tile/Masking.scala | 11 +- .../main/scala/eval/tile/TileLayouts.scala | 9 +- .../scala/eval/tile/TileWithNeighbors.scala | 19 +- .../scala/util/NeighborhoodConversion.scala | 18 +- jvm/src/main/scala/util/Vars.scala | 6 +- .../scala/eval/ConcurrentEvaluationSpec.scala | 185 +++--- jvm/src/test/scala/eval/EvaluationSpec.scala | 167 +++-- .../scala/eval/MultibandSelectionSpec.scala | 23 +- .../scala/eval/ParallelEvaluationSpec.scala | 30 +- jvm/src/test/scala/eval/ResultSpec.scala | 55 +- .../scala/eval/ScopedEvaluationSpec.scala | 16 +- jvm/src/test/scala/eval/VariableSpec.scala | 30 +- jvm/version.sbt | 1 - project/Dependencies.scala | 34 +- project/build.properties | 2 +- project/plugins.sbt | 31 +- project/project/plugins.sbt | 1 - sbt | 578 ---------------- .../src/main/scala/ast/BinaryExpression.scala | 22 +- shared/src/main/scala/ast/Expression.scala | 107 ++- .../src/main/scala/ast/FocalExpression.scala | 2 - .../main/scala/ast/FoldableExpression.scala | 22 +- shared/src/main/scala/ast/MamlKind.scala | 11 +- shared/src/main/scala/ast/Source.scala | 2 - .../src/main/scala/ast/UnaryExpression.scala | 5 +- .../scala/ast/codec/MamlCodecInstances.scala | 113 ++-- .../scala/ast/codec/MamlUtilityCodecs.scala | 92 +-- .../ast/codec/tree/ExpressionTreeCodec.scala | 262 ++++---- shared/src/main/scala/dsl/Literals.scala | 1 - shared/src/main/scala/dsl/Operations.scala | 1 - shared/src/main/scala/dsl/package.scala | 1 - shared/src/main/scala/error/MamlError.scala | 14 +- shared/src/main/scala/error/package.scala | 1 - shared/src/main/scala/util/ClassMap.scala | 2 - shared/src/main/scala/util/ColorRamp.scala | 2 - shared/src/main/scala/util/Geometry.scala | 2 - shared/src/main/scala/util/Histogram.scala | 2 - shared/src/main/scala/util/Neighborhood.scala | 2 - shared/src/test/scala/ast/Generators.scala | 72 +- .../tree/MamlExpressionTreeCodecSpec.scala | 8 +- shared/src/test/scala/ast/kind/KindSpec.scala | 28 +- spark/build.sbt | 5 - .../src/main/scala/eval/RDDInterpreter.scala | 1 - 
spark/src/main/scala/eval/RDDResult.scala | 3 +- .../eval/directive/RDDOpDirectives.scala | 118 ++-- .../eval/directive/RDDSourceDirectives.scala | 15 +- .../src/test/scala/RDDOpDirectivesSpec.scala | 85 ++- 77 files changed, 1787 insertions(+), 2236 deletions(-) delete mode 100644 .circleci/config.yml create mode 100644 .github/dependabot.yml create mode 100644 .github/release-drafter.yml create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/release-drafter.yml delete mode 100644 jvm/version.sbt delete mode 100644 project/project/plugins.sbt delete mode 100755 sbt delete mode 100644 spark/build.sbt diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 0e7254d0..00000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,104 +0,0 @@ -aliases: - - &restore_sbt_cache - key: sbt-cache-{{ checksum "/tmp/scala_version" }} - - - &save_sbt_cache - key: sbt-cache-{{ checksum "/tmp/scala_version" }}-{{ epoch }} - paths: - - "~/.ivy2/cache" - - "~/.sbt" - - "~/.cache/coursier" - - - &run_cibuild - - checkout - - run: echo "${SCALA_VERSION}" > /tmp/scala_version - - restore_cache: *restore_sbt_cache - - run: - name: Executing cibuild - command: ./scripts/cibuild - - save_cache: *save_sbt_cache - - - &run_cipublish - - checkout - - run: echo "${SCALA_VERSION}" > /tmp/scala_version - - restore_cache: *restore_sbt_cache - - run: - name: "Import signing key" - command: | - gpg --keyserver keyserver.ubuntu.com \ - --recv-keys 0x13E9AA1D8153E95E && \ - echo "${GPG_KEY}" | base64 -d > signing_key.asc && \ - gpg --import signing_key.asc - - run: - name: Executing cipublish - command: ./scripts/cipublish - - # Build environments - - &openjdk8-scala2_11_12-nodelts_environment - docker: - - image: circleci/openjdk:8-stretch-node - environment: - SCALA_VERSION: 2.11.12 - - - &openjdk8-scala2_12_8-nodelts_environment - docker: - - image: circleci/openjdk:8-stretch-node - environment: - SCALA_VERSION: 2.12.8 - -version: 2 -workflows: - version: 2 - build: - jobs: - - "openjdk8-scala2.11.12-nodelts": - filters: # required since `openjdk8-scala2.11.12-nodelts_deploy` has tag filters AND requires `openjdk8-scala2.11.12-nodelts` - tags: - only: - - /^(.*)$/ - - "openjdk8-scala2.12.8-nodelts": - filters: # required since `openjdk8-scala2.12.8-nodelts_deploy` has tag filters AND requires `openjdk8-scala2.12.8-nodelts` - tags: - only: - - /^(.*)$/ - - "openjdk8-scala2.11.12-nodelts_deploy": - requires: - - "openjdk8-scala2.11.12-nodelts" - filters: - tags: - only: - - /^(.*)$/ - branches: - only: - - develop - - /release\/.*/ - - /hotfix\/.*/ - - "openjdk8-scala2.12.8-nodelts_deploy": - requires: - - "openjdk8-scala2.12.8-nodelts" - filters: - tags: - only: - - /^(.*)$/ - branches: - only: - - develop - - /release\/.*/ - - /hotfix\/.*/ - -jobs: - "openjdk8-scala2.11.12-nodelts": - <<: *openjdk8-scala2_11_12-nodelts_environment - steps: *run_cibuild - - "openjdk8-scala2.12.8-nodelts": - <<: *openjdk8-scala2_12_8-nodelts_environment - steps: *run_cibuild - - "openjdk8-scala2.11.12-nodelts_deploy": - <<: *openjdk8-scala2_11_12-nodelts_environment - steps: *run_cipublish - - "openjdk8-scala2.12.8-nodelts_deploy": - <<: *openjdk8-scala2_12_8-nodelts_environment - steps: *run_cipublish diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..5ace4600 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git 
a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 00000000..4444d46c --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,22 @@ +name-template: '$NEXT_MINOR_VERSION' +tag-template: 'v$NEXT_MINOR_VERSION' +categories: + - title: 'Added' + labels: + - 'feature' + - title: 'Changed' + labels: + - 'enhancement' + - 'dependency-update' + - title: 'Fixed' + labels: + - 'fix' + - 'bugfix' + - 'bug' +exclude-labels: + - 'skip-changelog' + - 'docs' + - 'build' +change-template: '- $TITLE [#$NUMBER](https://github.com/geotrellis/maml/pull/$NUMBER) (@$AUTHOR)' +template: | + $CHANGES diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 00000000..28968ecc --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,61 @@ +name: CI +on: + pull_request: + branches: ['**'] + push: + branches: ['**'] + tags: [v*] +jobs: + build: + name: Build and Test + if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != 'geotrellis/maml' + strategy: + matrix: + os: [ubuntu-latest] + java: [11, 17] + distribution: [temurin] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: coursier/cache-action@v6 + - uses: actions/setup-java@v4 + with: + distribution: ${{ matrix.distribution }} + java-version: ${{ matrix.java }} + + - name: Check formatting + run: sbt scalafmtCheckAll + + - name: Build project + run: sbt +test + + publish: + name: Publish Artifacts + needs: [build] + if: github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/v')) + strategy: + matrix: + os: [ubuntu-latest] + java: [11] + distribution: [temurin] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: coursier/cache-action@v6 + - uses: actions/setup-java@v4 + with: + distribution: ${{ matrix.distribution }} + java-version: ${{ matrix.java }} + + - name: Release + run: sbt ci-release + env: + PGP_PASSPHRASE: ${{ secrets.PGP_PASSPHRASE }} + PGP_SECRET: ${{ secrets.PGP_SECRET }} + SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} + SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }} + if: ${{ env.SONATYPE_PASSWORD != '' && env.SONATYPE_USERNAME != '' }} diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml new file mode 100644 index 00000000..e3badf0f --- /dev/null +++ b/.github/workflows/release-drafter.yml @@ -0,0 +1,22 @@ +name: Release Drafter + +on: + push: + branches: + - main + pull_request: + types: [opened, reopened, synchronize] + +permissions: + contents: read + +jobs: + update_release_draft: + permissions: + contents: write + pull-requests: write + runs-on: ubuntu-latest + steps: + - uses: release-drafter/release-drafter@v6 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.gitignore b/.gitignore index 2cc8ced3..3c35b949 100644 --- a/.gitignore +++ b/.gitignore @@ -1,53 +1,61 @@ -*.class -*.log +# Operating System Files + +*.DS_Store +Thumbs.db + +# Build Files -index.html -index.js -package.html -lib -site/ -docs/_build/ - -project/.boot -project/boot -project/plugins/project -project/plugins/target -project/target +bin target -.ensime -\#*# -*~ -.#* -.lib -*.aux.xml -*.jar -*.crc -_SUCCESS - -*.ivy -*.pyc -.project +build/ +.gradle +cmake-build-debug + +# Eclipse Project Files + .classpath -.cache +.project .settings -.history -.idea -.DS_Store + +# IntelliJ IDEA Files + *.iml -*.swp -*.swo -*.sublime-* -.vagrant 
+*.idea + +# Spring Bootstrap artifacts + +dependency-reduced-pom.xml +README.html -lib -index.html -index.js -.ensime* +# Sublime files -nohup.out +*.sublime-workspace -site/ -.metadata/ +# VSCode files + +.vscode +.history + +# Metals .metals -.bloop \ No newline at end of file +.bloop +metals.sbt + +# SBT + +.bsp + +# Test data files # + +java/data + +# Compiled libs # + +java/*.dylib +java/*.so +java/*dll + +*.log diff --git a/.scalafmt.conf b/.scalafmt.conf index 311c2de2..8749f9de 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1 +1,13 @@ -version=2.0.0-RC4 \ No newline at end of file +version=3.8.1 +runner.dialect = scala3 +align.openParenCallSite = true +align.openParenDefnSite = true +maxColumn = 150 +continuationIndent.defnSite = 2 +assumeStandardLibraryStripMargin = true +danglingParentheses.preset = true +rewrite.rules = [AvoidInfix, SortImports, RedundantParens, SortModifiers] +docstrings = JavaDoc +newlines.afterCurlyLambda = preserve +docstrings.style = Asterisk +docstrings.oneline = unfold diff --git a/README.md b/README.md index 9c6efc0b..58d2d0b5 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Map Algebra Modeling Language -[![CircleCI](https://circleci.com/gh/geotrellis/maml.svg?style=svg)](https://circleci.com/gh/geotrellis/maml) [![Join the chat at https://gitter.im/geotrellis/geotrellis](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/geotrellis/geotrellis?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![CI](https://github.com/geotrellis/maml/actions/workflows/ci.yml/badge.svg)](https://github.com/geotrellis/maml/actions/workflows/ci.yml) [![Join the chat at https://gitter.im/geotrellis/geotrellis](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/geotrellis/geotrellis?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Azavea has been developing Map Algebra Modeling Language (MAML) as part of a NASA grant in [Raster Foundry](https://www.rasterfoundry.com/). MAML is used to create a declarative structure that describes a combination of map algebra operations. This structure may be evaluated against a given collection of datasets to produce a result. Critically, the evaluation logic is not specified in MAML, only the semantic meaning of the operations. This separation allows for multiple interpreters to exist that operate in different computational contexts. diff --git a/build.sbt b/build.sbt index 8604a787..c89220e4 100644 --- a/build.sbt +++ b/build.sbt @@ -1,26 +1,13 @@ -import xerial.sbt.Sonatype._ - import Dependencies._ val commonSettings = Seq( - // We are overriding the default behavior of sbt-git which, by default, - // only appends the `-SNAPSHOT` suffix if there are uncommitted - // changes in the workspace. 
- version := { - // Avoid Cyclic reference involving error - if (git.gitCurrentTags.value.isEmpty || git.gitUncommittedChanges.value) - git.gitDescribedVersion.value.get + "-SNAPSHOT" - else - git.gitDescribedVersion.value.get - }, - scalaVersion := "2.11.12", - crossScalaVersions := Seq("2.11.12", "2.12.10"), - resolvers ++= Seq( - Resolver.sonatypeRepo("releases"), - "locationtech-releases" at "https://repo.locationtech.org/content/groups/releases", - "locationtech-snapshots" at "https://repo.locationtech.org/content/groups/snapshots" + scalaVersion := "2.12.19", + crossScalaVersions := Seq("2.12.19", "2.13.14"), + resolvers ++= Resolver.sonatypeOssRepos("releases") ++ Resolver.sonatypeOssRepos("snapshots") ++ Seq( + "locationtech-releases".at("https://repo.locationtech.org/content/groups/releases"), + "locationtech-snapshots".at("https://repo.locationtech.org/content/groups/snapshots") ), - addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full), + addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full), scalacOptions := Seq( "-deprecation", "-unchecked", @@ -32,7 +19,8 @@ val commonSettings = Seq( "-language:existentials", "-language:experimental.macros", "-feature", - "-Ypatmat-exhaust-depth", "100" + "-Ypatmat-exhaust-depth", + "100" ) ) @@ -47,48 +35,31 @@ lazy val publishSettings = Seq( organizationName := "GeoTrellis", organizationHomepage := Some(new URL("https://geotrellis.io/")), description := "MAML is used to create a declarative structure that describes a combination of map algebra operations.", - publishArtifact in Test := false -) ++ sonatypeSettings ++ credentialSettings + Test / publishArtifact := false +) ++ sonatypeSettings lazy val sonatypeSettings = Seq( publishMavenStyle := true, - sonatypeProfileName := "com.azavea", - sonatypeProjectHosting := Some(GitHubHosting(user="geotrellis", repository="maml", email="systems@azavea.com")), developers := List( - Developer(id = "moradology", name = "Nathan Zimmerman", email = "nzimmerman@azavea.com", url = url("https://github.com/moradology")), - Developer(id = "echeipesh", name = "Eugene Cheipesh", email = "echeipesh@azavea.com", url = url("https://github.com/echeipesh")), - Developer(id = "lossyrob", name = "Rob Emanuele", email = "remanuele@azavea.com", url = url("https://github.com/lossyrob")) + Developer("moradology", "Nathan Zimmerman", "nzimmerman@azavea.com", url("https://github.com/moradology")), + Developer("echeipesh", "Eugene Cheipesh", "echeipesh@azavea.com", url("https://github.com/echeipesh")), + Developer("lossyrob", "Rob Emanuele", "remanuele@azavea.com", url("https://github.com/lossyrob")) ), licenses := Seq("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt")), - publishTo := sonatypePublishTo.value ) -lazy val credentialSettings = Seq( - credentials += Credentials( - "GnuPG Key ID", - "gpg", - System.getenv().get("GPG_KEY_ID"), - "ignored" - ), - - credentials += Credentials( - "Sonatype Nexus Repository Manager", - "oss.sonatype.org", - System.getenv().get("SONATYPE_USERNAME"), - System.getenv().get("SONATYPE_PASSWORD") - ) -) - -lazy val root = project.in(file(".")) +lazy val root = project + .in(file(".")) .settings(commonSettings) .settings(publishSettings) // these settings are needed to release all aggregated modules under this root module .settings(noPublishSettings) // this is to exclue the root module itself from being published .aggregate(mamlJs, mamlJvm, mamlSpark) .enablePlugins(ScalaJSPlugin) -lazy val maml = 
crossProject.in(file(".")) +lazy val maml = crossProject(JSPlatform, JVMPlatform) + .in(file(".")) .settings(commonSettings) .settings(publishSettings) .settings( @@ -104,41 +75,37 @@ lazy val maml = crossProject.in(file(".")) circe("parser").value, circe("optics").value ) - ).jvmSettings( + ) + .jvmSettings( name := "maml-jvm", libraryDependencies ++= Seq( geotrellis("raster").value, geotrellis("layer").value, geotrellis("proj4").value ) - ).jsSettings( + ) + .jsSettings( name := "maml-js", libraryDependencies += geotrellis("raster").value -) + ) lazy val mamlJvm = maml.jvm lazy val mamlJs = maml.js -lazy val mamlSpark = project.in(file("spark")) +lazy val mamlSpark = project + .in(file("spark")) + .dependsOn(mamlJvm) .settings(commonSettings) .settings(publishSettings) + .settings(name := "maml-spark") .settings( libraryDependencies ++= Seq( spark("sql").value % Test, spark("core").value % Provided, geotrellis("spark-testkit").value % Test, geotrellis("spark").value % Provided - ), - /** https://github.com/lucidworks/spark-solr/issues/179 */ - dependencyOverrides ++= { - val deps = Seq( - "com.fasterxml.jackson.core" % "jackson-core" % "2.6.7", - "com.fasterxml.jackson.core" % "jackson-databind" % "2.6.7", - "com.fasterxml.jackson.core" % "jackson-annotations" % "2.6.7" - ) - CrossVersion.partialVersion(scalaVersion.value) match { - // if Scala 2.12+ is used - case Some((2, scalaMajor)) if scalaMajor >= 12 => deps - case _ => deps :+ "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.6.7" - } - } - ).dependsOn(mamlJvm) + ) + ) + .settings( + Test / fork := false, + Test / parallelExecution := false + ) diff --git a/jvm/src/main/scala/dsl/tile/LazyMultibandRasterOperations.scala b/jvm/src/main/scala/dsl/tile/LazyMultibandRasterOperations.scala index 5f1968d7..8225c301 100644 --- a/jvm/src/main/scala/dsl/tile/LazyMultibandRasterOperations.scala +++ b/jvm/src/main/scala/dsl/tile/LazyMultibandRasterOperations.scala @@ -6,11 +6,12 @@ import geotrellis.raster._ import geotrellis.raster.render.BreakMap import geotrellis.raster.mapalgebra.local._ - trait LazyMultibandRasterOperations { val self: LazyMultibandRaster - /** Arithmetic Operations*/ + /** + * Arithmetic Operations + */ def +(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, Add.combine, Add.combine) def +(other: Int): LazyMultibandRaster = @@ -126,29 +127,28 @@ trait LazyMultibandRasterOperations { { Pow.combine(other, _) } ) - def logE: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.log(i2d(z))) }, - { z => if(isNoData(z)) z else math.log(z) } + { z: Int => if (isNoData(z)) z else d2i(math.log(i2d(z))) }, + { z => if (isNoData(z)) z else math.log(z) } ) def log10: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.log10(i2d(z))) }, - { z => if(isNoData(z)) z else math.log10(z) } + { z: Int => if (isNoData(z)) z else d2i(math.log10(i2d(z))) }, + { z => if (isNoData(z)) z else math.log10(z) } ) def sqrt: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.sqrt(i2d(z))) }, - { z => if(isNoData(z)) z else math.sqrt(z) } + { z: Int => if (isNoData(z)) z else d2i(math.sqrt(i2d(z))) }, + { z => if (isNoData(z)) z else math.sqrt(z) } ) def abs: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else math.abs(z) }, - { z => if(isNoData(z)) z else math.abs(z) } + { z: Int => if (isNoData(z)) z else math.abs(z) }, + { z => if (isNoData(z)) z else math.abs(z) } ) def isDefined: LazyMultibandRaster 
= @@ -178,14 +178,16 @@ trait LazyMultibandRasterOperations { def changeSign: LazyMultibandRaster = self.dualMap( { z: Int => if (isNoData(z)) z else z * -1 }, - { z => if (isNoData(z)) z else z * - 1 } + { z => if (isNoData(z)) z else z * -1 } ) - /** Numeric Comparisons */ + /** + * Numeric Comparisons + */ def <(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (Less.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (Less.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Less.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Less.compare(d1, d2)) 1.0 else 0.0 } ) def <(other: Int): LazyMultibandRaster = self.dualMap( @@ -200,8 +202,8 @@ trait LazyMultibandRasterOperations { def <=(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (LessOrEqual.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (LessOrEqual.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (LessOrEqual.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (LessOrEqual.compare(d1, d2)) 1.0 else 0.0 } ) def <=(other: Int): LazyMultibandRaster = self.dualMap( @@ -216,8 +218,8 @@ trait LazyMultibandRasterOperations { def ===(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (Equal.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (Equal.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Equal.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Equal.compare(d1, d2)) 1.0 else 0.0 } ) def ===(other: Int): LazyMultibandRaster = self.dualMap( @@ -232,8 +234,8 @@ trait LazyMultibandRasterOperations { def !==(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (Unequal.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (Unequal.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Unequal.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Unequal.compare(d1, d2)) 1.0 else 0.0 } ) def !==(other: Int): LazyMultibandRaster = self.dualMap( @@ -248,8 +250,8 @@ trait LazyMultibandRasterOperations { def >=(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (GreaterOrEqual.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (GreaterOrEqual.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (GreaterOrEqual.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (GreaterOrEqual.compare(d1, d2)) 1.0 else 0.0 } ) def >=(other: Int): LazyMultibandRaster = self.dualMap( @@ -264,8 +266,8 @@ trait LazyMultibandRasterOperations { def >(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, - { (i1: Int, i2: Int) => if (Greater.compare(i1, i2)) 1 else 0 }, - { (d1: Double, d2: Double) => if (Greater.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Greater.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Greater.compare(d1, d2)) 1.0 else 0.0 } ) def >(other: Int): LazyMultibandRaster = self.dualMap( @@ -278,60 +280,59 @@ trait LazyMultibandRasterOperations { { (d: Double) => if (Greater.compare(d, other)) 1.0 else 0.0 } ) - /** Trigonometric Operations */ + /** + * Trigonometric Operations + */ def sin: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.sin(z)) }, - { z => if(isNoData(z)) z else math.sin(z) } - ) + { z: Int => if 
(isNoData(z)) z else d2i(math.sin(z)) }, + { z => if (isNoData(z)) z else math.sin(z) } + ) def cos: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.cos(z)) }, - { z => if(isNoData(z)) z else math.cos(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.cos(z)) }, + { z => if (isNoData(z)) z else math.cos(z) } + ) def tan: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.tan(z)) }, - { z => if(isNoData(z)) z else math.tan(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.tan(z)) }, + { z => if (isNoData(z)) z else math.tan(z) } + ) def sinh: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.sinh(z)) }, - { z => if(isNoData(z)) z else math.sinh(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.sinh(z)) }, + { z => if (isNoData(z)) z else math.sinh(z) } + ) def cosh: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.cosh(z)) }, - { z => if(isNoData(z)) z else math.cosh(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.cosh(z)) }, + { z => if (isNoData(z)) z else math.cosh(z) } + ) def tanh: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.tanh(z)) }, - { z => if(isNoData(z)) z else math.tanh(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.tanh(z)) }, + { z => if (isNoData(z)) z else math.tanh(z) } + ) def asin: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.asin(z)) }, - { z => if(isNoData(z)) z else math.asin(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.asin(z)) }, + { z => if (isNoData(z)) z else math.asin(z) } + ) def acos: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.acos(z)) }, - { z => if(isNoData(z)) z else math.acos(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.acos(z)) }, + { z => if (isNoData(z)) z else math.acos(z) } + ) def atan: LazyMultibandRaster = self.dualMap( - { z: Int => if(isNoData(z)) z else d2i(math.atan(z)) }, - { z => if(isNoData(z)) z else math.atan(z) } - ) + { z: Int => if (isNoData(z)) z else d2i(math.atan(z)) }, + { z => if (isNoData(z)) z else math.atan(z) } + ) def atan2(other: LazyMultibandRaster) = - self.dualCombine(other, - { (z1, z2) => d2i(math.atan2(i2d(z1), i2d(z2))) }, - { (z1, z2) => math.atan2(z1, z2) } - ) + self.dualCombine(other, { (z1, z2) => d2i(math.atan2(i2d(z1), i2d(z2))) }, { (z1, z2) => math.atan2(z1, z2) }) def atan2(other: Int): LazyMultibandRaster = self.dualMap( { i: Int => d2i(math.atan2(i, other)) }, @@ -343,26 +344,30 @@ trait LazyMultibandRasterOperations { { math.atan2(_, other) } ) - /** Rounding Operations */ + /** + * Rounding Operations + */ def round: LazyMultibandRaster = self.dualMap( identity, - { z => if(isNoData(z)) z else math.round(z) } + { z => if (isNoData(z)) z else math.round(z) } ) def floor: LazyMultibandRaster = self.dualMap( identity, - { z => if(isNoData(z)) z else math.floor(z) } + { z => if (isNoData(z)) z else math.floor(z) } ) def ceil: LazyMultibandRaster = self.dualMap( identity, - { z => if(isNoData(z)) z else math.ceil(z) } + { z => if (isNoData(z)) z else math.ceil(z) } ) - /** Logical Operations */ + /** + * Logical Operations + */ // TODO: Look into GT implementations for logical operations... 
// The handling of nodata vs 0 vs false is not obvious def &&(other: LazyMultibandRaster): LazyMultibandRaster = @@ -391,13 +396,13 @@ trait LazyMultibandRasterOperations { def ||(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, Or.combine, Or.combine) def ||(other: Int): LazyMultibandRaster = - self.dualMap( { Or.combine(_, other) }, { Or.combine(_, other) }) + self.dualMap({ Or.combine(_, other) }, { Or.combine(_, other) }) def ||:(other: Int): LazyMultibandRaster = - self.dualMap( { Or.combine(_, other) }, { Or.combine(_, other) }) + self.dualMap({ Or.combine(_, other) }, { Or.combine(_, other) }) def ||(other: Double): LazyMultibandRaster = - self.dualMap( { Or.combine(_, d2i(other)) }, { Or.combine(_, other) }) + self.dualMap({ Or.combine(_, d2i(other)) }, { Or.combine(_, other) }) def ||:(other: Double): LazyMultibandRaster = - self.dualMap( { Or.combine(_, d2i(other)) }, { Or.combine(_, other) }) + self.dualMap({ Or.combine(_, d2i(other)) }, { Or.combine(_, other) }) def xor(other: LazyMultibandRaster): LazyMultibandRaster = self.dualCombine(other, Xor.combine, Xor.combine) @@ -418,7 +423,9 @@ trait LazyMultibandRasterOperations { { z => if (isNoData(z)) z else if (z == 0.0) 1.0 else 0.0 } ) - /** Tile specific methods */ + /** + * Tile specific methods + */ def classify(breaks: BreakMap[Double, Int]) = self.dualMap( { i => breaks(i2d(i)) }, @@ -426,4 +433,3 @@ trait LazyMultibandRasterOperations { ) } - diff --git a/jvm/src/main/scala/dsl/tile/LazyTileMethods.scala b/jvm/src/main/scala/dsl/tile/LazyTileMethods.scala index 36cfa093..e0232592 100644 --- a/jvm/src/main/scala/dsl/tile/LazyTileMethods.scala +++ b/jvm/src/main/scala/dsl/tile/LazyTileMethods.scala @@ -6,11 +6,12 @@ import geotrellis.raster._ import geotrellis.raster.render.BreakMap import geotrellis.raster.mapalgebra.local._ - trait LazyRasterOperations { val self: LazyRaster - /** Arithmetic Operations*/ + /** + * Arithmetic Operations + */ def +(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), Add.combine, Add.combine) def +(other: Int): LazyRaster = LazyRaster.DualMap(List(self), { Add.combine(_, other) }, { Add.combine(_, other) }) def +:(other: Int): LazyRaster = LazyRaster.DualMap(List(self), { Add.combine(other, _) }, { Add.combine(other, _) }) @@ -41,201 +42,168 @@ trait LazyRasterOperations { def **(other: Double): LazyRaster = LazyRaster.DualMap(List(self), { Pow.combine(_, d2i(other)) }, { Pow.combine(_, other) }) def **:(other: Double): LazyRaster = LazyRaster.DualMap(List(self), { Pow.combine(d2i(other), _) }, { Pow.combine(other, _) }) + def logE: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.log(i2d(z))) }, { z => if (isNoData(z)) z else math.log(z) }) + + def log10: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.log10(i2d(z))) }, { z => if (isNoData(z)) z else math.log10(z) }) + + def sqrt: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.sqrt(i2d(z))) }, { z => if (isNoData(z)) z else math.sqrt(z) }) + + def abs: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else math.abs(z) }, { z => if (isNoData(z)) z else math.abs(z) }) + + def isDefined: LazyRaster = LazyRaster.DualMap(List(self), { z: Int => if (isData(z)) 1 else 0 }, { z => if (isData(z)) 1.0 else 0.0 }) + + def isUndefined: LazyRaster = LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) 1 else 0 }, { z => if (isNoData(z)) 1.0 
else 0.0 }) - def logE: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.log(i2d(z))) }, - { z => if(isNoData(z)) z else math.log(z) } - ) - - def log10: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.log10(i2d(z))) }, - { z => if(isNoData(z)) z else math.log10(z) } - ) - - def sqrt: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.sqrt(i2d(z))) }, - { z => if(isNoData(z)) z else math.sqrt(z) } - ) - - def abs: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else math.abs(z) }, - { z => if(isNoData(z)) z else math.abs(z) } - ) - - def isDefined: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if (isData(z)) 1 else 0 }, - { z => if (isData(z)) 1.0 else 0.0 } - ) - - def isUndefined: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if (isNoData(z)) 1 else 0 }, - { z => if (isNoData(z)) 1.0 else 0.0 } - ) - - def pow(i: Int): LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if (isNoData(z)) 1 else 0 }, - { z => if (isNoData(z)) 1.0 else 0.0 } - ) - - def pow(d: Double): LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if (isNoData(z)) 1 else 0 }, - { z => if (isNoData(z)) 1.0 else 0.0 } - ) - - def changeSign: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if (isNoData(z)) z else z * -1 }, - { z => if (isNoData(z)) z else z * - 1 } - ) - - /** Numeric Comparisons */ + def pow(i: Int): LazyRaster = LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) 1 else 0 }, { z => if (isNoData(z)) 1.0 else 0.0 }) + + def pow(d: Double): LazyRaster = LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) 1 else 0 }, { z => if (isNoData(z)) 1.0 else 0.0 }) + + def changeSign: LazyRaster = LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else z * -1 }, { z => if (isNoData(z)) z else z * -1 }) + + /** + * Numeric Comparisons + */ def <(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (Less.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (Less.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Less.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Less.compare(d1, d2)) 1.0 else 0.0 } ) def <(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Less.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Less.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Less.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Less.compare(d, other)) 1.0 else 0.0 } ) def <(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Less.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Less.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Less.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Less.compare(d, other)) 1.0 else 0.0 } ) def <=(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (LessOrEqual.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (LessOrEqual.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (LessOrEqual.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (LessOrEqual.compare(d1, d2)) 1.0 else 0.0 } ) def <=(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (LessOrEqual.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (LessOrEqual.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if 
(LessOrEqual.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (LessOrEqual.compare(d, other)) 1.0 else 0.0 } ) def <=(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (LessOrEqual.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (LessOrEqual.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (LessOrEqual.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (LessOrEqual.compare(d, other)) 1.0 else 0.0 } ) def ===(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (Equal.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (Equal.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Equal.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Equal.compare(d1, d2)) 1.0 else 0.0 } ) def ===(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Equal.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Equal.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Equal.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Equal.compare(d, other)) 1.0 else 0.0 } ) def ===(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Equal.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Equal.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Equal.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Equal.compare(d, other)) 1.0 else 0.0 } ) def !==(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (Unequal.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (Unequal.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Unequal.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Unequal.compare(d1, d2)) 1.0 else 0.0 } ) def !==(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Unequal.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Unequal.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Unequal.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Unequal.compare(d, other)) 1.0 else 0.0 } ) def !==(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Unequal.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Unequal.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Unequal.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Unequal.compare(d, other)) 1.0 else 0.0 } ) def >=(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (GreaterOrEqual.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (GreaterOrEqual.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (GreaterOrEqual.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (GreaterOrEqual.compare(d1, d2)) 1.0 else 0.0 } ) def >=(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (GreaterOrEqual.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (GreaterOrEqual.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (GreaterOrEqual.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (GreaterOrEqual.compare(d, other)) 1.0 else 0.0 } ) def >=(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (GreaterOrEqual.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (GreaterOrEqual.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (GreaterOrEqual.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (GreaterOrEqual.compare(d, 
other)) 1.0 else 0.0 } ) def >(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), - {(i1: Int, i2: Int) => if (Greater.compare(i1, i2)) 1 else 0 }, - {(d1: Double, d2: Double) => if (Greater.compare(d1, d2)) 1.0 else 0.0 } + { (i1: Int, i2: Int) => if (Greater.compare(i1, i2)) 1 else 0 }, + { (d1: Double, d2: Double) => if (Greater.compare(d1, d2)) 1.0 else 0.0 } ) def >(other: Int): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Greater.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Greater.compare(d, other)) 1.0 else 0.0 } + { (i: Int) => if (Greater.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Greater.compare(d, other)) 1.0 else 0.0 } ) def >(other: Double): LazyRaster = LazyRaster.DualMap(List(self), - {(i: Int) => if (Greater.compare(i, other.toInt)) 1 else 0}, - {(d: Double) => if (Greater.compare(d, other)) 1.0 else 0.0 } - ) - - /** Trigonometric Operations */ - def sin: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.sin(z)) }, - { z => if(isNoData(z)) z else math.sin(z) } - ) - def cos: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.cos(z)) }, - { z => if(isNoData(z)) z else math.cos(z) } - ) - def tan: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.tan(z)) }, - { z => if(isNoData(z)) z else math.tan(z) } - ) - - def sinh: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.sinh(z)) }, - { z => if(isNoData(z)) z else math.sinh(z) } - ) - def cosh: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.cosh(z)) }, - { z => if(isNoData(z)) z else math.cosh(z) } - ) - def tanh: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.tanh(z)) }, - { z => if(isNoData(z)) z else math.tanh(z) } - ) - - def asin: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.asin(z)) }, - { z => if(isNoData(z)) z else math.asin(z) } - ) - def acos: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.acos(z)) }, - { z => if(isNoData(z)) z else math.acos(z) } - ) - def atan: LazyRaster = LazyRaster.DualMap(List(self), - { z: Int => if(isNoData(z)) z else d2i(math.atan(z)) }, - { z => if(isNoData(z)) z else math.atan(z) } - ) - - def atan2(other: LazyRaster) = LazyRaster.DualCombine(List(self, other), { (z1, z2) => d2i(math.atan2(i2d(z1), i2d(z2))) }, { (z1, z2) => math.atan2(z1, z2) }) + { (i: Int) => if (Greater.compare(i, other.toInt)) 1 else 0 }, + { (d: Double) => if (Greater.compare(d, other)) 1.0 else 0.0 } + ) + + /** + * Trigonometric Operations + */ + def sin: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.sin(z)) }, { z => if (isNoData(z)) z else math.sin(z) }) + def cos: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.cos(z)) }, { z => if (isNoData(z)) z else math.cos(z) }) + def tan: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.tan(z)) }, { z => if (isNoData(z)) z else math.tan(z) }) + + def sinh: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.sinh(z)) }, { z => if (isNoData(z)) z else math.sinh(z) }) + def cosh: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.cosh(z)) }, { z => if (isNoData(z)) z else 
math.cosh(z) }) + def tanh: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.tanh(z)) }, { z => if (isNoData(z)) z else math.tanh(z) }) + + def asin: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.asin(z)) }, { z => if (isNoData(z)) z else math.asin(z) }) + def acos: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.acos(z)) }, { z => if (isNoData(z)) z else math.acos(z) }) + def atan: LazyRaster = + LazyRaster.DualMap(List(self), { z: Int => if (isNoData(z)) z else d2i(math.atan(z)) }, { z => if (isNoData(z)) z else math.atan(z) }) + + def atan2(other: LazyRaster) = + LazyRaster.DualCombine(List(self, other), { (z1, z2) => d2i(math.atan2(i2d(z1), i2d(z2))) }, { (z1, z2) => math.atan2(z1, z2) }) def atan2(other: Int): LazyRaster = LazyRaster.DualMap(List(self), { i: Int => d2i(math.atan2(i, other)) }, { math.atan2(_, other) }) def atan2(other: Double): LazyRaster = LazyRaster.DualMap(List(self), { i: Int => d2i(math.atan2(i, other)) }, { math.atan2(_, other) }) - /** Rounding Operations */ - def round: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if(isNoData(z)) z else math.round(z) }) + /** + * Rounding Operations + */ + def round: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if (isNoData(z)) z else math.round(z) }) - def floor: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if(isNoData(z)) z else math.floor(z) }) + def floor: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if (isNoData(z)) z else math.floor(z) }) - def ceil: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if(isNoData(z)) z else math.ceil(z) }) + def ceil: LazyRaster = LazyRaster.DualMap(List(self), identity, { z => if (isNoData(z)) z else math.ceil(z) }) - /** Logical Operations */ + /** + * Logical Operations + */ // TODO: Look into GT implementations for logical operations... 
// The handling of nodata vs 0 vs false is not obvious def &&(other: LazyRaster): LazyRaster = LazyRaster.DualCombine(List(self, other), And.combine, And.combine) @@ -254,10 +222,12 @@ trait LazyRasterOperations { def xor(other: Int): LazyRaster = LazyRaster.DualMap(List(self), { Xor.combine(_, other) }, { Xor.combine(_, other) }) def xor(other: Double): LazyRaster = LazyRaster.DualMap(List(self), { Xor.combine(_, d2i(other)) }, { Xor.combine(_, other) }) - def not: LazyRaster = LazyRaster.DualMap(List(self), { z => if (isNoData(z)) z else if (z == 0) 1 else 0 }, { z => if (isNoData(z)) z else if (z == 0.0) 1.0 else 0.0 }) + def not: LazyRaster = + LazyRaster.DualMap(List(self), { z => if (isNoData(z)) z else if (z == 0) 1 else 0 }, { z => if (isNoData(z)) z else if (z == 0.0) 1.0 else 0.0 }) - /** Tile specific methods */ + /** + * Tile specific methods + */ def classify(breaks: BreakMap[Double, Int]) = LazyRaster.DualMap(List(self), { i => breaks(i2d(i)) }, { d => i2d(breaks(d)) }) } - diff --git a/jvm/src/main/scala/dsl/tile/package.scala b/jvm/src/main/scala/dsl/tile/package.scala index adbfaf7b..4c5f08da 100644 --- a/jvm/src/main/scala/dsl/tile/package.scala +++ b/jvm/src/main/scala/dsl/tile/package.scala @@ -2,7 +2,6 @@ package com.azavea.maml.dsl import com.azavea.maml.eval.tile._ - package object tile { implicit class LazyRasterExtensions(val self: LazyRaster) extends LazyRasterOperations implicit class LazyMultibandRasterExtensions(val self: LazyMultibandRaster) extends LazyMultibandRasterOperations diff --git a/jvm/src/main/scala/error/MamlException.scala b/jvm/src/main/scala/error/MamlException.scala index 3a677ab5..09d0dbe3 100644 --- a/jvm/src/main/scala/error/MamlException.scala +++ b/jvm/src/main/scala/error/MamlException.scala @@ -2,6 +2,4 @@ package com.azavea.maml.error import cats.data.NonEmptyList - case class MamlException(errors: NonEmptyList[MamlError]) extends Exception - diff --git a/jvm/src/main/scala/eval/BufferingInterpreter.scala b/jvm/src/main/scala/eval/BufferingInterpreter.scala index f5d57bef..53acd420 100644 --- a/jvm/src/main/scala/eval/BufferingInterpreter.scala +++ b/jvm/src/main/scala/eval/BufferingInterpreter.scala @@ -31,14 +31,12 @@ case class BufferingInterpreter( } } - val fallbackDirective: ScopedDirective[BufferingInterpreter.Scope] = - { case (exp, res, scope) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } + val fallbackDirective: ScopedDirective[BufferingInterpreter.Scope] = { case (exp, res, scope) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } def instructions(expression: Expression, children: Seq[Result], scope: BufferingInterpreter.Scope): Interpreted[Result] = directives.reduceLeft(_ orElse _).orElse(fallbackDirective)((expression, children, scope)) } - object BufferingInterpreter { case class Scope(buffer: Int, tileSize: Int) case class Options(tileSize: Int) @@ -50,13 +48,15 @@ object BufferingInterpreter { ScopedDirective.pure[DblLit](SourceDirectives.dblLiteral), ScopedDirective.pure[BoolLit](SourceDirectives.boolLiteral), ScopedDirective.pure[GeomLit](SourceDirectives.geoJson), - ScopedDirective.pure[Addition](OpDirectives.additionTile orElse OpDirectives.additionInt orElse OpDirectives.additionDouble), + ScopedDirective.pure[Addition](OpDirectives.additionTile.orElse(OpDirectives.additionInt).orElse(OpDirectives.additionDouble)), ScopedDirective.pure[Subtraction](OpDirectives.subtraction), - ScopedDirective.pure[Multiplication](OpDirectives.multiplicationTile orElse OpDirectives.multiplicationInt orElse 
OpDirectives.multiplicationDouble), + ScopedDirective.pure[Multiplication]( + OpDirectives.multiplicationTile.orElse(OpDirectives.multiplicationInt).orElse(OpDirectives.multiplicationDouble) + ), ScopedDirective.pure[Division](OpDirectives.division), ScopedDirective.pure[Pow](OpDirectives.pow), - ScopedDirective.pure[Max](OpDirectives.maxTile orElse OpDirectives.maxInt orElse OpDirectives.maxDouble), - ScopedDirective.pure[Min](OpDirectives.minTile orElse OpDirectives.minInt orElse OpDirectives.minDouble), + ScopedDirective.pure[Max](OpDirectives.maxTile.orElse(OpDirectives.maxInt).orElse(OpDirectives.maxDouble)), + ScopedDirective.pure[Min](OpDirectives.minTile.orElse(OpDirectives.minInt).orElse(OpDirectives.minDouble)), ScopedDirective.pure[Lesser](OpDirectives.lessThan), ScopedDirective.pure[LesserOrEqual](OpDirectives.lessThanOrEqualTo), ScopedDirective.pure[Equal](OpDirectives.equalTo), @@ -99,7 +99,8 @@ object BufferingInterpreter { focalStandardDeviation, ScopedDirective.pure[FocalSlope](FocalDirectives.slope), ScopedDirective.pure[FocalHillshade](FocalDirectives.hillshade) - ), Options(256) + ), + Options(256) ) val focalMax = ScopedDirective[Scope] { case (FocalMax(_, neighborhood, target), childResults, scope) => diff --git a/jvm/src/main/scala/eval/ConcurrentInterpreter.scala b/jvm/src/main/scala/eval/ConcurrentInterpreter.scala index e4538a1f..ac1fca9c 100644 --- a/jvm/src/main/scala/eval/ConcurrentInterpreter.scala +++ b/jvm/src/main/scala/eval/ConcurrentInterpreter.scala @@ -8,33 +8,30 @@ import cats._ import cats.implicits._ import cats.data.Validated._ import cats.data.{NonEmptyList => NEL, _} -import cats.effect.{Concurrent, Fiber} +import cats.effect.{Fiber, Spawn} import scala.reflect.ClassTag -class ConcurrentInterpreter[F[_]](directives: List[Directive])( - implicit Conc: Concurrent[F] +class ConcurrentInterpreter[F[_]](directives: List[Directive])(implicit + Conc: Spawn[F] ) extends Interpreter[F] { def apply(exp: Expression): F[Interpreted[Result]] = { val children = evalInF(exp) - val out = children map { - _.andThen({ childRes => + val out = children.map { + _.andThen { childRes => instructions(exp, childRes) - }) + } } out } def evalInF(expression: Expression): F[Interpreted[List[Result]]] = { - val fibsF: F[List[Fiber[F, Interpreted[Result]]]] = - expression.children traverse { expr => - Conc.start(apply(expr)) - } - fibsF flatMap { _.traverse { _.join } } map { _.sequence } + val fibsF: F[List[Fiber[F, Throwable, Interpreted[Result]]]] = expression.children.traverse { expr => Conc.start(apply(expr)) } + fibsF.flatMap { _.traverse { _.joinWithNever } }.map { _.sequence } } - val fallbackDirective: Directive = { - case (exp, res) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) + val fallbackDirective: Directive = { case (exp, res) => + Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } def prependDirective(directive: Directive) = @@ -44,8 +41,8 @@ class ConcurrentInterpreter[F[_]](directives: List[Directive])( new ConcurrentInterpreter[F](directives :+ directive) def instructions( - expression: Expression, - children: List[Result] + expression: Expression, + children: List[Result] ): Interpreted[Result] = directives .reduceLeft(_ orElse _) @@ -53,6 +50,6 @@ class ConcurrentInterpreter[F[_]](directives: List[Directive])( } object ConcurrentInterpreter { - def DEFAULT[T[_]: Concurrent] = + def DEFAULT[T[_]: Spawn] = new ConcurrentInterpreter[T](NaiveInterpreter.DEFAULT.directives) } diff --git a/jvm/src/main/scala/eval/Interpreter.scala 
b/jvm/src/main/scala/eval/Interpreter.scala index 41db3f7b..e505b2df 100644 --- a/jvm/src/main/scala/eval/Interpreter.scala +++ b/jvm/src/main/scala/eval/Interpreter.scala @@ -10,7 +10,6 @@ import cats.data.{NonEmptyList => NEL, _} import scala.reflect.ClassTag - trait Interpreter[F[_]] { def apply(exp: Expression): F[Interpreted[Result]] } @@ -18,4 +17,3 @@ trait Interpreter[F[_]] { object Interpreter { val DEFAULT = NaiveInterpreter.DEFAULT } - diff --git a/jvm/src/main/scala/eval/NaiveInterpreter.scala b/jvm/src/main/scala/eval/NaiveInterpreter.scala index c31f8fc4..33ad7afb 100644 --- a/jvm/src/main/scala/eval/NaiveInterpreter.scala +++ b/jvm/src/main/scala/eval/NaiveInterpreter.scala @@ -11,12 +11,11 @@ import cats.data.{NonEmptyList => NEL, _} import scala.reflect.ClassTag - case class NaiveInterpreter(directives: List[Directive]) extends Interpreter[Id] { def apply(exp: Expression): Interpreted[Result] = { val children: Interpreted[List[Result]] = exp.children.traverse(apply) - children.andThen({ childRes => instructions(exp, childRes) }) + children.andThen { childRes => instructions(exp, childRes) } } def prependDirective(directive: Directive): Interpreter[Id] = @@ -25,8 +24,7 @@ case class NaiveInterpreter(directives: List[Directive]) extends Interpreter[Id] def appendDirective(directive: Directive): Interpreter[Id] = NaiveInterpreter(directives :+ directive) - val fallbackDirective: Directive = - { case (exp, res) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } + val fallbackDirective: Directive = { case (exp, res) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } def instructions(expression: Expression, children: List[Result]): Interpreted[Result] = directives.reduceLeft(_ orElse _).orElse(fallbackDirective)((expression, children)) @@ -41,13 +39,13 @@ object NaiveInterpreter { SourceDirectives.dblLiteral, SourceDirectives.boolLiteral, SourceDirectives.geoJson, - OpDirectives.additionTile orElse OpDirectives.additionInt orElse OpDirectives.additionDouble, + OpDirectives.additionTile.orElse(OpDirectives.additionInt).orElse(OpDirectives.additionDouble), OpDirectives.subtraction, - OpDirectives.multiplicationTile orElse OpDirectives.multiplicationInt orElse OpDirectives.multiplicationDouble, + OpDirectives.multiplicationTile.orElse(OpDirectives.multiplicationInt).orElse(OpDirectives.multiplicationDouble), OpDirectives.division, OpDirectives.pow, - OpDirectives.maxTile orElse OpDirectives.maxInt orElse OpDirectives.maxDouble, - OpDirectives.minTile orElse OpDirectives.minInt orElse OpDirectives.minDouble, + OpDirectives.maxTile.orElse(OpDirectives.maxInt).orElse(OpDirectives.maxDouble), + OpDirectives.minTile.orElse(OpDirectives.minInt).orElse(OpDirectives.minDouble), OpDirectives.lessThan, OpDirectives.lessThanOrEqualTo, OpDirectives.equalTo, diff --git a/jvm/src/main/scala/eval/ParallelInterpreter.scala b/jvm/src/main/scala/eval/ParallelInterpreter.scala index e27c4eba..7ac25d67 100644 --- a/jvm/src/main/scala/eval/ParallelInterpreter.scala +++ b/jvm/src/main/scala/eval/ParallelInterpreter.scala @@ -8,34 +8,24 @@ import cats._ import cats.implicits._ import cats.data.Validated._ import cats.data.{NonEmptyList => NEL, _} -import cats.effect.ContextShift +import cats.effect.kernel.Async -class ParallelInterpreter[F[_]: Monad, G[_]](directives: List[Directive])( - implicit Par: ParallelCompact[F, G], - contextShift: ContextShift[F] -) extends Interpreter[F] { +class ParallelInterpreter[F[_]: Monad: Parallel, G[_]](directives: List[Directive]) extends Interpreter[F] { def 
apply(exp: Expression): F[Interpreted[Result]] = { val children = evalInF(exp) - val out = children map { - _.andThen({ childRes => - instructions(exp, childRes) - }) - } + val out = children.map(_.andThen(instructions(exp, _))) out } def evalInF( - expression: Expression - )(implicit contextShift: ContextShift[F]): F[Interpreted[List[Result]]] = { - val resultsF: F[List[Interpreted[Result]]] = - expression.children parTraverse { expr => - apply(expr) - } - resultsF map { _.sequence } + expression: Expression + ): F[Interpreted[List[Result]]] = { + val resultsF: F[List[Interpreted[Result]]] = expression.children.parTraverse { expr => apply(expr) } + resultsF.map { _.sequence } } - val fallbackDirective: Directive = { - case (exp, res) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) + val fallbackDirective: Directive = { case (exp, res) => + Invalid(NEL.of(UnhandledCase(exp, exp.kind))) } def prependDirective(directive: Directive) = @@ -45,8 +35,8 @@ class ParallelInterpreter[F[_]: Monad, G[_]](directives: List[Directive])( new ParallelInterpreter[F, G](directives :+ directive) def instructions( - expression: Expression, - children: List[Result] + expression: Expression, + children: List[Result] ): Interpreted[Result] = directives .reduceLeft(_ orElse _) @@ -54,9 +44,5 @@ class ParallelInterpreter[F[_]: Monad, G[_]](directives: List[Directive])( } object ParallelInterpreter { - def DEFAULT[T[_], U[_]]( - implicit P: ParallelCompact[T, U], - M: Monad[T], - contextShift: ContextShift[T] - ) = new ParallelInterpreter[T, U](NaiveInterpreter.DEFAULT.directives) + def DEFAULT[T[_]: Parallel: Monad, U[_]] = new ParallelInterpreter[T, U](NaiveInterpreter.DEFAULT.directives) } diff --git a/jvm/src/main/scala/eval/Result.scala b/jvm/src/main/scala/eval/Result.scala index 24bf66f4..4f822729 100644 --- a/jvm/src/main/scala/eval/Result.scala +++ b/jvm/src/main/scala/eval/Result.scala @@ -13,7 +13,6 @@ import Validated._ import scala.reflect.ClassTag - trait Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] def kind: MamlKind @@ -22,9 +21,9 @@ trait Result { case class DoubleResult(res: Double) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[Int] isAssignableFrom cls) + if (classOf[Int].isAssignableFrom(cls)) Valid(res.toInt.asInstanceOf[T]) - else if (classOf[Double] isAssignableFrom cls) + else if (classOf[Double].isAssignableFrom(cls)) Valid(res.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("int", "double")))) @@ -35,9 +34,9 @@ case class DoubleResult(res: Double) extends Result { case class IntResult(res: Int) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[Int] isAssignableFrom cls) + if (classOf[Int].isAssignableFrom(cls)) Valid(res.toInt.asInstanceOf[T]) - else if (classOf[Double] isAssignableFrom cls) + else if (classOf[Double].isAssignableFrom(cls)) Valid(res.toDouble.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("int", "double")))) @@ -48,7 +47,7 @@ case class IntResult(res: Int) extends Result { case class GeomResult(res: Geometry) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[Geometry] isAssignableFrom cls) + if (classOf[Geometry].isAssignableFrom(cls)) Valid(res.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("geom")))) @@ -59,11 +58,11 @@ case class GeomResult(res: Geometry) extends Result { case class 
ImageResult(res: LazyMultibandRaster) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[LazyMultibandRaster] isAssignableFrom cls) + if (classOf[LazyMultibandRaster].isAssignableFrom(cls)) Valid(res.asInstanceOf[T]) - else if (classOf[ProjectedRaster[MultibandTile]] isAssignableFrom cls) + else if (classOf[ProjectedRaster[MultibandTile]].isAssignableFrom(cls)) Valid(res.evaluateDouble.asInstanceOf[T]) - else if (classOf[MultibandTile] isAssignableFrom cls) + else if (classOf[MultibandTile].isAssignableFrom(cls)) Valid(res.evaluateDouble.raster.tile.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("img")))) @@ -74,11 +73,10 @@ case class ImageResult(res: LazyMultibandRaster) extends Result { case class BoolResult(res: Boolean) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[Boolean] isAssignableFrom cls) + if (classOf[Boolean].isAssignableFrom(cls)) Valid(res.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("bool")))) } def kind: MamlKind = MamlKind.Bool } - diff --git a/jvm/src/main/scala/eval/ScopedInterpreter.scala b/jvm/src/main/scala/eval/ScopedInterpreter.scala index 3a86ab70..c29ad947 100644 --- a/jvm/src/main/scala/eval/ScopedInterpreter.scala +++ b/jvm/src/main/scala/eval/ScopedInterpreter.scala @@ -9,7 +9,6 @@ import cats.data.Validated._ import cats.data.{NonEmptyList => NEL, _} import geotrellis.raster.GridBounds - trait ScopedInterpreter[Scope] extends Interpreter[Id] { def scopeFor(exp: Expression, previous: Option[Scope]): Scope def appendDirective(directive: ScopedDirective[Scope]): ScopedInterpreter[Scope] @@ -20,13 +19,12 @@ trait ScopedInterpreter[Scope] extends Interpreter[Id] { def apply(exp: Expression): Interpreted[Result] = { def eval(exp: Expression, maybeScope: Option[Scope] = None): Interpreted[Result] = { val currentScope = scopeFor(exp, maybeScope) - val children: Interpreted[List[Result]] = exp.children.traverse({ childTree => + val children: Interpreted[List[Result]] = exp.children.traverse { childTree => val childScope = scopeFor(childTree, Some(currentScope)) eval(childTree, Some(childScope)) - }) - children.andThen({ childResult => instructions(exp, childResult, currentScope) }) + } + children.andThen { childResult => instructions(exp, childResult, currentScope) } } eval(exp) } } - diff --git a/jvm/src/main/scala/eval/directive/Directive.scala b/jvm/src/main/scala/eval/directive/Directive.scala index b3d24558..143b400f 100644 --- a/jvm/src/main/scala/eval/directive/Directive.scala +++ b/jvm/src/main/scala/eval/directive/Directive.scala @@ -11,8 +11,6 @@ import cats.implicits._ import scala.reflect.ClassTag - object Directive { def apply(ruleFn: PartialFunction[(Expression, Seq[Result]), Interpreted[Result]]): Directive = ruleFn } - diff --git a/jvm/src/main/scala/eval/directive/FocalDirectives.scala b/jvm/src/main/scala/eval/directive/FocalDirectives.scala index 73d97f7e..79cce6be 100644 --- a/jvm/src/main/scala/eval/directive/FocalDirectives.scala +++ b/jvm/src/main/scala/eval/directive/FocalDirectives.scala @@ -15,76 +15,81 @@ import cats.implicits._ import cats.data._ import Validated._ - object FocalDirectives { val max = Directive { case (FocalMax(_, neighborhood, target), childResults) => - childResults - .toList + childResults.toList .traverse { _.as[LazyMultibandRaster] } - .map({ lr => + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, 
focal.Max.apply)) - }) + } } val min = Directive { case (FocalMin(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Min.apply _)) - }) + } } val mean = Directive { case (FocalMean(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Mean.apply _)) - }) + } } val median = Directive { case (FocalMedian(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Median.apply _)) - }) + } } val mode = Directive { case (FocalMode(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Mode.apply _)) - }) + } } val sum = Directive { case (FocalSum(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.Sum.apply _)) - }) + } } val standardDeviation = Directive { case (FocalStdDev(_, neighborhood, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => ImageResult(lr.head.focal(NeighborhoodConversion(neighborhood), None, target, focal.StandardDeviation.apply _)) - }) + } } val slope = Directive { case (FocalSlope(_, zf, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => val image = lr.head val re = image.rasterExtent val zfactor = zf.getOrElse { @@ -94,14 +99,15 @@ object FocalDirectives { 1 / (EQUATOR_METERS * math.cos(math.toRadians(middleY))) } ImageResult(image.slope(None, zfactor, re.cellSize, target)) - }) + } } val hillshade = Directive { case (FocalHillshade(_, azimuth, altitude, zf, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => val image = lr.head val re = image.rasterExtent val zfactor = zf.getOrElse { @@ -111,17 +117,18 @@ object FocalDirectives { 1 / (EQUATOR_METERS * math.cos(math.toRadians(middleY))) } ImageResult(image.hillshade(None, zfactor, re.cellSize, azimuth, altitude, target)) - }) + } } val aspect = Directive { case (FocalAspect(_, target), childResults) => childResults - .map({ _.as[LazyMultibandRaster] }) - .toList.sequence - .map({ lr => + .map { _.as[LazyMultibandRaster] } + .toList + .sequence + .map { lr => val image = lr.head val re = image.rasterExtent ImageResult(image.aspect(None, re.cellSize, target)) - }) + } } } diff --git 
a/jvm/src/main/scala/eval/directive/OpDirectives.scala b/jvm/src/main/scala/eval/directive/OpDirectives.scala index a047cf20..34a301fd 100644 --- a/jvm/src/main/scala/eval/directive/OpDirectives.scala +++ b/jvm/src/main/scala/eval/directive/OpDirectives.scala @@ -12,7 +12,7 @@ import cats.implicits._ import cats.data.{NonEmptyList => NEL, _} import Validated._ import geotrellis.vector._ -import geotrellis.raster.{Tile, isData} +import geotrellis.raster.{isData, Tile} import scala.reflect.ClassTag import scala.concurrent.duration._ @@ -21,7 +21,7 @@ import scala.util.Try object OpDirectives { private def asInstanceOfOption[T: ClassTag](o: Any): Option[T] = - Some(o) collect { case m: T => m} + Some(o).collect { case m: T => m } private def doubleResults(grouped: Map[MamlKind, Seq[Result]]): Interpreted[List[Double]] = grouped.getOrElse(MamlKind.Double, List.empty).map(_.as[Double]).toList.sequence @@ -35,358 +35,465 @@ object OpDirectives { private def not[A, B](f: (A, B) => Boolean): (A, B) => Boolean = !f(_, _) private def tileOrBoolReduction( - ti: (LazyMultibandRaster, Int) => LazyMultibandRaster, - it: (Int, LazyMultibandRaster) => LazyMultibandRaster, - td: (LazyMultibandRaster, Double) => LazyMultibandRaster, - dt: (Double, LazyMultibandRaster) => LazyMultibandRaster, - tt: (LazyMultibandRaster, LazyMultibandRaster) => LazyMultibandRaster, - ii: (Int, Int) => Boolean, - di: (Double, Int) => Boolean, - dd: (Double, Double) => Boolean, - id: (Int, Double) => Boolean, - res1: Result, - res2: Result + ti: (LazyMultibandRaster, Int) => LazyMultibandRaster, + it: (Int, LazyMultibandRaster) => LazyMultibandRaster, + td: (LazyMultibandRaster, Double) => LazyMultibandRaster, + dt: (Double, LazyMultibandRaster) => LazyMultibandRaster, + tt: (LazyMultibandRaster, LazyMultibandRaster) => LazyMultibandRaster, + ii: (Int, Int) => Boolean, + di: (Double, Int) => Boolean, + dd: (Double, Double) => Boolean, + id: (Int, Double) => Boolean, + res1: Result, + res2: Result ): Result = (res1, res2) match { - case (ImageResult(lt1), ImageResult(lt2)) => ImageResult(tt(lt1, lt2)) - case (ImageResult(lt), IntResult(int)) => ImageResult((ti(lt, int))) - case (IntResult(int), ImageResult(lt)) => ImageResult((it(int, lt))) - case (ImageResult(lt), DoubleResult(double)) => ImageResult(td(lt, double)) - case (DoubleResult(double), ImageResult(lt)) => ImageResult(dt(double, lt)) - case (IntResult(int1), IntResult(int2)) => BoolResult(ii(int1, int2)) - case (DoubleResult(dbl), IntResult(int)) => BoolResult(di(dbl, int)) - case (DoubleResult(dbl1), DoubleResult(dbl2)) => BoolResult(dd(dbl1, dbl2)) - case (IntResult(int), DoubleResult(dbl)) => BoolResult(id(int, dbl)) + case (ImageResult(lt1), ImageResult(lt2)) => ImageResult(tt(lt1, lt2)) + case (ImageResult(lt), IntResult(int)) => ImageResult(ti(lt, int)) + case (IntResult(int), ImageResult(lt)) => ImageResult(it(int, lt)) + case (ImageResult(lt), DoubleResult(double)) => ImageResult(td(lt, double)) + case (DoubleResult(double), ImageResult(lt)) => ImageResult(dt(double, lt)) + case (IntResult(int1), IntResult(int2)) => BoolResult(ii(int1, int2)) + case (DoubleResult(dbl), IntResult(int)) => BoolResult(di(dbl, int)) + case (DoubleResult(dbl1), DoubleResult(dbl2)) => BoolResult(dd(dbl1, dbl2)) + case (IntResult(int), DoubleResult(dbl)) => BoolResult(id(int, dbl)) } private def tileOrScalarReduction( - ti: (LazyMultibandRaster, Int) => LazyMultibandRaster, - it: (Int, LazyMultibandRaster) => LazyMultibandRaster, - td: (LazyMultibandRaster, Double) => 
LazyMultibandRaster, - dt: (Double, LazyMultibandRaster) => LazyMultibandRaster, - tt: (LazyMultibandRaster, LazyMultibandRaster) => LazyMultibandRaster, - ii: (Int, Int) => Int, - di: (Double, Int) => Double, - dd: (Double, Double) => Double, - id: (Int, Double) => Double, - res1: Result, - res2: Result + ti: (LazyMultibandRaster, Int) => LazyMultibandRaster, + it: (Int, LazyMultibandRaster) => LazyMultibandRaster, + td: (LazyMultibandRaster, Double) => LazyMultibandRaster, + dt: (Double, LazyMultibandRaster) => LazyMultibandRaster, + tt: (LazyMultibandRaster, LazyMultibandRaster) => LazyMultibandRaster, + ii: (Int, Int) => Int, + di: (Double, Int) => Double, + dd: (Double, Double) => Double, + id: (Int, Double) => Double, + res1: Result, + res2: Result ): Result = (res1, res2) match { - case (ImageResult(lt1), ImageResult(lt2)) => ImageResult(tt(lt1, lt2)) - case (ImageResult(lt), IntResult(int)) => ImageResult((ti(lt, int))) - case (IntResult(int), ImageResult(lt)) => ImageResult((it(int, lt))) - case (ImageResult(lt), DoubleResult(double)) => ImageResult(td(lt, double)) - case (DoubleResult(double), ImageResult(lt)) => ImageResult(dt(double, lt)) - case (IntResult(int1), IntResult(int2)) => IntResult(ii(int1, int2)) - case (DoubleResult(dbl), IntResult(int)) => DoubleResult(di(dbl, int)) - case (DoubleResult(dbl1), DoubleResult(dbl2)) => DoubleResult(dd(dbl1, dbl2)) - case (IntResult(int), DoubleResult(dbl)) => DoubleResult(id(int, dbl)) + case (ImageResult(lt1), ImageResult(lt2)) => ImageResult(tt(lt1, lt2)) + case (ImageResult(lt), IntResult(int)) => ImageResult(ti(lt, int)) + case (IntResult(int), ImageResult(lt)) => ImageResult(it(int, lt)) + case (ImageResult(lt), DoubleResult(double)) => ImageResult(td(lt, double)) + case (DoubleResult(double), ImageResult(lt)) => ImageResult(dt(double, lt)) + case (IntResult(int1), IntResult(int2)) => IntResult(ii(int1, int2)) + case (DoubleResult(dbl), IntResult(int)) => DoubleResult(di(dbl, int)) + case (DoubleResult(dbl1), DoubleResult(dbl2)) => DoubleResult(dd(dbl1, dbl2)) + case (IntResult(int), DoubleResult(dbl)) => DoubleResult(id(int, dbl)) } - /** Arithmetic Operations */ - val additionDouble = Directive { case (a@Addition(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ + _))) }) + /** + * Arithmetic Operations + */ + val additionDouble = Directive { + case (a @ Addition(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ + _))) } } - val additionInt = Directive { case (a@Addition(_), childResults) if (a.kind == MamlKind.Int) => - childResults - .map({ _.as[Int] }) - .toList.sequence - .andThen({ results => Valid(IntResult(results.reduce(_ + _))) }) + val additionInt = Directive { + case (a @ Addition(_), childResults) if a.kind == MamlKind.Int => + childResults + .map { _.as[Int] } + .toList + .sequence + .andThen { results => Valid(IntResult(results.reduce(_ + _))) } } - val additionTile = Directive { case (a@Addition(_), childResults) if (a.kind == MamlKind.Image) => - val grouped = childResults.groupBy(_.kind) + val additionTile = Directive { + case (a @ Addition(_), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) - val scalarSums = - (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => dbls.sum + ints.sum } + val scalarSums = + 
(doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => dbls.sum + ints.sum } - (imageResults(grouped), scalarSums).mapN { case (tiles, sums) => - val tileSum = tiles.reduce({ (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => lt1.dualCombine(lt2, {_ + _}, {_ + _}) }) - ImageResult(tileSum.dualMap({ i: Int => i + sums.toInt }, { i: Double => i + sums })) - } + (imageResults(grouped), scalarSums).mapN { case (tiles, sums) => + val tileSum = tiles.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => lt1.dualCombine(lt2, { _ + _ }, { _ + _ }) } + ImageResult(tileSum.dualMap({ i: Int => i + sums.toInt }, { i: Double => i + sums })) + } } - val subtraction = Directive { case (a@Subtraction(_), childResults) => - val results = childResults.reduce({ (res1, res2) => - tileOrScalarReduction( - {_ - _}, {_ -: _}, {_ - _}, {_ -: _}, {_ - _}, - {_ - _}, {_ - _}, {_ - _}, {_ - _}, - res1, res2) - }) + val subtraction = Directive { case (a @ Subtraction(_), childResults) => + val results = childResults.reduce { (res1, res2) => + tileOrScalarReduction({ _ - _ }, { _ -: _ }, { _ - _ }, { _ -: _ }, { _ - _ }, { _ - _ }, { _ - _ }, { _ - _ }, { _ - _ }, res1, res2) + } Valid(results) } - val division = Directive { case (a@Division(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => - tileOrScalarReduction( - {_ / _}, {_ /: _}, {_ / _}, {_ /: _}, {_ / _}, - {_ / _}, {_ / _}, {_ / _}, {_ / _}, - res1, res2) - }) + val division = Directive { case (a @ Division(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => + tileOrScalarReduction({ _ / _ }, { _ /: _ }, { _ / _ }, { _ /: _ }, { _ / _ }, { _ / _ }, { _ / _ }, { _ / _ }, { _ / _ }, res1, res2) + } Valid(results) } - val multiplicationDouble = Directive { case (a@Multiplication(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ * _))) }) + val multiplicationDouble = Directive { + case (a @ Multiplication(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ * _))) } } - val multiplicationInt = Directive { case (a@Multiplication(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ * _))) }) + val multiplicationInt = Directive { + case (a @ Multiplication(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ * _))) } } - val multiplicationTile = Directive { case (a@Multiplication(_), childResults) if (a.kind == MamlKind.Image) => - val grouped = childResults.groupBy(_.kind) + val multiplicationTile = Directive { + case (a @ Multiplication(_), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) - val scalarProduct = - (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => dbls.product * ints.product } + val scalarProduct = + (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => dbls.product * ints.product } - (imageResults(grouped), scalarProduct).mapN { case (tiles, product) => - val tileProduct = tiles.reduce({ (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => lt1.dualCombine(lt2, {_ * _}, {_ * _}) }) - 
ImageResult(tileProduct.dualMap({ i: Int => i * product.toInt }, { i: Double => i * product })) - } + (imageResults(grouped), scalarProduct).mapN { case (tiles, product) => + val tileProduct = tiles.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => lt1.dualCombine(lt2, { _ * _ }, { _ * _ }) } + ImageResult(tileProduct.dualMap({ i: Int => i * product.toInt }, { i: Double => i * product })) + } } - val pow = Directive { case (p@Pow(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val pow = Directive { case (p @ Pow(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrScalarReduction( - {_ ** _}, {_ **: _}, {_ ** _}, {_ **: _}, {_ ** _}, - { math.pow(_, _).toInt }, { math.pow(_, _) }, { math.pow(_, _) }, { math.pow(_, _) }, - res1, res2 + { _ ** _ }, + { _ **: _ }, + { _ ** _ }, + { _ **: _ }, + { _ ** _ }, + { math.pow(_, _).toInt }, + { math.pow(_, _) }, + { math.pow(_, _) }, + { math.pow(_, _) }, + res1, + res2 ) - }) + } Valid(results) } - /** Numeric Comparison Operations */ - val maxDouble = Directive { case (a@Max(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ max _))) }) + /** + * Numeric Comparison Operations + */ + val maxDouble = Directive { + case (a @ Max(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ max _))) } } - val maxInt = Directive { case (a@Max(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ max _))) }) + val maxInt = Directive { + case (a @ Max(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ max _))) } } - val maxTile = Directive { case (a@Max(_), childResults) if a.kind == MamlKind.Image => - val grouped = childResults.groupBy(_.kind) - - val scalarMax: Interpreted[Option[Double]] = - (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => - (Try(dbls.max).toOption, Try(ints.max).toOption) match { - case (Some(dbl), Some(int)) => Some(dbl max int) - case (None, Some(int)) => Some(int) - case (Some(dbl), None) => Some(dbl) - case _ => None + val maxTile = Directive { + case (a @ Max(_), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) + + val scalarMax: Interpreted[Option[Double]] = + (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => + (Try(dbls.max).toOption, Try(ints.max).toOption) match { + case (Some(dbl), Some(int)) => Some(dbl.max(int)) + case (None, Some(int)) => Some(int) + case (Some(dbl), None) => Some(dbl) + case _ => None + } } - } - (imageResults(grouped), scalarMax).mapN({ case (tiles, maximum) => - val tileMax = tiles.reduce({ (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => - lt1.dualCombine(lt2, {_ max _}, {_ max _}) - }) - maximum match { - case Some(scalarMax) => - ImageResult(tileMax.dualMap({ i: Int => i max scalarMax.toInt }, { i: Double => i max scalarMax })) - case None => - ImageResult(tileMax) + (imageResults(grouped), scalarMax).mapN { case (tiles, maximum) => + val tileMax = tiles.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => + lt1.dualCombine(lt2, { 
_ max _ }, { _ max _ }) + } + maximum match { + case Some(scalarMax) => + ImageResult(tileMax.dualMap({ i: Int => i.max(scalarMax.toInt) }, { i: Double => i.max(scalarMax) })) + case None => + ImageResult(tileMax) + } } - }) } - val rgbTile = Directive { case (a @ RGB(_, rb, gb, bb), childResults) if a.kind == MamlKind.Image => - val grouped = childResults.groupBy(_.kind) - - imageResults(grouped).map { tiles => tiles.take(3) match { - case r :: g :: b :: Nil => - ImageResult(LazyMultibandRaster(Map("0" -> r.bands(rb), "1" -> g.bands(gb), "2" -> b.bands(bb)))) - case list => ImageResult(list.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => - LazyMultibandRaster(lt1.bands ++ lt2.bands) - }) + val rgbTile = Directive { + case (a @ RGB(_, rb, gb, bb), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) + + imageResults(grouped).map { tiles => + tiles.take(3) match { + case r :: g :: b :: Nil => + ImageResult(LazyMultibandRaster(Map("0" -> r.bands(rb), "1" -> g.bands(gb), "2" -> b.bands(bb)))) + case list => + ImageResult(list.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => + LazyMultibandRaster(lt1.bands ++ lt2.bands) + }) + } } - } } - val assembleTile = Directive { case (a @ Assemble(_), childResults) if a.kind == MamlKind.Image => - val grouped = childResults.groupBy(_.kind) + val assembleTile = Directive { + case (a @ Assemble(_), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) - imageResults(grouped).map { tiles => - val bands = - tiles.zipWithIndex.map { case (tile, index) => - index.toString -> tile.bands("0") - } + imageResults(grouped).map { tiles => + val bands = + tiles.zipWithIndex.map { case (tile, index) => + index.toString -> tile.bands("0") + } - ImageResult(LazyMultibandRaster(bands.toMap)) - } + ImageResult(LazyMultibandRaster(bands.toMap)) + } } - val minDouble = Directive { case (a@Min(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ min _))) }) + val minDouble = Directive { + case (a @ Min(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ min _))) } } - val minInt = Directive { case (a@Min(_), childResults) if (a.kind == MamlKind.Double) => - childResults - .map({ _.as[Double] }) - .toList.sequence - .andThen({ results => Valid(DoubleResult(results.reduce(_ min _))) }) + val minInt = Directive { + case (a @ Min(_), childResults) if a.kind == MamlKind.Double => + childResults + .map { _.as[Double] } + .toList + .sequence + .andThen { results => Valid(DoubleResult(results.reduce(_ min _))) } } - val minTile = Directive { case (a@Min(_), childResults) if (a.kind == MamlKind.Image) => - val grouped = childResults.groupBy(_.kind) + val minTile = Directive { + case (a @ Min(_), childResults) if a.kind == MamlKind.Image => + val grouped = childResults.groupBy(_.kind) + + val scalarMin: Interpreted[Option[Double]] = + (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => + (Try(dbls.min).toOption, Try(ints.min).toOption) match { + case (Some(dbl), Some(int)) => Some(dbl.min(int)) + case (None, Some(int)) => Some(int) + case (Some(dbl), None) => Some(dbl) + case _ => None + } + } - val scalarMin: Interpreted[Option[Double]] = - (doubleResults(grouped), intResults(grouped)).mapN { case (dbls, ints) => - 
(Try(dbls.min).toOption, Try(ints.min).toOption) match { - case (Some(dbl), Some(int)) => Some(dbl min int) - case (None, Some(int)) => Some(int) - case (Some(dbl), None) => Some(dbl) - case _ => None + (imageResults(grouped), scalarMin).mapN { case (tiles, minimum) => + val tileMin = tiles.reduce { (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => + lt1.dualCombine(lt2, { _ min _ }, { _ min _ }) + } + minimum match { + case Some(scalarMin) => + ImageResult(tileMin.dualMap({ i: Int => i.min(scalarMin.toInt) }, { i: Double => i.min(scalarMin) })) + case None => + ImageResult(tileMin) + } } - } - - (imageResults(grouped), scalarMin).mapN({ case (tiles, minimum) => - val tileMin = tiles.reduce({ (lt1: LazyMultibandRaster, lt2: LazyMultibandRaster) => - lt1.dualCombine(lt2, {_ min _}, {_ min _}) - }) - minimum match { - case Some(scalarMin) => - ImageResult(tileMin.dualMap({ i: Int => i min scalarMin.toInt }, { i: Double => i min scalarMin })) - case None => - ImageResult(tileMin) - } - }) } - /** Numeric Comparison Operations */ - val lessThan = Directive { case (a@Lesser(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + /** + * Numeric Comparison Operations + */ + val lessThan = Directive { case (a @ Lesser(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ < _}, { (i, t) => t < i }, {_ < _}, { (d, t) => t < d }, {_ < _}, - {_ < _}, {_ < _}, {_ < _}, {_ < _.toInt}, - res1, res2 + { _ < _ }, + { (i, t) => t < i }, + { _ < _ }, + { (d, t) => t < d }, + { _ < _ }, + { _ < _ }, + { _ < _ }, + { _ < _ }, + { _ < _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - val lessThanOrEqualTo = Directive { case (a@LesserOrEqual(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val lessThanOrEqualTo = Directive { case (a @ LesserOrEqual(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ <= _}, { (i, t) => t <= i }, {_ <= _}, { (d, t) => t <= d }, {_ <= _}, - {_ <= _}, {_ <= _}, {_ <= _}, {_ <= _.toInt}, - res1, res2 + { _ <= _ }, + { (i, t) => t <= i }, + { _ <= _ }, + { (d, t) => t <= d }, + { _ <= _ }, + { _ <= _ }, + { _ <= _ }, + { _ <= _ }, + { _ <= _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - val equalTo = Directive { case (a@Equal(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val equalTo = Directive { case (a @ Equal(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ === _}, { (i, t) => t === i }, {_ === _}, { (d, t) => t === d }, {_ === _}, - {_ == _}, {_ == _}, {_ == _}, {_ == _.toInt}, - res1, res2 + { _ === _ }, + { (i, t) => t === i }, + { _ === _ }, + { (d, t) => t === d }, + { _ === _ }, + { _ == _ }, + { _ == _ }, + { _ == _ }, + { _ == _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - val notEqualTo = Directive { case (a@Unequal(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val notEqualTo = Directive { case (a @ Unequal(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ !== _}, { (i, t) => t !== i }, {_ !== _}, { (d, t) => t !== d }, {_ !== _}, - {_ != _}, {_ != _}, {_ != _}, {_ != _.toInt}, - res1, res2 + { _ !== _ }, + { (i, t) => t !== i }, + { _ !== _ }, + { (d, t) => t !== d }, + { _ !== _ }, + { _ != _ }, + { _ != _ }, + 
{ _ != _ }, + { _ != _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - val greaterThan = Directive { case (a@Greater(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val greaterThan = Directive { case (a @ Greater(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ > _}, { (i, t) => t > i }, {_ > _}, { (d, t) => t > d }, {_ > _}, - {_ > _}, {_ > _}, {_ > _}, {_ > _.toInt}, - res1, res2 + { _ > _ }, + { (i, t) => t > i }, + { _ > _ }, + { (d, t) => t > d }, + { _ > _ }, + { _ > _ }, + { _ > _ }, + { _ > _ }, + { _ > _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - val greaterThanOrEqualTo = Directive { case (a@GreaterOrEqual(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val greaterThanOrEqualTo = Directive { case (a @ GreaterOrEqual(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ >= _}, { (i, t) => t > i }, {_ >= _}, { (d, t) => t > d }, {_ >= _}, - {_ >= _}, {_ >= _}, {_ >= _}, {_ >= _.toInt}, - res1, res2 + { _ >= _ }, + { (i, t) => t > i }, + { _ >= _ }, + { (d, t) => t > d }, + { _ >= _ }, + { _ >= _ }, + { _ >= _ }, + { _ >= _ }, + { _ >= _.toInt }, + res1, + res2 ) - }) + } Valid(results) } - /** Logical Operations */ + /** + * Logical Operations + */ // TODO: Update these functions when the int/double distinction is removed so that bool args // are respected - val and = Directive { case (and@And(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val and = Directive { case (and @ And(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ && _}, {_ &&: _}, {_ && _}, { _ &&: _ }, {_ && _}, - {isData(_) && isData(_)}, {isData(_) && isData(_)}, {isData(_) && isData(_)}, {isData(_) && isData(_)}, - res1, res2 + { _ && _ }, + { _ &&: _ }, + { _ && _ }, + { _ &&: _ }, + { _ && _ }, + { isData(_) && isData(_) }, + { isData(_) && isData(_) }, + { isData(_) && isData(_) }, + { isData(_) && isData(_) }, + res1, + res2 ) - }) + } Valid(results) } - val or = Directive { case (or@Or(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val or = Directive { case (or @ Or(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ || _}, {_ ||: _}, {_ || _}, { _ ||: _ }, {_ || _}, - {isData(_) || isData(_)}, {isData(_) || isData(_)}, {isData(_) || isData(_)}, {isData(_) || isData(_)}, - res1, res2 + { _ || _ }, + { _ ||: _ }, + { _ || _ }, + { _ ||: _ }, + { _ || _ }, + { isData(_) || isData(_) }, + { isData(_) || isData(_) }, + { isData(_) || isData(_) }, + { isData(_) || isData(_) }, + res1, + res2 ) - }) + } Valid(results) } - val xor = Directive { case (xor@Xor(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + val xor = Directive { case (xor @ Xor(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrBoolReduction( - {_ xor _}, { (i, t) => t xor i}, {_ xor _}, { (d, t) => t xor d }, {_ xor _}, - {(x, y) => (isData(x) || isData(y) && !(isData(x) && isData(y)))}, - {(x, y) => (isData(x) || isData(y) && !(isData(x) && isData(y)))}, - {(x, y) => (isData(x) || isData(y) && !(isData(x) && isData(y)))}, - {(x, y) => (isData(x) || isData(y) && !(isData(x) && isData(y)))}, - res1, res2 + { _ 
xor _ }, + { (i, t) => t.xor(i) }, + { _ xor _ }, + { (d, t) => t.xor(d) }, + { _ xor _ }, + { (x, y) => isData(x) || isData(y) && !(isData(x) && isData(y)) }, + { (x, y) => isData(x) || isData(y) && !(isData(x) && isData(y)) }, + { (x, y) => isData(x) || isData(y) && !(isData(x) && isData(y)) }, + { (x, y) => isData(x) || isData(y) && !(isData(x) && isData(y)) }, + res1, + res2 ) - }) + } Valid(results) } - /** Sleeping */ + /** + * Sleeping + */ val sleep = Directive { case (Sleep(n, _), childResults) => Thread.sleep(n * 1000) Valid(childResults.head) } - /** Tile-specific Operations */ - val masking = Directive { case (mask@Masking(_), childResults) => + /** + * Tile-specific Operations + */ + val masking = Directive { case (mask @ Masking(_), childResults) => ((childResults(0), childResults(1)) match { case (ImageResult(lzRaster), GeomResult(geom)) => Valid((lzRaster, geom)) @@ -394,25 +501,35 @@ object OpDirectives { Valid((lzRaster, geom)) case _ => Invalid(NEL.of(NonEvaluableNode(mask, Some("Masking operation requires both a tile and a vector argument")))) - }).andThen({ case (lzRaster, geom) => + }).andThen { case (lzRaster, geom) => asInstanceOfOption[MultiPolygon](geom) match { case Some(mp) => Valid(ImageResult(lzRaster.mask(mp))) case None => Invalid(NEL.of(NonEvaluableNode(mask, Some("Masking operation requires its vector argument to be a multipolygon")))) } - }) + } } - /** Trigonometric Operations */ - val atan2 = Directive { case (atan2@Atan2(_), childResults) => - val results = childResults.reduce({ (res1: Result, res2: Result) => + /** + * Trigonometric Operations + */ + val atan2 = Directive { case (atan2 @ Atan2(_), childResults) => + val results = childResults.reduce { (res1: Result, res2: Result) => tileOrScalarReduction( - {_.atan2(_)}, { (i, t) => t.atan2(i) }, {_.atan2(_)}, { (d, t) => t.atan2(d) }, {_.atan2(_)}, - { math.atan2(_, _).toInt }, { math.atan2(_, _) }, { math.atan2(_, _) }, { math.atan2(_, _) }, - res1, res2 + { _.atan2(_) }, + { (i, t) => t.atan2(i) }, + { _.atan2(_) }, + { (d, t) => t.atan2(d) }, + { _.atan2(_) }, + { math.atan2(_, _).toInt }, + { math.atan2(_, _) }, + { math.atan2(_, _) }, + { math.atan2(_, _) }, + res1, + res2 ) - }) + } Valid(results) } } diff --git a/jvm/src/main/scala/eval/directive/ScopedDirective.scala b/jvm/src/main/scala/eval/directive/ScopedDirective.scala index 856036db..3b5c462b 100644 --- a/jvm/src/main/scala/eval/directive/ScopedDirective.scala +++ b/jvm/src/main/scala/eval/directive/ScopedDirective.scala @@ -11,13 +11,15 @@ import cats.implicits._ import scala.reflect.ClassTag - object ScopedDirective { - /** Lift a simple directive into a scoped context */ - def pure[Exp <: Expression : ClassTag](ruleFn: Directive): ScopedDirective[Any] = - { case (exp: Exp, results: Seq[Result], _: Any) => ruleFn(exp, results) } + + /** + * Lift a simple directive into a scoped context + */ + def pure[Exp <: Expression: ClassTag](ruleFn: Directive): ScopedDirective[Any] = { case (exp: Exp, results: Seq[Result], _: Any) => + ruleFn(exp, results) + } def apply[Scope](ruleFn: PartialFunction[(Expression, Seq[Result], Scope), Interpreted[Result]]): ScopedDirective[Scope] = ruleFn } - diff --git a/jvm/src/main/scala/eval/directive/SourceDirectives.scala b/jvm/src/main/scala/eval/directive/SourceDirectives.scala index c9b43d41..5fb39e0b 100644 --- a/jvm/src/main/scala/eval/directive/SourceDirectives.scala +++ b/jvm/src/main/scala/eval/directive/SourceDirectives.scala @@ -16,7 +16,7 @@ import geotrellis.layer._ import cats.data.{NonEmptyList 
=> NEL, _} import Validated._ -import scala.util.{Try, Success, Failure} +import scala.util.{Failure, Success, Try} object SourceDirectives { val intLiteral = Directive { case (IntLit(int), _) => Valid(IntResult(int)) } @@ -32,7 +32,7 @@ object SourceDirectives { case (RasterLit(r), _) if r.isInstanceOf[LazyMultibandRaster] => val mbRaster = r.asInstanceOf[LazyMultibandRaster] Valid(ImageResult(mbRaster)) - case (rl@RasterLit(r), _) => + case (rl @ RasterLit(r), _) => Invalid(NEL.of(NonEvaluableNode(rl, Some("Unable to treat raster literal contents as type Raster")))) } @@ -41,7 +41,7 @@ object SourceDirectives { case Success(geom) => Valid(GeomResult(geom)) case Failure(e) => parse(jsonString) match { - case Right(json) => Invalid(NEL.of(ASTDecodeError(json, "provided JSON is not valid GeoJson"))) + case Right(json) => Invalid(NEL.of(ASTDecodeError(json, "provided JSON is not valid GeoJson"))) case Left(parsingFailure) => Invalid(NEL.of(ASTParseError(jsonString, parsingFailure.message))) } } diff --git a/jvm/src/main/scala/eval/directive/UnaryDirectives.scala b/jvm/src/main/scala/eval/directive/UnaryDirectives.scala index 0348ab98..0ee44573 100644 --- a/jvm/src/main/scala/eval/directive/UnaryDirectives.scala +++ b/jvm/src/main/scala/eval/directive/UnaryDirectives.scala @@ -15,7 +15,6 @@ import geotrellis.raster.render._ import scala.util.Try - object UnaryDirectives { private def not[A](f: A => Boolean): A => Boolean = !f(_) @@ -26,8 +25,8 @@ object UnaryDirectives { d: Double => Double, arg: Result ): Result = arg match { - case ImageResult(lt) => ImageResult(t(lt)) - case IntResult(int) => DoubleResult(i(int)) + case ImageResult(lt) => ImageResult(t(lt)) + case IntResult(int) => DoubleResult(i(int)) case DoubleResult(dbl) => DoubleResult(d(dbl)) } @@ -37,125 +36,135 @@ object UnaryDirectives { d: Double => Boolean, arg: Result ): Result = arg match { - case ImageResult(lt) => ImageResult(t(lt)) - case IntResult(int) => BoolResult(i(int)) + case ImageResult(lt) => ImageResult(t(lt)) + case IntResult(int) => BoolResult(i(int)) case DoubleResult(dbl) => BoolResult(d(dbl)) } - /** Trigonometric Operations */ - val sin = Directive { case (s@Sin(_), childResults) => + /** + * Trigonometric Operations + */ + val sin = Directive { case (s @ Sin(_), childResults) => val result = imageOrScalarResult({ _.sin }, { math.sin(_) }, { math.sin(_) }, childResults.head) Valid(result) } - val cos = Directive { case (s@Cos(_), childResults) => + val cos = Directive { case (s @ Cos(_), childResults) => val result = imageOrScalarResult({ _.cos }, { math.cos(_) }, { math.cos(_) }, childResults.head) Valid(result) } - val tan = Directive { case (s@Tan(_), childResults) => + val tan = Directive { case (s @ Tan(_), childResults) => val result = imageOrScalarResult({ _.tan }, { math.tan(_) }, { math.tan(_) }, childResults.head) Valid(result) } - val sinh = Directive { case (s@Sinh(_), childResults) => + val sinh = Directive { case (s @ Sinh(_), childResults) => val result = imageOrScalarResult({ _.sinh }, { math.sinh(_) }, { math.sinh(_) }, childResults.head) Valid(result) } - val cosh = Directive { case (s@Cosh(_), childResults) => + val cosh = Directive { case (s @ Cosh(_), childResults) => val result = imageOrScalarResult({ _.cosh }, { math.cosh(_) }, { math.cosh(_) }, childResults.head) Valid(result) } - val tanh = Directive { case (s@Tanh(_), childResults) => + val tanh = Directive { case (s @ Tanh(_), childResults) => val result = imageOrScalarResult({ _.tanh }, { math.tanh(_) }, { math.tanh(_) }, 
childResults.head) Valid(result) } - val asin = Directive { case (s@Asin(_), childResults) => + val asin = Directive { case (s @ Asin(_), childResults) => val result = imageOrScalarResult({ _.asin }, { math.asin(_) }, { math.asin(_) }, childResults.head) Valid(result) } - val acos = Directive { case (s@Acos(_), childResults) => + val acos = Directive { case (s @ Acos(_), childResults) => val result = imageOrScalarResult({ _.acos }, { math.acos(_) }, { math.acos(_) }, childResults.head) Valid(result) } - val atan = Directive { case (s@Atan(_), childResults) => + val atan = Directive { case (s @ Atan(_), childResults) => val result = imageOrScalarResult({ _.atan }, { math.atan(_) }, { math.atan(_) }, childResults.head) Valid(result) } - /** Rounding Operations */ - val round = Directive { case (r@Round(_), childResults) => + /** + * Rounding Operations + */ + val round = Directive { case (r @ Round(_), childResults) => val result = imageOrScalarResult({ _.round }, identity, { math.round(_) }, childResults.head) Valid(result) } - val floor = Directive { case (r@Floor(_), childResults) => + val floor = Directive { case (r @ Floor(_), childResults) => val result = imageOrScalarResult({ _.floor }, identity, { math.floor(_) }, childResults.head) Valid(result) } - val ceil = Directive { case (r@Ceil(_), childResults) => + val ceil = Directive { case (r @ Ceil(_), childResults) => val result = imageOrScalarResult({ _.ceil }, identity, { math.ceil(_) }, childResults.head) Valid(result) } - /** Arithmetic Operations */ - val naturalLog = Directive { case (nl@LogE(_), childResults) => + /** + * Arithmetic Operations + */ + val naturalLog = Directive { case (nl @ LogE(_), childResults) => val result = imageOrScalarResult({ _.logE }, { i: Int => math.log(i2d(i)) }, { math.log(_) }, childResults.head) Valid(result) } - val log10 = Directive { case (nl@Log10(_), childResults) => + val log10 = Directive { case (nl @ Log10(_), childResults) => val result = imageOrScalarResult({ _.log10 }, { i: Int => math.log10(i2d(i)) }, { math.log10(_) }, childResults.head) Valid(result) } - val sqrt = Directive { case (sqrt@SquareRoot(_), childResults) => + val sqrt = Directive { case (sqrt @ SquareRoot(_), childResults) => val result = imageOrScalarResult({ _.sqrt }, { i: Int => math.sqrt(i2d(i)) }, { math.sqrt(_) }, childResults.head) Valid(result) } - val abs = Directive { case (sqrt@SquareRoot(_), childResults) => + val abs = Directive { case (sqrt @ SquareRoot(_), childResults) => val result = imageOrScalarResult({ _.abs }, { i: Int => math.abs(i) }, { math.abs(_) }, childResults.head) Valid(result) } - val isDefined = Directive { case (d@Defined(_), childResults) => + val isDefined = Directive { case (d @ Defined(_), childResults) => val result = imageOrBoolResult({ _.isDefined }, { i: Int => isData(i) }, { isData(_) }, childResults.head) Valid(result) } - val isUndefined = Directive { case (d@Undefined(_), childResults) => + val isUndefined = Directive { case (d @ Undefined(_), childResults) => val result = imageOrBoolResult({ _.isUndefined }, { i: Int => isNoData(i) }, { isNoData(_) }, childResults.head) Valid(result) } - val numericNegation = Directive { case (nn@NumericNegation(_), childResults) => - val result = imageOrScalarResult({ _.changeSign }, { _ * -1 }, {_ * -1}, childResults.head) + val numericNegation = Directive { case (nn @ NumericNegation(_), childResults) => + val result = imageOrScalarResult({ _.changeSign }, { _ * -1 }, { _ * -1 }, childResults.head) Valid(result) } - /** Logical Operations */ 
+ /** + * Logical Operations + */ val logicalNegation = Directive { case (LogicalNegation(_), childResults) => - val result = imageOrBoolResult({ _.not }, {not(isData(_))}, {not(isData(_))}, childResults.head) + val result = imageOrBoolResult({ _.not }, { not(isData(_)) }, { not(isData(_)) }, childResults.head) Valid(result) } - /** Tile-specific Operations */ + /** + * Tile-specific Operations + */ val classification = Directive { case (classify @ Classification(_, classMap), childResults) => childResults.head match { case ImageResult(lzTile) => Valid(ImageResult(lzTile.classify(BreakMap(classMap.classifications)))) - case _ => Invalid(NEL.of(NonEvaluableNode(classify, Some("Classification node requires multiband lazyraster argument")))) + case _ => Invalid(NEL.of(NonEvaluableNode(classify, Some("Classification node requires multiband lazyraster argument")))) } } val imageSelection = Directive { case (imgSel @ ImageSelect(_, labels), childResults) => childResults.head match { case ImageResult(mbLzTile) => Valid(ImageResult(mbLzTile.select(labels))) - case _ => Invalid(NEL.of(NonEvaluableNode(imgSel, Some("ImageSelect node requires multiband lazyraster argument")))) + case _ => Invalid(NEL.of(NonEvaluableNode(imgSel, Some("ImageSelect node requires multiband lazyraster argument")))) } } @@ -183,4 +192,3 @@ object UnaryDirectives { } } } - diff --git a/jvm/src/main/scala/eval/package.scala b/jvm/src/main/scala/eval/package.scala index 319aeeb7..b6d4608f 100644 --- a/jvm/src/main/scala/eval/package.scala +++ b/jvm/src/main/scala/eval/package.scala @@ -12,7 +12,6 @@ import cats.data._ import cats.data.Validated._ import cats.implicits._ - package object eval { type Directive = PartialFunction[(Expression, Seq[Result]), Interpreted[Result]] type ScopedDirective[Scope] = PartialFunction[(Expression, Seq[Result], Scope), Interpreted[Result]] diff --git a/jvm/src/main/scala/eval/tile/Classify.scala b/jvm/src/main/scala/eval/tile/Classify.scala index 4f082147..c356aba1 100644 --- a/jvm/src/main/scala/eval/tile/Classify.scala +++ b/jvm/src/main/scala/eval/tile/Classify.scala @@ -2,7 +2,6 @@ package com.azavea.maml.eval.tile import geotrellis.raster._ - case class Classify(children: List[LazyRaster], f: Double => Int) extends LazyRaster.UnaryBranch { def get(col: Int, row: Int) = f(fst.getDouble(col, row)) def getDouble(col: Int, row: Int) = i2d(get(col, row)) diff --git a/jvm/src/main/scala/eval/tile/LazyMultibandRaster.scala b/jvm/src/main/scala/eval/tile/LazyMultibandRaster.scala index f2952ba7..bc8f870e 100644 --- a/jvm/src/main/scala/eval/tile/LazyMultibandRaster.scala +++ b/jvm/src/main/scala/eval/tile/LazyMultibandRaster.scala @@ -37,14 +37,17 @@ case class LazyMultibandRaster(bands: Map[String, LazyRaster]) { f: (Int, Int) => Int, g: (Double, Double) => Double ): LazyMultibandRaster = { - val newBands = bands.values.zip(other.bands.values).map { case (v1, v2) => - LazyRaster.DualCombine(List(v1, v2), f, g) - }.toList + val newBands = bands.values + .zip(other.bands.values) + .map { case (v1, v2) => + LazyRaster.DualCombine(List(v1, v2), f, g) + } + .toList LazyMultibandRaster(newBands) } def dualMap(f: Int => Int, g: Double => Double): LazyMultibandRaster = - LazyMultibandRaster(bands.mapValues({ lt => LazyRaster.DualMap(List(lt), f, g) })) + LazyMultibandRaster(bands.mapValues { lt => LazyRaster.DualMap(List(lt), f, g) }) def focal( neighborhood: Neighborhood, @@ -52,7 +55,7 @@ case class LazyMultibandRaster(bands: Map[String, LazyRaster]) { target: TargetCell, focalFn: (Tile, Neighborhood, 
Option[GridBounds[Int]], TargetCell) => Tile ): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Focal(List(lt), neighborhood, gridbounds, target, focalFn) }) + val lztiles = bands.mapValues { lt => LazyRaster.Focal(List(lt), neighborhood, gridbounds, target, focalFn) } LazyMultibandRaster(lztiles) } @@ -62,7 +65,7 @@ case class LazyMultibandRaster(bands: Map[String, LazyRaster]) { cs: CellSize, target: TargetCell ): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Slope(List(lt), gridbounds, zFactor, cs, target) }) + val lztiles = bands.mapValues { lt => LazyRaster.Slope(List(lt), gridbounds, zFactor, cs, target) } LazyMultibandRaster(lztiles) } @@ -74,7 +77,7 @@ case class LazyMultibandRaster(bands: Map[String, LazyRaster]) { altitude: Double, target: TargetCell ): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Hillshade(List(lt), gridbounds, zFactor, cs, azimuth, altitude, target) }) + val lztiles = bands.mapValues { lt => LazyRaster.Hillshade(List(lt), gridbounds, zFactor, cs, azimuth, altitude, target) } LazyMultibandRaster(lztiles) } @@ -83,31 +86,31 @@ case class LazyMultibandRaster(bands: Map[String, LazyRaster]) { cs: CellSize, target: TargetCell ): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => + val lztiles = bands.mapValues { lt => LazyRaster.Aspect(List(lt), gridbounds, cs, target) - }) + } LazyMultibandRaster(lztiles) } def mask( maskPoly: MultiPolygon ): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => MaskingNode(List(lt), maskPoly) }) + val lztiles = bands.mapValues { lt => MaskingNode(List(lt), maskPoly) } LazyMultibandRaster(lztiles) } def rescale(newMin: Double, newMax: Double): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Rescale(List(lt), newMin, newMax) }) + val lztiles = bands.mapValues { lt => LazyRaster.Rescale(List(lt), newMin, newMax) } LazyMultibandRaster(lztiles) } def normalize(oldMin: Double, oldMax: Double, newMin: Double, newMax: Double): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Normalize(List(lt), oldMin, oldMax, newMin, newMax) }) + val lztiles = bands.mapValues { lt => LazyRaster.Normalize(List(lt), oldMin, oldMax, newMin, newMax) } LazyMultibandRaster(lztiles) } def clamp(min: Double, max: Double): LazyMultibandRaster = { - val lztiles = bands.mapValues({ lt => LazyRaster.Clamp(List(lt), min, max) }) + val lztiles = bands.mapValues { lt => LazyRaster.Clamp(List(lt), min, max) } LazyMultibandRaster(lztiles) } } diff --git a/jvm/src/main/scala/eval/tile/LazyRaster.scala b/jvm/src/main/scala/eval/tile/LazyRaster.scala index bf31fe01..453cfe8c 100644 --- a/jvm/src/main/scala/eval/tile/LazyRaster.scala +++ b/jvm/src/main/scala/eval/tile/LazyRaster.scala @@ -2,7 +2,7 @@ package com.azavea.maml.eval.tile import cats.Semigroup import geotrellis.raster._ -import geotrellis.raster.mapalgebra.focal.{Neighborhood, Square, TargetCell, Aspect => GTAspect, Slope => GTFocalSlope} +import geotrellis.raster.mapalgebra.focal.{Aspect => GTAspect, Neighborhood, Slope => GTFocalSlope, Square, TargetCell} import geotrellis.raster.mapalgebra.focal.hillshade.{Hillshade => GTHillshade} import geotrellis.vector.Extent import geotrellis.proj4.CRS @@ -55,18 +55,20 @@ object LazyRaster { def apply(raster: Raster[Tile], crs: CRS): LazyRaster = Bound(raster.tile, raster.rasterExtent, crs) - /** A LazyRaster.Tree has a left and right. */ + /** + * A LazyRaster.Tree has a left and right. 
+ */ trait Branch extends LazyRaster { lazy val cols = { val colList = children.map(_.cols).distinct // This require block breaks things when there's an imbalance of focal operations on the children - //require(colList.length == 1, "Ambiguous column count") + // require(colList.length == 1, "Ambiguous column count") colList.head } lazy val rows = { val rowList = children.map(_.rows).distinct // This require block breaks things when there's an imbalance of focal operations on the children - //require(rowList.length == 1, "Ambiguous row count") + // require(rowList.length == 1, "Ambiguous row count") rowList.head } } @@ -92,11 +94,13 @@ object LazyRaster { def children: List[LazyRaster] = List.empty } - /** This object represents tile data sources */ + /** + * This object represents tile data sources + */ case class Bound(tile: Tile, rasterExtent: RasterExtent, crs: CRS) extends Terminal { def cols: Int = tile.cols def rows: Int = tile.rows - def get(col: Int, row: Int): Int = tile.get(col,row) + def get(col: Int, row: Int): Int = tile.get(col, row) def getDouble(col: Int, row: Int): Double = tile.getDouble(col, row) } @@ -177,12 +181,18 @@ object LazyRaster { val maxInt = max.toInt def clampInt(z: Int): Int = - if(isData(z)) { if(z > maxInt) { maxInt } else if(z < minInt) { minInt } else { z } } - else { z } + if (isData(z)) { + if (z > maxInt) { maxInt } + else if (z < minInt) { minInt } + else { z } + } else { z } def clampDouble(z: Double): Double = - if(isData(z)) { if(z > max) { max } else if(z < min) { min } else { z } } - else { z } + if (isData(z)) { + if (z > max) { max } + else if (z < min) { min } + else { z } + } else { z } lazy val intTile = fst.evaluate.map(clampInt _) lazy val dblTile = fst.evaluateDouble.mapDouble(clampDouble _) @@ -217,7 +227,7 @@ object LazyRaster { override lazy val cols: Int = gridbounds.map(_.width).getOrElse(fst.cols) override lazy val rows: Int = gridbounds.map(_.height).getOrElse(fst.rows) lazy val intTile = GTFocalSlope(fst.evaluate, Square(1), gridbounds, cs, zFactor, target) - lazy val dblTile = GTFocalSlope(fst.evaluateDouble, Square(1), gridbounds, cs, zFactor, target) + lazy val dblTile = GTFocalSlope(fst.evaluateDouble, Square(1), gridbounds, cs, zFactor, target) def get(col: Int, row: Int) = intTile.get(col, row) def getDouble(col: Int, row: Int) = dblTile.getDouble(col, row) diff --git a/jvm/src/main/scala/eval/tile/Masking.scala b/jvm/src/main/scala/eval/tile/Masking.scala index 2be92e4e..bbeae87b 100644 --- a/jvm/src/main/scala/eval/tile/Masking.scala +++ b/jvm/src/main/scala/eval/tile/Masking.scala @@ -8,15 +8,15 @@ case class MaskingNode(children: List[LazyRaster], mask: MultiPolygon) extends L val masky = ArrayTile.empty(BitCellType, this.cols, this.rows) rasterExtent - .foreach(mask)({ (col, row) => masky.set(col, row, 1) }) + .foreach(mask) { (col, row) => masky.set(col, row, 1) } masky } - /** Perform the NODATA checks ahead of time, in case the underlying Tile - * is sparse. This will then only check for Mask intersection if the value to - * give back could be something other than NODATA. - */ + /** + * Perform the NODATA checks ahead of time, in case the underlying Tile is sparse. This will then only check for Mask intersection if the value to + * give back could be something other than NODATA. 
+ */ def get(col: Int, row: Int): Int = { val v: Int = fst.get(col, row) @@ -28,4 +28,3 @@ case class MaskingNode(children: List[LazyRaster], mask: MultiPolygon) extends L if (isNoData(v)) v else if (cellMask.get(col, row) == 1) v else Double.NaN } } - diff --git a/jvm/src/main/scala/eval/tile/TileLayouts.scala b/jvm/src/main/scala/eval/tile/TileLayouts.scala index b878a687..8824c3df 100644 --- a/jvm/src/main/scala/eval/tile/TileLayouts.scala +++ b/jvm/src/main/scala/eval/tile/TileLayouts.scala @@ -4,13 +4,12 @@ import geotrellis.proj4.WebMercator import geotrellis.raster._ import geotrellis.layer._ - -/** This interpreter handles resource resolution and compilation of MapAlgebra ASTs */ +/** + * This interpreter handles resource resolution and compilation of MapAlgebra ASTs + */ object TileLayouts { - private val layouts: Array[LayoutDefinition] = (0 to 30).map(n => - ZoomedLayoutScheme.layoutForZoom(n, WebMercator.worldExtent, 256) - ).toArray + private val layouts: Array[LayoutDefinition] = (0 to 30).map(n => ZoomedLayoutScheme.layoutForZoom(n, WebMercator.worldExtent, 256)).toArray def apply(i: Int) = layouts(i) } diff --git a/jvm/src/main/scala/eval/tile/TileWithNeighbors.scala b/jvm/src/main/scala/eval/tile/TileWithNeighbors.scala index e3f1444f..4712b31b 100644 --- a/jvm/src/main/scala/eval/tile/TileWithNeighbors.scala +++ b/jvm/src/main/scala/eval/tile/TileWithNeighbors.scala @@ -4,7 +4,6 @@ import geotrellis.raster._ import java.lang.IllegalStateException - case class NeighboringTiles( tl: Tile, tm: Tile, @@ -22,9 +21,15 @@ case class TileWithNeighbors(centerTile: Tile, buffers: Option[NeighboringTiles] if (buffer > 0) { CompositeTile( Seq( - buf.tl, buf.tm, buf.tr, - buf.ml, centerTile, buf.mr, - buf.bl, buf.bm, buf.br + buf.tl, + buf.tm, + buf.tr, + buf.ml, + centerTile, + buf.mr, + buf.bl, + buf.bm, + buf.br ), TileLayout(3, 3, centerTile.cols, centerTile.rows) ).crop( @@ -33,13 +38,11 @@ case class TileWithNeighbors(centerTile: Tile, buffers: Option[NeighboringTiles] centerTile.cols * 2 + buffer - 1, centerTile.rows * 2 + buffer - 1 ) - } - else + } else centerTile - case None if (buffer == 0) => + case None if buffer == 0 => centerTile case _ => throw new IllegalStateException(s"tile buffer > 0 ($buffer) but no neighboring tiles found") } } - diff --git a/jvm/src/main/scala/util/NeighborhoodConversion.scala b/jvm/src/main/scala/util/NeighborhoodConversion.scala index c18ca457..432a25f0 100644 --- a/jvm/src/main/scala/util/NeighborhoodConversion.scala +++ b/jvm/src/main/scala/util/NeighborhoodConversion.scala @@ -2,24 +2,22 @@ package com.azavea.maml.util import geotrellis.raster.mapalgebra.focal - object NeighborhoodConversion { def apply(mamlN: Neighborhood): focal.Neighborhood = mamlN match { - case Square(extent) => focal.Square(extent) - case Circle(radius) => focal.Circle(radius) - case Nesw(extent) => focal.Nesw(extent) + case Square(extent) => focal.Square(extent) + case Circle(radius) => focal.Circle(radius) + case Nesw(extent) => focal.Nesw(extent) case Wedge(radius, startAngle, endAngle) => focal.Wedge(radius, startAngle, endAngle) - case Annulus(innerRadius, outerRadius) => focal.Annulus(innerRadius, outerRadius) + case Annulus(innerRadius, outerRadius) => focal.Annulus(innerRadius, outerRadius) } def apply(mamlN: focal.Neighborhood): Neighborhood = mamlN match { - case focal.Square(extent) => Square(extent) - case focal.Circle(radius) => Circle(radius) - case focal.Nesw(extent) => Nesw(extent) + case focal.Square(extent) => Square(extent) + case 
focal.Circle(radius) => Circle(radius) + case focal.Nesw(extent) => Nesw(extent) case focal.Wedge(radius, startAngle, endAngle) => Wedge(radius, startAngle, endAngle) - case focal.Annulus(innerRadius, outerRadius) => Annulus(innerRadius, outerRadius) + case focal.Annulus(innerRadius, outerRadius) => Annulus(innerRadius, outerRadius) } } - diff --git a/jvm/src/main/scala/util/Vars.scala b/jvm/src/main/scala/util/Vars.scala index 62083738..694c0314 100644 --- a/jvm/src/main/scala/util/Vars.scala +++ b/jvm/src/main/scala/util/Vars.scala @@ -9,7 +9,6 @@ import cats.implicits._ import java.security.InvalidParameterException - object Vars { def vars(expr: Expression): Map[String, MamlKind] = varsWithBuffer(expr).map { case (name, (kind, _)) => name -> kind } @@ -28,9 +27,8 @@ object Vars { // max by the buffer to ensure that we have enough data for all operations eval(expr, 0) .groupBy(_._1) - .mapValues({ values => values.maxBy(_._3) }) - .map({ case (name, (_, kind, buffer)) => name -> (kind, buffer) }) + .mapValues { values => values.maxBy(_._3) } + .map { case (name, (_, kind, buffer)) => name -> (kind, buffer) } .toMap } } - diff --git a/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala b/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala index a1b59f57..3c4e39db 100644 --- a/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala +++ b/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala @@ -17,19 +17,19 @@ import Validated._ import scala.reflect._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import scala.concurrent.ExecutionContext.Implicits.global import java.time.Instant -class ConcurrentEvaluationSpec - extends FunSpec - with Matchers - with ExpressionTreeCodec { - implicit val cs = IO.contextShift(global) +class ConcurrentEvaluationSpec extends AnyFunSpec with Matchers with ExpressionTreeCodec { + import cats.effect.unsafe.implicits.global + val interpreter = ConcurrentInterpreter.DEFAULT[IO].prependDirective(sleep) implicit def tileIsTileLiteral( - tile: Tile + tile: Tile ): RasterLit[ProjectedRaster[MultibandTile]] = RasterLit( ProjectedRaster( @@ -40,7 +40,7 @@ class ConcurrentEvaluationSpec ) implicit def tileIsTileLiteral( - tile: MultibandTile + tile: MultibandTile ): RasterLit[ProjectedRaster[MultibandTile]] = RasterLit( ProjectedRaster( @@ -59,9 +59,7 @@ class ConcurrentEvaluationSpec it("should take less time than the total duration of its leaves") { val sleepDuration = 3L - val expr = Addition(List( - Sleep(sleepDuration, List(IntLit(1))), - Sleep(sleepDuration, List(IntLit(1))))) + val expr = Addition(List(Sleep(sleepDuration, List(IntLit(1))), Sleep(sleepDuration, List(IntLit(1))))) val now1 = Instant.now.toEpochMilli interpreter(expr).unsafeRunSync.as[Int] should be(Valid(2)) val now2 = Instant.now.toEpochMilli @@ -200,21 +198,23 @@ class ConcurrentEvaluationSpec } } - it("Should interpret and evaluate tile assembly") { - interpreter(ast.Assemble( - List( - IntArrayTile(1 to 100 toArray, 10, 10), - IntArrayTile(101 to 200 toArray, 10, 10), - IntArrayTile(201 to 300 toArray, 10, 10) + interpreter( + ast.Assemble( + List( + IntArrayTile(1 to 100 toArray, 10, 10), + IntArrayTile(101 to 200 toArray, 10, 10), + IntArrayTile(201 to 300 toArray, 10, 10) + ) ) - )).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(1) - g.get(0, 0) should be(101) - b.get(0, 0) should be(201) - } + ).unsafeRunSync.as[MultibandTile] match { + case 
Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(1) + g.get(0, 0) should be(101) + b.get(0, 0) should be(201) + } case i @ Invalid(_) => fail(s"$i") } } @@ -339,20 +339,23 @@ class ConcurrentEvaluationSpec case Valid(t) => t.bands.head.get(5, 5) should be(354) case i @ Invalid(_) => fail(s"$i") } - interpreter(ast.RGB( - List( - IntArrayTile(1 to 100 toArray, 10, 10), - IntArrayTile(101 to 200 toArray, 10, 10), - IntArrayTile(201 to 300 toArray, 10, 10) + interpreter( + ast.RGB( + List( + IntArrayTile(1 to 100 toArray, 10, 10), + IntArrayTile(101 to 200 toArray, 10, 10), + IntArrayTile(201 to 300 toArray, 10, 10) + ) ) - )).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(1) - g.get(0, 0) should be(101) - b.get(0, 0) should be(201) - } - case i@Invalid(_) => fail(s"$i") + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(1) + g.get(0, 0) should be(101) + b.get(0, 0) should be(201) + } + case i @ Invalid(_) => fail(s"$i") } val mbt: Expression = MultibandTile( @@ -361,71 +364,83 @@ class ConcurrentEvaluationSpec IntArrayTile(201 to 300 toArray, 10, 10) ) interpreter(ast.RGB(List(mbt, mbt, mbt), "0", "1", "2")).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(1) - g.get(0, 0) should be(101) - b.get(0, 0) should be(201) - } + case Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(1) + g.get(0, 0) should be(101) + b.get(0, 0) should be(201) + } case i @ Invalid(_) => fail(s"$i") } - interpreter(Rescale(ast.RGB( - List( - IntArrayTile(1 to 100 toArray, 10, 10), - IntArrayTile(101 to 200 toArray, 10, 10), - IntArrayTile(201 to 300 toArray, 10, 10) + interpreter( + Rescale(ast.RGB( + List( + IntArrayTile(1 to 100 toArray, 10, 10), + IntArrayTile(101 to 200 toArray, 10, 10), + IntArrayTile(201 to 300 toArray, 10, 10) + ) + ) :: Nil, + 10, + 11 ) - ) :: Nil, 10, 11)).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(10) - g.get(0, 0) should be(10) - b.get(0, 0) should be(10) - } + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(10) + g.get(0, 0) should be(10) + b.get(0, 0) should be(10) + } case i @ Invalid(_) => fail(s"$i") } - interpreter(ast.RGB( - List( - Rescale(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11), - Rescale(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 20, 21), - Rescale(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 30, 31) + interpreter( + ast.RGB( + List( + Rescale(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11), + Rescale(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 20, 21), + Rescale(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 30, 31) + ) ) - )).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(10) - g.get(0, 0) should be(20) - b.get(0, 0) should be(30) - } + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(10) + g.get(0, 0) should be(20) + b.get(0, 0) should be(30) + } case i @ Invalid(_) => fail(s"$i") } - interpreter(ast.RGB( - List( - Clamp(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11), - Clamp(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 
120, 121), - Clamp(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 230, 231) + interpreter( + ast.RGB( + List( + Clamp(IntArrayTile(1 to 100 toArray, 10, 10) :: Nil, 10, 11), + Clamp(IntArrayTile(101 to 200 toArray, 10, 10) :: Nil, 120, 121), + Clamp(IntArrayTile(201 to 300 toArray, 10, 10) :: Nil, 230, 231) + ) ) - )).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands match { - case Vector(r, g, b) => - r.get(0, 0) should be(10) - g.get(0, 0) should be(120) - b.get(0, 0) should be(230) - } + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => + t.bands match { + case Vector(r, g, b) => + r.get(0, 0) should be(10) + g.get(0, 0) should be(120) + b.get(0, 0) should be(230) + } case i @ Invalid(_) => fail(s"$i") } - /** The hillshade test is a bit more involved than some of the above - * See http://bit.ly/Qj0YPg for more information about the proper interpretation - * of hillshade values - **/ + /** + * The hillshade test is a bit more involved than some of the above See http://bit.ly/Qj0YPg for more information about the proper interpretation + * of hillshade values + */ val hillshadeTile = IntArrayTile( - Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, - 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), + Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), 5, 5 ) @@ -440,7 +455,7 @@ class ConcurrentEvaluationSpec interpreter( FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45) ).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands.head.get(2, 2) should be(77) + case Valid(t) => t.bands.head.get(2, 2) should be(90) case i @ Invalid(_) => fail(s"$i") } } diff --git a/jvm/src/test/scala/eval/EvaluationSpec.scala b/jvm/src/test/scala/eval/EvaluationSpec.scala index d0bbb690..a3300565 100644 --- a/jvm/src/test/scala/eval/EvaluationSpec.scala +++ b/jvm/src/test/scala/eval/EvaluationSpec.scala @@ -11,10 +11,12 @@ import geotrellis.proj4.WebMercator import cats.data._ import Validated._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import scala.reflect._ -class EvaluationSpec extends FunSpec with Matchers with ExpressionTreeCodec { +class EvaluationSpec extends AnyFunSpec with Matchers with ExpressionTreeCodec { implicit def tileIsTileLiteral(tile: Tile): RasterLit[ProjectedRaster[MultibandTile]] = RasterLit(ProjectedRaster(MultibandTile(tile), Extent(0, 0, 0.05, 0.05), WebMercator)) @@ -24,193 +26,186 @@ class EvaluationSpec extends FunSpec with Matchers with ExpressionTreeCodec { implicit class TypeRefinement(self: Interpreted[Result]) { def as[T: ClassTag]: Interpreted[T] = self match { - case Valid(r) => r.as[T] - case i@Invalid(_) => i + case Valid(r) => r.as[T] + case i @ Invalid(_) => i } } val interpreter = Interpreter.DEFAULT it("Should interpret and evaluate to Boolean literals") { - interpreter(BoolLit(true)).as[Boolean] should be (Valid(true)) - interpreter(false).as[Boolean] should be (Valid(false)) - interpreter(true).as[Boolean] should be (Valid(true)) + interpreter(BoolLit(true)).as[Boolean] should be(Valid(true)) + interpreter(false).as[Boolean] should be(Valid(false)) + interpreter(true).as[Boolean] should be(Valid(true)) } it("Should interpret and evaluate to Int literals") { - interpreter(IntLit(42)).as[Int] should be (Valid(42)) - interpreter(IntLit(4200)).as[Int] should be (Valid(4200)) + interpreter(IntLit(42)).as[Int] should be(Valid(42)) + interpreter(IntLit(4200)).as[Int] should 
be(Valid(4200)) } - it("Should interpret and evaluate to double literals") { - interpreter(DblLit(42.0)).as[Double] should be (Valid(42.0)) - interpreter(DblLit(4200.0123)).as[Double] should be (Valid(4200.0123)) + interpreter(DblLit(42.0)).as[Double] should be(Valid(42.0)) + interpreter(DblLit(4200.0123)).as[Double] should be(Valid(4200.0123)) } it("Should interpret and evaluate addition with scalars") { - interpreter(IntLit(42) + DblLit(42)).as[Double] should be (Valid(84.0)) + interpreter(IntLit(42) + DblLit(42)).as[Double] should be(Valid(84.0)) } it("Should interpret and evaluate multiplication with scalars") { - interpreter(IntLit(2) * DblLit(42)).as[Double] should be (Valid(84.0)) + interpreter(IntLit(2) * DblLit(42)).as[Double] should be(Valid(84.0)) } it("Should interpret and evaluate division with scalars") { - interpreter(DblLit(20) / DblLit(2) / DblLit(2)).as[Double] should be (Valid(5.0)) + interpreter(DblLit(20) / DblLit(2) / DblLit(2)).as[Double] should be(Valid(5.0)) } it("Should interpret and evaluate comparisions with scalars") { - interpreter(DblLit(20) < DblLit(20)).as[Boolean] should be (Valid(false)) - interpreter(DblLit(19) < DblLit(20)).as[Boolean] should be (Valid(true)) - interpreter(DblLit(29) < DblLit(20)).as[Boolean] should be (Valid(false)) + interpreter(DblLit(20) < DblLit(20)).as[Boolean] should be(Valid(false)) + interpreter(DblLit(19) < DblLit(20)).as[Boolean] should be(Valid(true)) + interpreter(DblLit(29) < DblLit(20)).as[Boolean] should be(Valid(false)) - interpreter(DblLit(20) <= DblLit(20)).as[Boolean] should be (Valid(true)) - interpreter(DblLit(19) <= DblLit(20)).as[Boolean] should be (Valid(true)) - interpreter(DblLit(29) <= DblLit(20)).as[Boolean] should be (Valid(false)) + interpreter(DblLit(20) <= DblLit(20)).as[Boolean] should be(Valid(true)) + interpreter(DblLit(19) <= DblLit(20)).as[Boolean] should be(Valid(true)) + interpreter(DblLit(29) <= DblLit(20)).as[Boolean] should be(Valid(false)) - interpreter(DblLit(20) === DblLit(20)).as[Boolean] should be (Valid(true)) - interpreter(DblLit(19) === DblLit(20)).as[Boolean] should be (Valid(false)) - interpreter(DblLit(29) === DblLit(20)).as[Boolean] should be (Valid(false)) + interpreter(DblLit(20) === DblLit(20)).as[Boolean] should be(Valid(true)) + interpreter(DblLit(19) === DblLit(20)).as[Boolean] should be(Valid(false)) + interpreter(DblLit(29) === DblLit(20)).as[Boolean] should be(Valid(false)) - interpreter(DblLit(20) >= DblLit(20)).as[Boolean] should be (Valid(true)) - interpreter(DblLit(19) >= DblLit(20)).as[Boolean] should be (Valid(false)) - interpreter(DblLit(29) >= DblLit(20)).as[Boolean] should be (Valid(true)) + interpreter(DblLit(20) >= DblLit(20)).as[Boolean] should be(Valid(true)) + interpreter(DblLit(19) >= DblLit(20)).as[Boolean] should be(Valid(false)) + interpreter(DblLit(29) >= DblLit(20)).as[Boolean] should be(Valid(true)) - interpreter(DblLit(20) > DblLit(20)).as[Boolean] should be (Valid(false)) - interpreter(DblLit(19) > DblLit(20)).as[Boolean] should be (Valid(false)) - interpreter(DblLit(29) > DblLit(20)).as[Boolean] should be (Valid(true)) + interpreter(DblLit(20) > DblLit(20)).as[Boolean] should be(Valid(false)) + interpreter(DblLit(19) > DblLit(20)).as[Boolean] should be(Valid(false)) + interpreter(DblLit(29) > DblLit(20)).as[Boolean] should be(Valid(true)) } it("Should interpret and evaluate ndvi") { interpreter((DblLit(5) - DblLit(2)) / (DblLit(5) + DblLit(2))).as[Double] match { - case Valid(x) => x should be (0.42857 +- 0.001) - case i@Invalid(_) => fail(s"$i") + 
case Valid(x) => x should be(0.42857 +- 0.001) + case i @ Invalid(_) => fail(s"$i") } } it("Should interpret and evaluate tile addition") { interpreter(IntArrayTile(1 to 4 toArray, 2, 2) + IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (2) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(2) + case i @ Invalid(_) => fail(s"$i") } } it("Should interpret and evaluate tile subtraction") { interpreter(IntArrayTile(1 to 4 toArray, 2, 2) - IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } } it("Should interpret and evaluate tile multiplication") { interpreter(IntArrayTile(1 to 4 toArray, 2, 2) * IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(1, 0) should be (4) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(1, 0) should be(4) + case i @ Invalid(_) => fail(s"$i") } } it("Should interpret and evaluate tile division") { interpreter(IntArrayTile(1 to 4 toArray, 2, 2) / IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(1, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(1, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } } it("should interpret and evaluate tile comparison") { interpreter(IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(2 to 5 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(0 to 3 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(2 to 5 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(0 to 3 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(Equal(List(IntArrayTile(1 to 4 toArray, 2, 2), IntArrayTile(2 to 5 toArray, 2, 2)))).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case 
Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(Equal(List(IntArrayTile(1 to 4 toArray, 2, 2), IntArrayTile(1 to 4 toArray, 2, 2)))).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(Equal(List(IntArrayTile(1 to 4 toArray, 2, 2), IntArrayTile(0 to 3 toArray, 2, 2)))).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(2 to 5 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(0 to 3 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(2 to 5 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(1 to 4 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (0) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") } interpreter(IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(0 to 3 toArray, 2, 2)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(0, 0) should be (1) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") } interpreter(FocalSlope(List(IntArrayTile(1 to 100 toArray, 10, 10)))).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(5, 5) should be (10) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(5, 5) should be(10) + case i @ Invalid(_) => fail(s"$i") } interpreter(FocalAspect(List(IntArrayTile(1 to 100 toArray, 10, 10)))).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(5, 5) should be (354) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(5, 5) should be(354) + case i @ Invalid(_) => fail(s"$i") } - /** The hillshade test is a bit more involved than some of the above - * See http://bit.ly/Qj0YPg for more information about the proper interpretation - * of hillshade values - **/ + /** + * The hillshade test is a bit more involved than some of the above See http://bit.ly/Qj0YPg for more information about the proper interpretation + * of hillshade values + */ val hillshadeTile = - IntArrayTile( - Array(0, 0, 0, 0, 0, - 0, 2450, 2461, 2483, 0, - 0, 2452, 2461, 2483, 0, - 0, 2447, 2455, 2477, 0, - 
0, 0, 0, 0, 0), - 5, 5) + IntArrayTile(Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), 5, 5) val hillshadeE = Extent(0, 0, 25, 25) val hillshadeProjectedRaster = ProjectedRaster(Raster(MultibandTile(hillshadeTile), hillshadeE), WebMercator) interpreter(FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45)).as[MultibandTile] match { - case Valid(t) => t.bands.head.get(2, 2) should be (77) - case i@Invalid(_) => fail(s"$i") + case Valid(t) => t.bands.head.get(2, 2) should be(90) + case i @ Invalid(_) => fail(s"$i") } } } diff --git a/jvm/src/test/scala/eval/MultibandSelectionSpec.scala b/jvm/src/test/scala/eval/MultibandSelectionSpec.scala index aa2cb5a7..4ece6519 100644 --- a/jvm/src/test/scala/eval/MultibandSelectionSpec.scala +++ b/jvm/src/test/scala/eval/MultibandSelectionSpec.scala @@ -7,9 +7,10 @@ import geotrellis.vector._ import geotrellis.proj4.WebMercator import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers - -class MultibandSelectionSpec extends FunSpec with Matchers { +class MultibandSelectionSpec extends AnyFunSpec with Matchers { def someRaster(v: Int) = { val someTile = ArrayTile(Array(v, v, v, v), 2, 2) @@ -19,21 +20,23 @@ class MultibandSelectionSpec extends FunSpec with Matchers { it("Should allow for the selection of bands by idx") { val imagery = LazyMultibandRaster(List(someRaster(1), someRaster(2), someRaster(3))) - imagery.select(List("1", "2")) should be (LazyMultibandRaster(Map("1" -> someRaster(2), "2" -> someRaster(3)))) + imagery.select(List("1", "2")) should be(LazyMultibandRaster(Map("1" -> someRaster(2), "2" -> someRaster(3)))) } it("Should allow for the selection of bands by label") { val imagery = LazyMultibandRaster(Map("red" -> someRaster(1), "green" -> someRaster(2), "blue" -> someRaster(3))) - imagery.select(List("green", "blue")) should be (LazyMultibandRaster(Map("green" -> someRaster(2), "blue" -> someRaster(3)))) + imagery.select(List("green", "blue")) should be(LazyMultibandRaster(Map("green" -> someRaster(2), "blue" -> someRaster(3)))) } it("Should allow for selection of bands which encode transformations") { val ast = - Addition(List( - RasterLit(LazyMultibandRaster(Map("green" -> someRaster(3)))), - ImageSelect(List(RasterLit(LazyMultibandRaster(Map("red" -> someRaster(1), "green" -> someRaster(3))))), List("green")) - )) - - BufferingInterpreter.DEFAULT(ast).andThen(_.as[MultibandTile]).map(_.bandCount).toOption should be (Some(1)) + Addition( + List( + RasterLit(LazyMultibandRaster(Map("green" -> someRaster(3)))), + ImageSelect(List(RasterLit(LazyMultibandRaster(Map("red" -> someRaster(1), "green" -> someRaster(3))))), List("green")) + ) + ) + + BufferingInterpreter.DEFAULT(ast).andThen(_.as[MultibandTile]).map(_.bandCount).toOption should be(Some(1)) } } diff --git a/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala b/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala index 56993b2a..90594c92 100644 --- a/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala +++ b/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala @@ -20,24 +20,23 @@ import cats._ import cats.data.{NonEmptyList => NEL, _} import cats.effect._ import Validated._ -import org.scalatest._ import scala.reflect._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import scala.concurrent.ExecutionContext.Implicits.global import java.time.Instant -class ParallelEvaluationSpec - extends 
FunSpec - with Matchers - with ExpressionTreeCodec { - implicit val cs = IO.contextShift(global) +class ParallelEvaluationSpec extends AnyFunSpec with Matchers with ExpressionTreeCodec { + import cats.effect.unsafe.implicits.global + val interpreter = ParallelInterpreter.DEFAULT[IO, IO.Par].prependDirective(sleep) implicit def tileIsTileLiteral( - tile: Tile + tile: Tile ): RasterLit[ProjectedRaster[MultibandTile]] = RasterLit( ProjectedRaster( @@ -57,9 +56,7 @@ class ParallelEvaluationSpec // WARN: this test depends on the amount of directives in the interpreter BEFORE the Sleep directive it("should take less time than the total duration of its leaves") { val sleepDuration = 3L - val expr = Addition(List( - Sleep(sleepDuration, List(IntLit(1))), - Sleep(sleepDuration, List(IntLit(1))))) + val expr = Addition(List(Sleep(sleepDuration, List(IntLit(1))), Sleep(sleepDuration, List(IntLit(1))))) val now1 = Instant.now.toEpochMilli interpreter(expr).unsafeRunSync.as[Int] should be(Valid(2)) val now2 = Instant.now.toEpochMilli @@ -314,14 +311,13 @@ class ParallelEvaluationSpec case i @ Invalid(_) => fail(s"$i") } - /** The hillshade test is a bit more involved than some of the above - * See http://bit.ly/Qj0YPg for more information about the proper interpretation - * of hillshade values - **/ + /** + * The hillshade test is a bit more involved than some of the above See http://bit.ly/Qj0YPg for more information about the proper interpretation + * of hillshade values + */ val hillshadeTile = IntArrayTile( - Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, - 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), + Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), 5, 5 ) @@ -336,7 +332,7 @@ class ParallelEvaluationSpec interpreter( FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45) ).unsafeRunSync.as[MultibandTile] match { - case Valid(t) => t.bands.head.get(2, 2) should be(77) + case Valid(t) => t.bands.head.get(2, 2) should be(90) case i @ Invalid(_) => fail(s"$i") } } diff --git a/jvm/src/test/scala/eval/ResultSpec.scala b/jvm/src/test/scala/eval/ResultSpec.scala index 55c23238..c445f779 100644 --- a/jvm/src/test/scala/eval/ResultSpec.scala +++ b/jvm/src/test/scala/eval/ResultSpec.scala @@ -14,47 +14,48 @@ import cats._ import cats.data.{NonEmptyList => NEL, _} import Validated._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers - -class ResultSpec extends FunSpec with Matchers { +class ResultSpec extends AnyFunSpec with Matchers { it("Evaluate to desired output (int)") { - IntResult(42 + 1).as[Int] should be (Valid(43)) + IntResult(42 + 1).as[Int] should be(Valid(43)) } it("Evaluate to desired output (double)") { - IntResult(42 + 1).as[Double] should be (Valid(43.0)) + IntResult(42 + 1).as[Double] should be(Valid(43.0)) } it("Evaluate to desired output (tile)") { - val anImage = ImageResult(LazyMultibandRaster(List( - LazyRaster(IntArrayTile(1 to 4 toArray, 2, 2), Extent(0,0,1,1), WebMercator)) - )) + val anImage = ImageResult(LazyMultibandRaster(List(LazyRaster(IntArrayTile(1 to 4 toArray, 2, 2), Extent(0, 0, 1, 1), WebMercator)))) val anInt = IntResult(1) - anImage.as[Int] should be (Invalid(NEL.of(DivergingTypes("int", List("img"))))) + anImage.as[Int] should be(Invalid(NEL.of(DivergingTypes("int", List("img"))))) anImage.as[MultibandTile] should matchPattern { case Valid(_) => } anInt.as[MultibandTile] should matchPattern { case Invalid(_) => } - val 
complexImage = ImageResult(LazyMultibandRaster(List( - LazyRaster.MapInt(List(LazyRaster(IntArrayTile(1 to 4 toArray, 2, 2), Extent(0,0,1,1), WebMercator)), { i: Int => i + 4 }) - ))) + val complexImage = ImageResult( + LazyMultibandRaster( + List( + LazyRaster.MapInt(List(LazyRaster(IntArrayTile(1 to 4 toArray, 2, 2), Extent(0, 0, 1, 1), WebMercator)), { i: Int => i + 4 }) + ) + ) + ) complexImage.as[MultibandTile] should matchPattern { case Valid(_) => } } it("Evaluate float tile with different cols / rows") { val zero = LazyRaster(FloatArrayTile.fill(0, 52, 36), Extent(0, 0, 4, 4), WebMercator) val one = LazyRaster(FloatArrayTile.fill(1, 52, 36), Extent(0, 0, 4, 4), WebMercator) - val tr = ImageResult(LazyMultibandRaster(List( - LazyRaster.DualCombine(List(zero, one), _ - _, _ - _)) - )) + val tr = ImageResult(LazyMultibandRaster(List(LazyRaster.DualCombine(List(zero, one), _ - _, _ - _)))) val tile = tr.as[MultibandTile].valueOr(r => throw new Exception(r.toString)) - tr.res.bands.head._2.cols should be (zero.cols) - tr.res.bands.head._2.rows should be (zero.rows) + tr.res.bands.head._2.cols should be(zero.cols) + tr.res.bands.head._2.rows should be(zero.rows) - tr.res.bands.head._2.cols should be (tile.cols) - tr.res.bands.head._2.rows should be (tile.rows) + tr.res.bands.head._2.cols should be(tile.cols) + tr.res.bands.head._2.rows should be(tile.rows) } it("Evaluate mask out data according to a mask") { @@ -62,23 +63,21 @@ class ResultSpec extends FunSpec with Matchers { val mask = MultiPolygon(Extent(0, 0, 1, 1).toPolygon) val maskResult = ImageResult(LazyMultibandRaster(List(MaskingNode(List(rasterOnes), mask)))) - val maskResultSB = ImageResult(LazyMultibandRaster(List(MaskingNode(List(rasterOnes), mask)))) - val maskResultRGB = ImageResult(LazyMultibandRaster( - List(rasterOnes, rasterOnes, rasterOnes)).mask(mask)) + val maskResultRGB = ImageResult(LazyMultibandRaster(List(rasterOnes, rasterOnes, rasterOnes)).mask(mask)) for { - x <- (0 to 3 toArray) - y <- (0 to 3 toArray) + x <- (0 to 3).toArray + y <- (0 to 3).toArray } yield { val fetchedSB = maskResultSB.res.bands.head._2.get(x, y) - val fetchedRGB = maskResultRGB.res.bands.toList map { _._2.get(x, y) } + val fetchedRGB = maskResultRGB.res.bands.toList.map { _._2.get(x, y) } if ((x, y) == (0, 3)) { - isData(fetchedSB) should be (true) - fetchedRGB map { isData(_) } should be (List(true, true, true)) + isData(fetchedSB) should be(true) + fetchedRGB.map { isData(_) } should be(List(true, true, true)) } else { - isData(fetchedSB) should be (false) - fetchedRGB map { isData(_) } should be (List(false, false, false)) + isData(fetchedSB) should be(false) + fetchedRGB.map { isData(_) } should be(List(false, false, false)) } } } diff --git a/jvm/src/test/scala/eval/ScopedEvaluationSpec.scala b/jvm/src/test/scala/eval/ScopedEvaluationSpec.scala index 264acd86..f50d85b5 100644 --- a/jvm/src/test/scala/eval/ScopedEvaluationSpec.scala +++ b/jvm/src/test/scala/eval/ScopedEvaluationSpec.scala @@ -15,18 +15,20 @@ import cats._ import cats.data.{NonEmptyList => NEL, _} import Validated._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import scala.reflect._ -class ScopedEvaluationSpec extends FunSpec with Matchers { +class ScopedEvaluationSpec extends AnyFunSpec with Matchers { def tileToLit(tile: Tile): RasterLit[ProjectedRaster[MultibandTile]] = RasterLit(ProjectedRaster(MultibandTile(tile), Extent(0, 0, 1, 1), WebMercator)) implicit class TypeRefinement(self: 
Interpreted[Result]) { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = self match { - case Valid(r) => r.as[T] - case i@Invalid(_) => i + case Valid(r) => r.as[T] + case i @ Invalid(_) => i } } @@ -34,13 +36,13 @@ class ScopedEvaluationSpec extends FunSpec with Matchers { it("Should interpret and evaluate focal operation") { interpreter(FocalMax(List(tileToLit(IntArrayTile(1 to 4 toArray, 2, 2))), Square(1))).as[MultibandTile] match { - case Valid(tile) => tile.bands.head.get(0, 0) should be (4) - case i@Invalid(_) => fail(s"$i") + case Valid(tile) => tile.bands.head.get(0, 0) should be(4) + case i @ Invalid(_) => fail(s"$i") } } it("Should interpret and evaluate Int literals") { - interpreter(IntLit(42)).as[Int] should be (Valid(42)) - interpreter(IntLit(4200)).as[Int] should be (Valid(4200)) + interpreter(IntLit(42)).as[Int] should be(Valid(42)) + interpreter(IntLit(4200)).as[Int] should be(Valid(4200)) } } diff --git a/jvm/src/test/scala/eval/VariableSpec.scala b/jvm/src/test/scala/eval/VariableSpec.scala index 30dbf2c3..9d244b55 100644 --- a/jvm/src/test/scala/eval/VariableSpec.scala +++ b/jvm/src/test/scala/eval/VariableSpec.scala @@ -17,41 +17,47 @@ import cats._ import cats.data.{NonEmptyList => NEL, _} import Validated._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import scala.reflect._ - -class VariableSpec extends FunSpec with Matchers { +class VariableSpec extends AnyFunSpec with Matchers { implicit class TypeRefinement(self: Interpreted[Result]) { def as[T: ClassTag]: Interpreted[T] = self match { - case Valid(r) => r.as[T] - case i@Invalid(_) => i + case Valid(r) => r.as[T] + case i @ Invalid(_) => i } } val interpreter = NaiveInterpreter.DEFAULT it("should produce an accurate variable map in a simple case") { - Vars.vars(BoolVar("predicate1")) should be (Map("predicate1" -> MamlKind.Bool)) + Vars.vars(BoolVar("predicate1")) should be(Map("predicate1" -> MamlKind.Bool)) } it("should produce an accurate variable map in a complex case") { - Vars.vars(Addition(List(IntVar("arg1"), IntVar("arg2")))) should be (Map("arg1" -> MamlKind.Int, "arg2" -> MamlKind.Int)) + Vars.vars(Addition(List(IntVar("arg1"), IntVar("arg2")))) should be(Map("arg1" -> MamlKind.Int, "arg2" -> MamlKind.Int)) } it("should produce an accurate variable map with buffer in a simple case") { - Vars.varsWithBuffer(FocalMax(List(RasterVar("someRaster")), Square(1))) should be (Map("someRaster" -> (MamlKind.Image, 1))) + Vars.varsWithBuffer(FocalMax(List(RasterVar("someRaster")), Square(1))) should be(Map("someRaster" -> (MamlKind.Image, 1))) } it("should produce an accurate variable map with buffer in an ambiguous case") { - val ast = Addition(List( + val ast = Addition( + List( FocalMax(List( - FocalMax(List(RasterVar("someRaster")), Square(1)) - ), Square(1), TargetCell.All), + FocalMax(List(RasterVar("someRaster")), Square(1)) + ), + Square(1), + TargetCell.All + ), RasterVar("someRaster") - )) + ) + ) - Vars.varsWithBuffer(ast) should be (Map("someRaster" -> (MamlKind.Image, 2))) + Vars.varsWithBuffer(ast) should be(Map("someRaster" -> (MamlKind.Image, 2))) } } diff --git a/jvm/version.sbt b/jvm/version.sbt deleted file mode 100644 index dfd7e193..00000000 --- a/jvm/version.sbt +++ /dev/null @@ -1 +0,0 @@ -version in ThisBuild := "0.3.4-SNAPSHOT" diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 87fe1f70..0bfce5ee 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -18,36 +18,42 @@ import 
sbt._ import sbt.Keys._ object Dependencies { - private def ver(v211: String, v212: String) = Def.setting { + private def ver(v212: String, v213: String) = Def.setting { CrossVersion.partialVersion(scalaVersion.value) match { - case Some((2, 11)) => v211 case Some((2, 12)) => v212 - case v => sys.error(s"scala version: $v") + case Some((2, 13)) => v213 + case v => sys.error(s"scala version: $v") } } def circe(module: String) = Def.setting { - module match { - case "optics" => "io.circe" %% s"circe-$module" % ver("0.11.0", "0.12.0").value - case _ => "io.circe" %% s"circe-$module" % ver("0.11.1", "0.12.2").value } + val version = module match { + case "generic-extras" => "0.14.3" + case "optics" => "0.14.1" + case _ => "0.14.7" + } + + "io.circe" %% s"circe-$module" % version } def cats(module: String) = Def.setting { - module match { - case "core" => "org.typelevel" %% s"cats-$module" % ver("1.6.1", "2.0.0").value - case "effect" => "org.typelevel" %% s"cats-$module" % ver("1.3.1", "2.0.0").value + val version = module match { + case "core" => "2.10.0" + case "effect" => "3.5.4" } + + "org.typelevel" %% s"cats-$module" % version } def spark(module: String) = Def.setting { - "org.apache.spark" %% s"spark-$module" % "2.4.4" + "org.apache.spark" %% s"spark-$module" % "3.5.1" } def geotrellis(module: String) = Def.setting { - "org.locationtech.geotrellis" %% s"geotrellis-$module" % "3.2.0" + "org.locationtech.geotrellis" %% s"geotrellis-$module" % "3.7.1" } - val logging = "org.log4s" %% "log4s" % "1.8.2" - val scalatest = "org.scalatest" %% "scalatest" % "3.0.1" - val scalacheck = "org.scalacheck" %% "scalacheck" % "1.13.4" + val logging = "org.log4s" %% "log4s" % "1.10.0" + val scalatest = "org.scalatest" %% "scalatest" % "3.2.18" + val scalacheck = "org.scalacheck" %% "scalacheck" % "1.18.0" } diff --git a/project/build.properties b/project/build.properties index c0bab049..081fdbbc 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.8 +sbt.version=1.10.0 diff --git a/project/plugins.sbt b/project/plugins.sbt index 32d6b1ee..110e78ad 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,21 +1,10 @@ -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.4.0") - -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") - -addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2") - -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.27") - -addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") - -addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.19") - -addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.0") - -addSbtPlugin("com.scalapenos" % "sbt-prompt" % "1.0.2") - -addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.5") - -addSbtPlugin("io.crashbox" % "sbt-gpg" % "0.2.0") - -addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") +addDependencyTreePlugin + +addSbtPlugin("com.github.sbt" % "sbt-ci-release" % "1.5.12") +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.6.4") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "2.2.0") +addSbtPlugin("com.github.sbt" % "sbt-native-packager" % "1.10.0") +addSbtPlugin("com.47deg" % "sbt-microsites" % "1.4.4") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.16.0") +addSbtPlugin("org.portable-scala" % "sbt-scalajs-crossproject" % "1.3.2") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.5.2") diff --git a/project/project/plugins.sbt b/project/project/plugins.sbt deleted file mode 100644 index ef1f82c5..00000000 --- a/project/project/plugins.sbt +++ /dev/null @@ -1 +0,0 
@@ -addSbtPlugin("io.get-coursier" % "sbt-coursier" % "1.0.2") diff --git a/sbt b/sbt deleted file mode 100755 index 4785673e..00000000 --- a/sbt +++ /dev/null @@ -1,578 +0,0 @@ -#!/usr/bin/env bash -# -# A more capable sbt runner, coincidentally also called sbt. -# Author: Paul Phillips -# https://github.com/paulp/sbt-extras - -set -o pipefail - -declare -r sbt_release_version="1.2.8" -declare -r sbt_unreleased_version="1.2.8" - -declare -r latest_213="2.13.0-RC1" -declare -r latest_212="2.12.8" -declare -r latest_211="2.11.12" -declare -r latest_210="2.10.7" -declare -r latest_29="2.9.3" -declare -r latest_28="2.8.2" - -declare -r buildProps="project/build.properties" - -declare -r sbt_launch_ivy_release_repo="http://repo.typesafe.com/typesafe/ivy-releases" -declare -r sbt_launch_ivy_snapshot_repo="https://repo.scala-sbt.org/scalasbt/ivy-snapshots" -declare -r sbt_launch_mvn_release_repo="http://repo.scala-sbt.org/scalasbt/maven-releases" -declare -r sbt_launch_mvn_snapshot_repo="http://repo.scala-sbt.org/scalasbt/maven-snapshots" - -declare -r default_jvm_opts_common="-Xms512m -Xss2m" -declare -r noshare_opts="-Dsbt.global.base=project/.sbtboot -Dsbt.boot.directory=project/.boot -Dsbt.ivy.home=project/.ivy" - -declare sbt_jar sbt_dir sbt_create sbt_version sbt_script sbt_new -declare sbt_explicit_version -declare verbose noshare batch trace_level -declare debugUs - -declare java_cmd="java" -declare sbt_launch_dir="$HOME/.sbt/launchers" -declare sbt_launch_repo - -# pull -J and -D options to give to java. -declare -a java_args scalac_args sbt_commands residual_args - -# args to jvm/sbt via files or environment variables -declare -a extra_jvm_opts extra_sbt_opts - -echoerr () { echo >&2 "$@"; } -vlog () { [[ -n "$verbose" ]] && echoerr "$@"; } -die () { echo "Aborting: $*" ; exit 1; } - -setTrapExit () { - # save stty and trap exit, to ensure echo is re-enabled if we are interrupted. - SBT_STTY="$(stty -g 2>/dev/null)" - export SBT_STTY - - # restore stty settings (echo in particular) - onSbtRunnerExit() { - [ -t 0 ] || return - vlog "" - vlog "restoring stty: $SBT_STTY" - stty "$SBT_STTY" - } - - vlog "saving stty: $SBT_STTY" - trap onSbtRunnerExit EXIT -} - -# this seems to cover the bases on OSX, and someone will -# have to tell me about the others. -get_script_path () { - local path="$1" - [[ -L "$path" ]] || { echo "$path" ; return; } - - local -r target="$(readlink "$path")" - if [[ "${target:0:1}" == "/" ]]; then - echo "$target" - else - echo "${path%/*}/$target" - fi -} - -script_path="$(get_script_path "${BASH_SOURCE[0]}")" -declare -r script_path -script_name="${script_path##*/}" -declare -r script_name - -init_default_option_file () { - local overriding_var="${!1}" - local default_file="$2" - if [[ ! 
-r "$default_file" && "$overriding_var" =~ ^@(.*)$ ]]; then - local envvar_file="${BASH_REMATCH[1]}" - if [[ -r "$envvar_file" ]]; then - default_file="$envvar_file" - fi - fi - echo "$default_file" -} - -sbt_opts_file="$(init_default_option_file SBT_OPTS .sbtopts)" -jvm_opts_file="$(init_default_option_file JVM_OPTS .jvmopts)" - -build_props_sbt () { - [[ -r "$buildProps" ]] && \ - grep '^sbt\.version' "$buildProps" | tr '=\r' ' ' | awk '{ print $2; }' -} - -set_sbt_version () { - sbt_version="${sbt_explicit_version:-$(build_props_sbt)}" - [[ -n "$sbt_version" ]] || sbt_version=$sbt_release_version - export sbt_version -} - -url_base () { - local version="$1" - - case "$version" in - 0.7.*) echo "http://simple-build-tool.googlecode.com" ;; - 0.10.* ) echo "$sbt_launch_ivy_release_repo" ;; - 0.11.[12]) echo "$sbt_launch_ivy_release_repo" ;; - 0.*-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_ivy_snapshot_repo" ;; - 0.*) echo "$sbt_launch_ivy_release_repo" ;; - *-[0-9][0-9][0-9][0-9][0-9][0-9][0-9][0-9]-[0-9][0-9][0-9][0-9][0-9][0-9]) # ie "*-yyyymmdd-hhMMss" - echo "$sbt_launch_mvn_snapshot_repo" ;; - *) echo "$sbt_launch_mvn_release_repo" ;; - esac -} - -make_url () { - local version="$1" - - local base="${sbt_launch_repo:-$(url_base "$version")}" - - case "$version" in - 0.7.*) echo "$base/files/sbt-launch-0.7.7.jar" ;; - 0.10.* ) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; - 0.11.[12]) echo "$base/org.scala-tools.sbt/sbt-launch/$version/sbt-launch.jar" ;; - 0.*) echo "$base/org.scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; - *) echo "$base/org/scala-sbt/sbt-launch/$version/sbt-launch.jar" ;; - esac -} - -addJava () { vlog "[addJava] arg = '$1'" ; java_args+=("$1"); } -addSbt () { vlog "[addSbt] arg = '$1'" ; sbt_commands+=("$1"); } -addScalac () { vlog "[addScalac] arg = '$1'" ; scalac_args+=("$1"); } -addResidual () { vlog "[residual] arg = '$1'" ; residual_args+=("$1"); } - -addResolver () { addSbt "set resolvers += $1"; } -addDebugger () { addJava "-Xdebug" ; addJava "-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=$1"; } -setThisBuild () { - vlog "[addBuild] args = '$*'" - local key="$1" && shift - addSbt "set $key in ThisBuild := $*" -} -setScalaVersion () { - [[ "$1" == *"-SNAPSHOT" ]] && addResolver 'Resolver.sonatypeRepo("snapshots")' - addSbt "++ $1" -} -setJavaHome () { - java_cmd="$1/bin/java" - setThisBuild javaHome "_root_.scala.Some(file(\"$1\"))" - export JAVA_HOME="$1" - export JDK_HOME="$1" - export PATH="$JAVA_HOME/bin:$PATH" -} - -getJavaVersion() { - local -r str=$("$1" -version 2>&1 | grep -E -e '(java|openjdk) version' | awk '{ print $3 }' | tr -d '"') - - # java -version on java8 says 1.8.x - # but on 9 and 10 it's 9.x.y and 10.x.y. 
- if [[ "$str" =~ ^1\.([0-9]+)\..*$ ]]; then - echo "${BASH_REMATCH[1]}" - elif [[ "$str" =~ ^([0-9]+)\..*$ ]]; then - echo "${BASH_REMATCH[1]}" - elif [[ -n "$str" ]]; then - echoerr "Can't parse java version from: $str" - fi -} - -checkJava() { - # Warn if there is a Java version mismatch between PATH and JAVA_HOME/JDK_HOME - - [[ -n "$JAVA_HOME" && -e "$JAVA_HOME/bin/java" ]] && java="$JAVA_HOME/bin/java" - [[ -n "$JDK_HOME" && -e "$JDK_HOME/lib/tools.jar" ]] && java="$JDK_HOME/bin/java" - - if [[ -n "$java" ]]; then - pathJavaVersion=$(getJavaVersion java) - homeJavaVersion=$(getJavaVersion "$java") - if [[ "$pathJavaVersion" != "$homeJavaVersion" ]]; then - echoerr "Warning: Java version mismatch between PATH and JAVA_HOME/JDK_HOME, sbt will use the one in PATH" - echoerr " Either: fix your PATH, remove JAVA_HOME/JDK_HOME or use -java-home" - echoerr " java version from PATH: $pathJavaVersion" - echoerr " java version from JAVA_HOME/JDK_HOME: $homeJavaVersion" - fi - fi -} - -java_version () { - local -r version=$(getJavaVersion "$java_cmd") - vlog "Detected Java version: $version" - echo "$version" -} - -# MaxPermSize critical on pre-8 JVMs but incurs noisy warning on 8+ -default_jvm_opts () { - local -r v="$(java_version)" - if [[ $v -ge 8 ]]; then - echo "$default_jvm_opts_common" - else - echo "-XX:MaxPermSize=384m $default_jvm_opts_common" - fi -} - -build_props_scala () { - if [[ -r "$buildProps" ]]; then - versionLine="$(grep '^build.scala.versions' "$buildProps")" - versionString="${versionLine##build.scala.versions=}" - echo "${versionString%% .*}" - fi -} - -execRunner () { - # print the arguments one to a line, quoting any containing spaces - vlog "# Executing command line:" && { - for arg; do - if [[ -n "$arg" ]]; then - if printf "%s\n" "$arg" | grep -q ' '; then - printf >&2 "\"%s\"\n" "$arg" - else - printf >&2 "%s\n" "$arg" - fi - fi - done - vlog "" - } - - setTrapExit - - if [[ -n "$batch" ]]; then - "$@" < /dev/null - else - "$@" - fi -} - -jar_url () { make_url "$1"; } - -is_cygwin () { [[ "$(uname -a)" == "CYGWIN"* ]]; } - -jar_file () { - is_cygwin \ - && cygpath -w "$sbt_launch_dir/$1/sbt-launch.jar" \ - || echo "$sbt_launch_dir/$1/sbt-launch.jar" -} - -download_url () { - local url="$1" - local jar="$2" - - echoerr "Downloading sbt launcher for $sbt_version:" - echoerr " From $url" - echoerr " To $jar" - - mkdir -p "${jar%/*}" && { - if command -v curl > /dev/null 2>&1; then - curl --fail --silent --location "$url" --output "$jar" - elif command -v wget > /dev/null 2>&1; then - wget -q -O "$jar" "$url" - fi - } && [[ -r "$jar" ]] -} - -acquire_sbt_jar () { - { - sbt_jar="$(jar_file "$sbt_version")" - [[ -r "$sbt_jar" ]] - } || { - sbt_jar="$HOME/.ivy2/local/org.scala-sbt/sbt-launch/$sbt_version/jars/sbt-launch.jar" - [[ -r "$sbt_jar" ]] - } || { - sbt_jar="$(jar_file "$sbt_version")" - download_url "$(make_url "$sbt_version")" "$sbt_jar" - } -} - -usage () { - set_sbt_version - cat < display stack traces with a max of frames (default: -1, traces suppressed) - -debug-inc enable debugging log for the incremental compiler - -no-colors disable ANSI color codes - -sbt-create start sbt even if current directory contains no sbt project - -sbt-dir path to global settings/plugins directory (default: ~/.sbt/) - -sbt-boot path to shared boot directory (default: ~/.sbt/boot in 0.11+) - -ivy path to local Ivy repository (default: ~/.ivy2) - -no-share use all local caches; no sharing - -offline put sbt in offline mode - -jvm-debug Turn on JVM debugging, open at the given 
port. - -batch Disable interactive mode - -prompt Set the sbt prompt; in expr, 's' is the State and 'e' is Extracted - -script Run the specified file as a scala script - - # sbt version (default: sbt.version from $buildProps if present, otherwise $sbt_release_version) - -sbt-force-latest force the use of the latest release of sbt: $sbt_release_version - -sbt-version use the specified version of sbt (default: $sbt_release_version) - -sbt-dev use the latest pre-release version of sbt: $sbt_unreleased_version - -sbt-jar use the specified jar as the sbt launcher - -sbt-launch-dir directory to hold sbt launchers (default: $sbt_launch_dir) - -sbt-launch-repo repo url for downloading sbt launcher jar (default: $(url_base "$sbt_version")) - - # scala version (default: as chosen by sbt) - -28 use $latest_28 - -29 use $latest_29 - -210 use $latest_210 - -211 use $latest_211 - -212 use $latest_212 - -213 use $latest_213 - -scala-home use the scala build at the specified directory - -scala-version use the specified version of scala - -binary-version use the specified scala version when searching for dependencies - - # java version (default: java from PATH, currently $(java -version 2>&1 | grep version)) - -java-home alternate JAVA_HOME - - # passing options to the jvm - note it does NOT use JAVA_OPTS due to pollution - # The default set is used if JVM_OPTS is unset and no -jvm-opts file is found - $(default_jvm_opts) - JVM_OPTS environment variable holding either the jvm args directly, or - the reference to a file containing jvm args if given path is prepended by '@' (e.g. '@/etc/jvmopts') - Note: "@"-file is overridden by local '.jvmopts' or '-jvm-opts' argument. - -jvm-opts file containing jvm args (if not given, .jvmopts in project root is used if present) - -Dkey=val pass -Dkey=val directly to the jvm - -J-X pass option -X directly to the jvm (-J is stripped) - - # passing options to sbt, OR to this runner - SBT_OPTS environment variable holding either the sbt args directly, or - the reference to a file containing sbt args if given path is prepended by '@' (e.g. '@/etc/sbtopts') - Note: "@"-file is overridden by local '.sbtopts' or '-sbt-opts' argument. 
- -sbt-opts file containing sbt args (if not given, .sbtopts in project root is used if present) - -S-X add -X to sbt's scalacOptions (-S is stripped) -EOM -} - -process_args () { - require_arg () { - local type="$1" - local opt="$2" - local arg="$3" - - if [[ -z "$arg" ]] || [[ "${arg:0:1}" == "-" ]]; then - die "$opt requires <$type> argument" - fi - } - while [[ $# -gt 0 ]]; do - case "$1" in - -h|-help) usage; exit 0 ;; - -v) verbose=true && shift ;; - -d) addSbt "--debug" && shift ;; - -w) addSbt "--warn" && shift ;; - -q) addSbt "--error" && shift ;; - -x) debugUs=true && shift ;; - -trace) require_arg integer "$1" "$2" && trace_level="$2" && shift 2 ;; - -ivy) require_arg path "$1" "$2" && addJava "-Dsbt.ivy.home=$2" && shift 2 ;; - -no-colors) addJava "-Dsbt.log.noformat=true" && shift ;; - -no-share) noshare=true && shift ;; - -sbt-boot) require_arg path "$1" "$2" && addJava "-Dsbt.boot.directory=$2" && shift 2 ;; - -sbt-dir) require_arg path "$1" "$2" && sbt_dir="$2" && shift 2 ;; - -debug-inc) addJava "-Dxsbt.inc.debug=true" && shift ;; - -offline) addSbt "set offline in Global := true" && shift ;; - -jvm-debug) require_arg port "$1" "$2" && addDebugger "$2" && shift 2 ;; - -batch) batch=true && shift ;; - -prompt) require_arg "expr" "$1" "$2" && setThisBuild shellPrompt "(s => { val e = Project.extract(s) ; $2 })" && shift 2 ;; - -script) require_arg file "$1" "$2" && sbt_script="$2" && addJava "-Dsbt.main.class=sbt.ScriptMain" && shift 2 ;; - - -sbt-create) sbt_create=true && shift ;; - -sbt-jar) require_arg path "$1" "$2" && sbt_jar="$2" && shift 2 ;; - -sbt-version) require_arg version "$1" "$2" && sbt_explicit_version="$2" && shift 2 ;; - -sbt-force-latest) sbt_explicit_version="$sbt_release_version" && shift ;; - -sbt-dev) sbt_explicit_version="$sbt_unreleased_version" && shift ;; - -sbt-launch-dir) require_arg path "$1" "$2" && sbt_launch_dir="$2" && shift 2 ;; - -sbt-launch-repo) require_arg path "$1" "$2" && sbt_launch_repo="$2" && shift 2 ;; - -scala-version) require_arg version "$1" "$2" && setScalaVersion "$2" && shift 2 ;; - -binary-version) require_arg version "$1" "$2" && setThisBuild scalaBinaryVersion "\"$2\"" && shift 2 ;; - -scala-home) require_arg path "$1" "$2" && setThisBuild scalaHome "_root_.scala.Some(file(\"$2\"))" && shift 2 ;; - -java-home) require_arg path "$1" "$2" && setJavaHome "$2" && shift 2 ;; - -sbt-opts) require_arg path "$1" "$2" && sbt_opts_file="$2" && shift 2 ;; - -jvm-opts) require_arg path "$1" "$2" && jvm_opts_file="$2" && shift 2 ;; - - -D*) addJava "$1" && shift ;; - -J*) addJava "${1:2}" && shift ;; - -S*) addScalac "${1:2}" && shift ;; - -28) setScalaVersion "$latest_28" && shift ;; - -29) setScalaVersion "$latest_29" && shift ;; - -210) setScalaVersion "$latest_210" && shift ;; - -211) setScalaVersion "$latest_211" && shift ;; - -212) setScalaVersion "$latest_212" && shift ;; - -213) setScalaVersion "$latest_213" && shift ;; - new) sbt_new=true && : ${sbt_explicit_version:=$sbt_release_version} && addResidual "$1" && shift ;; - *) addResidual "$1" && shift ;; - esac - done -} - -# process the direct command line arguments -process_args "$@" - -# skip #-styled comments and blank lines -readConfigFile() { - local end=false - until $end; do - read -r || end=true - [[ $REPLY =~ ^# ]] || [[ -z $REPLY ]] || echo "$REPLY" - done < "$1" -} - -# if there are file/environment sbt_opts, process again so we -# can supply args to this runner -if [[ -r "$sbt_opts_file" ]]; then - vlog "Using sbt options defined in file $sbt_opts_file" - while 
read -r opt; do extra_sbt_opts+=("$opt"); done < <(readConfigFile "$sbt_opts_file") -elif [[ -n "$SBT_OPTS" && ! ("$SBT_OPTS" =~ ^@.*) ]]; then - vlog "Using sbt options defined in variable \$SBT_OPTS" - IFS=" " read -r -a extra_sbt_opts <<< "$SBT_OPTS" -else - vlog "No extra sbt options have been defined" -fi - -[[ -n "${extra_sbt_opts[*]}" ]] && process_args "${extra_sbt_opts[@]}" - -# reset "$@" to the residual args -set -- "${residual_args[@]}" -argumentCount=$# - -# set sbt version -set_sbt_version - -checkJava - -# only exists in 0.12+ -setTraceLevel() { - case "$sbt_version" in - "0.7."* | "0.10."* | "0.11."* ) echoerr "Cannot set trace level in sbt version $sbt_version" ;; - *) setThisBuild traceLevel "$trace_level" ;; - esac -} - -# set scalacOptions if we were given any -S opts -[[ ${#scalac_args[@]} -eq 0 ]] || addSbt "set scalacOptions in ThisBuild += \"${scalac_args[*]}\"" - -[[ -n "$sbt_explicit_version" && -z "$sbt_new" ]] && addJava "-Dsbt.version=$sbt_explicit_version" -vlog "Detected sbt version $sbt_version" - -if [[ -n "$sbt_script" ]]; then - residual_args=( "$sbt_script" "${residual_args[@]}" ) -else - # no args - alert them there's stuff in here - (( argumentCount > 0 )) || { - vlog "Starting $script_name: invoke with -help for other options" - residual_args=( shell ) - } -fi - -# verify this is an sbt dir, -create was given or user attempts to run a scala script -[[ -r ./build.sbt || -d ./project || -n "$sbt_create" || -n "$sbt_script" || -n "$sbt_new" ]] || { - cat < require(children.length == 2, s"Incorrect number of arguments to a binary expression. Expected 2, found ${children.length}") val kindDerivation: (MamlKind, MamlKind) => MamlKind - lazy val kind = expression.children.map({ _.kind }).reduce({ kindDerivation(_, _) }) + lazy val kind = expression.children.map { _.kind }.reduce { kindDerivation(_, _) } } object BinaryExpression { def scalarCompareDerivation(k1: MamlKind, k2: MamlKind): MamlKind = (k1, k2) match { - case (MamlKind.Image, MamlKind.Image) => MamlKind.Image - case (MamlKind.Image, MamlKind.Int) => MamlKind.Image - case (MamlKind.Image, MamlKind.Double) => MamlKind.Image - case (MamlKind.Int, MamlKind.Image) => MamlKind.Image - case (MamlKind.Double, MamlKind.Image) => MamlKind.Image - case (MamlKind.Int, MamlKind.Int) => MamlKind.Bool + case (MamlKind.Image, MamlKind.Image) => MamlKind.Image + case (MamlKind.Image, MamlKind.Int) => MamlKind.Image + case (MamlKind.Image, MamlKind.Double) => MamlKind.Image + case (MamlKind.Int, MamlKind.Image) => MamlKind.Image + case (MamlKind.Double, MamlKind.Image) => MamlKind.Image + case (MamlKind.Int, MamlKind.Int) => MamlKind.Bool case (MamlKind.Double, MamlKind.Double) => MamlKind.Bool - case (x1, x2) => throw new InvalidParameterException(s"Expected image or scalar kinds. Found $x1 and $x2") + case (x1, x2) => throw new InvalidParameterException(s"Expected image or scalar kinds. 
Found $x1 and $x2") } } - diff --git a/shared/src/main/scala/ast/Expression.scala b/shared/src/main/scala/ast/Expression.scala index aa916eb3..ba490ff4 100644 --- a/shared/src/main/scala/ast/Expression.scala +++ b/shared/src/main/scala/ast/Expression.scala @@ -17,7 +17,7 @@ object Expression { params.get(v.name) match { case Some(literal) if literal.asInstanceOf[Expression].kind == subExpr.kind => Valid(literal.asInstanceOf[Expression]) case Some(literal) => Invalid(NEL.of(DivergingTypes(literal.asInstanceOf[Expression].kind.toString, List(subExpr.kind.toString)))) - case None => Invalid(NEL.of(NoVariableBinding(v, params))) + case None => Invalid(NEL.of(NoVariableBinding(v, params))) } case _ => subExpr.children.traverse(eval(_)).map(subExpr.withChildren) @@ -26,7 +26,9 @@ object Expression { } } -/** The ur-type for a recursive representation of MapAlgebra operations */ +/** + * The ur-type for a recursive representation of MapAlgebra operations + */ sealed abstract class Expression(val sym: String) extends Product with Serializable { def symbol: String = sym def children: List[Expression] @@ -67,13 +69,12 @@ case class Min(children: List[Expression]) extends Expression("min") with Foldab def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } - case class Masking(children: List[Expression]) extends Expression("mask") with BinaryExpression { val kindDerivation = { (k1: MamlKind, k2: MamlKind) => (k1, k2) match { case (MamlKind.Image, MamlKind.Geom) => MamlKind.Image case (MamlKind.Geom, MamlKind.Image) => MamlKind.Image - case (x1, x2) => throw new InvalidParameterException(s"Expected image and geometry kinds. Found $x1 and $x2") + case (x1, x2) => throw new InvalidParameterException(s"Expected image and geometry kinds. Found $x1 and $x2") } } @@ -81,14 +82,15 @@ case class Masking(children: List[Expression]) extends Expression("mask") with B } /** - * It is a [[FoldableExpression]] though expects only 3 rasters to be passed. - * It would use the first band of every raster and combine them into a single RGB raster. + * It is a [[FoldableExpression]] though expects only 3 rasters to be passed. It would use the first band of every raster and combine them into a + * single RGB raster. 
* - * redBand - the nam / number of the band from the first (red) argument - * greenBand - the name / number of the band from the second (green) argument + * redBand - the nam / number of the band from the first (red) argument greenBand - the name / number of the band from the second (green) argument * blueBand - the name / number of the band from the third (blue) argument */ -case class RGB(children: List[Expression], redBand: String = "0", blueBand: String = "0", greenBand: String = "0") extends Expression("rgb") with FoldableExpression { +case class RGB(children: List[Expression], redBand: String = "0", blueBand: String = "0", greenBand: String = "0") + extends Expression("rgb") + with FoldableExpression { val kindDerivation = FoldableExpression.imageOrScalarDerivation(this)(_, _) def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } @@ -96,16 +98,16 @@ case class RGB(children: List[Expression], redBand: String = "0", blueBand: Stri case class Pow(children: List[Expression]) extends Expression("**") with BinaryExpression { val kindDerivation = { (k1: MamlKind, k2: MamlKind) => (k1, k2) match { - case (MamlKind.Image, MamlKind.Image) => MamlKind.Image - case (MamlKind.Image, MamlKind.Int) => MamlKind.Image - case (MamlKind.Int, MamlKind.Image) => MamlKind.Image - case (MamlKind.Image, MamlKind.Double) => MamlKind.Image - case (MamlKind.Double, MamlKind.Image) => MamlKind.Image - case (MamlKind.Double, MamlKind.Int) => MamlKind.Double - case (MamlKind.Int, MamlKind.Double) => MamlKind.Double - case (MamlKind.Int, MamlKind.Int) => MamlKind.Double + case (MamlKind.Image, MamlKind.Image) => MamlKind.Image + case (MamlKind.Image, MamlKind.Int) => MamlKind.Image + case (MamlKind.Int, MamlKind.Image) => MamlKind.Image + case (MamlKind.Image, MamlKind.Double) => MamlKind.Image + case (MamlKind.Double, MamlKind.Image) => MamlKind.Image + case (MamlKind.Double, MamlKind.Int) => MamlKind.Double + case (MamlKind.Int, MamlKind.Double) => MamlKind.Double + case (MamlKind.Int, MamlKind.Int) => MamlKind.Double case (MamlKind.Double, MamlKind.Double) => MamlKind.Double - case (x1, x2) => throw new InvalidParameterException(s"Expected image or scalar kinds. Found $x1 and $x2") + case (x1, x2) => throw new InvalidParameterException(s"Expected image or scalar kinds. Found $x1 and $x2") } } def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) @@ -161,16 +163,23 @@ case class Atan2(children: List[Expression]) extends Expression("atan2") with Bi def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -/** FLOW CONTROL */ +/** + * FLOW CONTROL + */ case class Branch(children: List[Expression]) extends Expression("ifelse") { require(children.length == 3, s"Incorrect number of arguments to a branching/if-else expression. Expected 3, found ${children.length}") require(children(0).kind == MamlKind.Bool, s"The first argument to branching/if-else must have Kind Bool. Found ${children(0).kind}") - require(children(1).kind == children(2).kind, s"Unable to determine branching/if-else kind. If and Else body must be of the same kind. If-body: ${children(1).kind}. Else-body: ${children(2).kind}") + require( + children(1).kind == children(2).kind, + s"Unable to determine branching/if-else kind. If and Else body must be of the same kind. If-body: ${children(1).kind}. 
Else-body: ${children(2).kind}" + ) lazy val kind = children(1).kind def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -/** Operations which should only have one argument. */ +/** + * Operations which should only have one argument. + */ case class Classification(children: List[Expression], classMap: ClassMap) extends Expression("classify") with UnaryExpression { val kindDerivation: Map[MamlKind, MamlKind] = UnaryExpression.imageOrScalar def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) @@ -236,7 +245,9 @@ case class Ceil(children: List[Expression]) extends Expression("ceil") with Unar def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -/** Natural Log */ +/** + * Natural Log + */ case class LogE(children: List[Expression]) extends Expression("loge") with UnaryExpression { val kindDerivation: Map[MamlKind, MamlKind] = UnaryExpression.imageOrScalar def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) @@ -277,12 +288,16 @@ case class LogicalNegation(children: List[Expression]) extends Expression("lneg" def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class Rescale(children: List[Expression], newMin: Double, newMax: Double, band: Option[String] = None) extends Expression("rescale") with UnaryExpression { +case class Rescale(children: List[Expression], newMin: Double, newMax: Double, band: Option[String] = None) + extends Expression("rescale") + with UnaryExpression { val kindDerivation: Map[MamlKind, MamlKind] = UnaryExpression.imageOrScalar def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class Normalize(children: List[Expression], oldMin: Double, oldMax: Double, newMin: Double, newMax: Double, band: Option[String] = None) extends Expression("normalize") with UnaryExpression { +case class Normalize(children: List[Expression], oldMin: Double, oldMax: Double, newMin: Double, newMax: Double, band: Option[String] = None) + extends Expression("normalize") + with UnaryExpression { val kindDerivation: Map[MamlKind, MamlKind] = UnaryExpression.imageOrScalar def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } @@ -292,41 +307,63 @@ case class Clamp(children: List[Expression], min: Double, max: Double, band: Opt def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalMax(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fmax") with FocalExpression { +case class FocalMax(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fmax") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalMin(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fmin") with FocalExpression { +case class FocalMin(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fmin") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalMean(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fmean") with FocalExpression { 
+case class FocalMean(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fmean") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalMedian(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fmedian") with FocalExpression { +case class FocalMedian(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fmedian") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalMode(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fmode") with FocalExpression { +case class FocalMode(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fmode") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalSum(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fsum") with FocalExpression { +case class FocalSum(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fsum") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalStdDev(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) extends Expression("fstddev") with FocalExpression { +case class FocalStdDev(children: List[Expression], neighborhood: Neighborhood, target: TargetCell = TargetCell.All) + extends Expression("fstddev") + with FocalExpression { def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalSlope(children: List[Expression], zFactor: Option[Double] = None, target: TargetCell = TargetCell.All) extends Expression("fslope") with FocalExpression { +case class FocalSlope(children: List[Expression], zFactor: Option[Double] = None, target: TargetCell = TargetCell.All) + extends Expression("fslope") + with FocalExpression { // Not used in this focal operation def neighborhood = Square(1) def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -case class FocalHillshade(children: List[Expression], azimuth: Double, altitude: Double, zFactor: Option[Double] = None, target: TargetCell = TargetCell.All) extends Expression("fhillshade") with FocalExpression { +case class FocalHillshade(children: List[Expression], + azimuth: Double, + altitude: Double, + zFactor: Option[Double] = None, + target: TargetCell = TargetCell.All +) extends Expression("fhillshade") + with FocalExpression { // Not used in this focal operation def neighborhood = Square(1) def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) @@ -343,7 +380,9 @@ case class ImageSelect(children: List[Expression], labels: List[String]) extends def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) } -/** Assemble first bands from each child expression into a multiband image */ +/** + * Assemble first bands from each child expression into a multiband image + */ case class Assemble(children: List[Expression]) extends Expression("assemble") with 
FoldableExpression { val kindDerivation = FoldableExpression.imageOnly(this)(_, _) def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) diff --git a/shared/src/main/scala/ast/FocalExpression.scala b/shared/src/main/scala/ast/FocalExpression.scala index 79b4ce01..90a23cf8 100644 --- a/shared/src/main/scala/ast/FocalExpression.scala +++ b/shared/src/main/scala/ast/FocalExpression.scala @@ -4,10 +4,8 @@ import MamlKind._ import com.azavea.maml.ast._ import com.azavea.maml.util._ - // TODO maybe don't use lists for these unary things trait FocalExpression extends UnaryExpression { expression: Expression => def neighborhood: Neighborhood def kindDerivation: Map[MamlKind, MamlKind] = Map(MamlKind.Image -> MamlKind.Image) } - diff --git a/shared/src/main/scala/ast/FoldableExpression.scala b/shared/src/main/scala/ast/FoldableExpression.scala index 69e5d064..af878c6c 100644 --- a/shared/src/main/scala/ast/FoldableExpression.scala +++ b/shared/src/main/scala/ast/FoldableExpression.scala @@ -5,25 +5,25 @@ import java.security.InvalidParameterException trait FoldableExpression { expression: Expression => require(expression.children.length > 1, s"Incorrect number of arguments to a foldable expression. Expected >1, found ${expression.children.length}") val kindDerivation: (MamlKind, MamlKind) => MamlKind - lazy val kind = this.children.map({ _.kind }).reduce({ kindDerivation(_, _) }) + lazy val kind = this.children.map { _.kind }.reduce { kindDerivation(_, _) } } object FoldableExpression { def imageOrScalarDerivation(exp: FoldableExpression)(k1: MamlKind, k2: MamlKind): MamlKind = (k1, k2) match { - case (MamlKind.Image, MamlKind.Image) => MamlKind.Image - case (MamlKind.Int, MamlKind.Int) => MamlKind.Int - case (MamlKind.Image, MamlKind.Int) => MamlKind.Image - case (MamlKind.Int, MamlKind.Image) => MamlKind.Image + case (MamlKind.Image, MamlKind.Image) => MamlKind.Image + case (MamlKind.Int, MamlKind.Int) => MamlKind.Int + case (MamlKind.Image, MamlKind.Int) => MamlKind.Image + case (MamlKind.Int, MamlKind.Image) => MamlKind.Image case (MamlKind.Double, MamlKind.Double) => MamlKind.Double - case (MamlKind.Image, MamlKind.Double) => MamlKind.Image - case (MamlKind.Double, MamlKind.Image) => MamlKind.Image - case (MamlKind.Double, MamlKind.Int) => MamlKind.Double - case (MamlKind.Int, MamlKind.Double) => MamlKind.Double - case (x1, x2) => throw new InvalidParameterException(s"Expected image, int, or double kind. Found $x1 $x2") + case (MamlKind.Image, MamlKind.Double) => MamlKind.Image + case (MamlKind.Double, MamlKind.Image) => MamlKind.Image + case (MamlKind.Double, MamlKind.Int) => MamlKind.Double + case (MamlKind.Int, MamlKind.Double) => MamlKind.Double + case (x1, x2) => throw new InvalidParameterException(s"Expected image, int, or double kind. Found $x1 $x2") } def imageOnly(exp: FoldableExpression)(k1: MamlKind, k2: MamlKind): MamlKind = (k1, k2) match { case (MamlKind.Image, MamlKind.Image) => MamlKind.Image - case (x1, x2) => throw new InvalidParameterException(s"Expected image kind. Found $x1 $x2") + case (x1, x2) => throw new InvalidParameterException(s"Expected image kind. 
Found $x1 $x2") } } diff --git a/shared/src/main/scala/ast/MamlKind.scala b/shared/src/main/scala/ast/MamlKind.scala index 22fa9fc2..6b765301 100644 --- a/shared/src/main/scala/ast/MamlKind.scala +++ b/shared/src/main/scala/ast/MamlKind.scala @@ -6,7 +6,6 @@ import cats.implicits._ import scala.util.Try - sealed trait MamlKind { def repr: String } object MamlKind { case object Bool extends MamlKind { def repr: String = "bool" } @@ -19,11 +18,11 @@ object MamlKind { def fromString(str: String): Option[MamlKind] = Try { str match { - case "bool" => MamlKind.Bool - case "int" => MamlKind.Int - case "double" => MamlKind.Double - case "img" => MamlKind.Image - case "geom" => MamlKind.Geom + case "bool" => MamlKind.Bool + case "int" => MamlKind.Int + case "double" => MamlKind.Double + case "img" => MamlKind.Image + case "geom" => MamlKind.Geom case "nothing" => MamlKind.Nothing } }.toOption diff --git a/shared/src/main/scala/ast/Source.scala b/shared/src/main/scala/ast/Source.scala index d13e5d62..8fac4a19 100644 --- a/shared/src/main/scala/ast/Source.scala +++ b/shared/src/main/scala/ast/Source.scala @@ -10,7 +10,6 @@ import io.circe.generic.JsonCodec import java.lang.IllegalArgumentException - trait Source trait Literal extends Source { expression: Expression => @@ -23,4 +22,3 @@ trait Variable extends Source { expression: Expression => val children: List[Expression] = List.empty def withChildren(children: List[Expression]) = expression } - diff --git a/shared/src/main/scala/ast/UnaryExpression.scala b/shared/src/main/scala/ast/UnaryExpression.scala index a84fe6e0..85318886 100644 --- a/shared/src/main/scala/ast/UnaryExpression.scala +++ b/shared/src/main/scala/ast/UnaryExpression.scala @@ -1,6 +1,8 @@ package com.azavea.maml.ast -/** Operations which should only have one argument. */ +/** + * Operations which should only have one argument. + */ trait UnaryExpression { expression: Expression => require(expression.children.length == 1, s"Incorrect number of arguments to a unary expression. 
Expected 1, found ${expression.children.length}") lazy val kind = kindDerivation(expression.children.head.kind) @@ -15,4 +17,3 @@ object UnaryExpression { val scalar = intOnly ++ dblOnly val imageOrScalar = imageOnly ++ intOnly ++ dblOnly } - diff --git a/shared/src/main/scala/ast/codec/MamlCodecInstances.scala b/shared/src/main/scala/ast/codec/MamlCodecInstances.scala index 400040ce..6da99307 100644 --- a/shared/src/main/scala/ast/codec/MamlCodecInstances.scala +++ b/shared/src/main/scala/ast/codec/MamlCodecInstances.scala @@ -11,49 +11,49 @@ trait MamlCodecInstances extends MamlUtilityCodecs { implicit def totalEncoder: Encoder[Expression] implicit lazy val decodeAddition: Decoder[Addition] = - Decoder.forProduct1("args"){ args: List[Expression] => Addition(args) } + Decoder.forProduct1("args") { args: List[Expression] => Addition(args) } implicit lazy val encodeAddition: Encoder[Addition] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeSubtraction: Decoder[Subtraction] = - Decoder.forProduct1("args"){ args: List[Expression] => Subtraction(args) } + Decoder.forProduct1("args") { args: List[Expression] => Subtraction(args) } implicit lazy val encodeSubtraction: Encoder[Subtraction] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeMultiplication: Decoder[Multiplication] = - Decoder.forProduct1("args"){ args: List[Expression] => Multiplication(args) } + Decoder.forProduct1("args") { args: List[Expression] => Multiplication(args) } implicit lazy val encodeMultiplication: Encoder[Multiplication] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeDivision: Decoder[Division] = - Decoder.forProduct1("args"){ args: List[Expression] => Division(args) } + Decoder.forProduct1("args") { args: List[Expression] => Division(args) } implicit lazy val encodeDivision: Encoder[Division] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeMax: Decoder[Max] = - Decoder.forProduct1("args"){ args: List[Expression] => Max(args) } + Decoder.forProduct1("args") { args: List[Expression] => Max(args) } implicit lazy val encodeMax: Encoder[Max] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeMin: Decoder[Min] = - Decoder.forProduct1("args"){ args: List[Expression] => Min(args) } + Decoder.forProduct1("args") { args: List[Expression] => Min(args) } implicit lazy val encodeMin: Encoder[Min] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeMasking: Decoder[Masking] = - Decoder.forProduct1("args"){ args: List[Expression] => Masking(args) } + Decoder.forProduct1("args") { args: List[Expression] => Masking(args) } implicit lazy val encodeMasking: Encoder[Masking] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decoderSleep: Decoder[Sleep] = - Decoder.forProduct2("seconds", "args"){ - (seconds: Long, args: List[Expression]) => Sleep(seconds, args) + Decoder.forProduct2("seconds", "args") { (seconds: Long, args: List[Expression]) => + Sleep(seconds, args) } implicit lazy val encoderSleep: Encoder[Sleep] = Encoder.forProduct2("seconds", "args")(u => (u.seconds, u.children)) implicit lazy val decodePow: Decoder[Pow] = - Decoder.forProduct1("args"){ args: List[Expression] => Pow(args) } + Decoder.forProduct1("args") { args: List[Expression] => Pow(args) } implicit lazy val encodePow: Encoder[Pow] = Encoder.forProduct2("args", "symbol")(u => (u.children, 
u.sym)) @@ -63,22 +63,22 @@ trait MamlCodecInstances extends MamlUtilityCodecs { Encoder.forProduct3("args", "classifications", "symbol")(u => (u.children, u.classMap, u.sym)) implicit lazy val decodeFocalMax: Decoder[FocalMax] = - Decoder.forProduct3[FocalMax, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { - (args, zFactor, target) => FocalMax(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalMax, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { (args, zFactor, target) => + FocalMax(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalMax: Encoder[FocalMax] = - Encoder.forProduct4("args", "neighborhood", "target","symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) + Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) implicit lazy val decodeFocalMin: Decoder[FocalMin] = - Decoder.forProduct3[FocalMin, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { - (args, zFactor, target) => FocalMin(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalMin, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { (args, zFactor, target) => + FocalMin(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalMin: Encoder[FocalMin] = Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) implicit lazy val decodeFocalMean: Decoder[FocalMean] = - Decoder.forProduct3[FocalMean, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { - (args, zFactor, target) => FocalMean(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalMean, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { (args, zFactor, target) => + FocalMean(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalMean: Encoder[FocalMean] = Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) @@ -91,15 +91,15 @@ trait MamlCodecInstances extends MamlUtilityCodecs { Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) implicit lazy val decodeFocalMode: Decoder[FocalMode] = - Decoder.forProduct3[FocalMode, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { - (args, zFactor, target) => FocalMode(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalMode, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { (args, zFactor, target) => + FocalMode(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalMode: Encoder[FocalMode] = Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) implicit lazy val decodeFocalSum: Decoder[FocalSum] = - Decoder.forProduct3[FocalSum, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { - (args, zFactor, target) => FocalSum(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalSum, List[Expression], Neighborhood, Option[TargetCell]]("args", "neighborhood", "target") { (args, zFactor, target) => + FocalSum(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val 
encodeFocalSum: Encoder[FocalSum] = Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) @@ -112,15 +112,20 @@ trait MamlCodecInstances extends MamlUtilityCodecs { Encoder.forProduct4("args", "neighborhood", "target", "symbol")(u => (u.children, u.neighborhood, u.target, u.sym)) implicit lazy val decodeFocalSlope: Decoder[FocalSlope] = - Decoder.forProduct3[FocalSlope, List[Expression], Option[Double], Option[TargetCell]]("args", "zFactor", "target") { - (args, zFactor, target) => FocalSlope(args, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct3[FocalSlope, List[Expression], Option[Double], Option[TargetCell]]("args", "zFactor", "target") { (args, zFactor, target) => + FocalSlope(args, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalSlope: Encoder[FocalSlope] = Encoder.forProduct4("args", "zFactor", "target", "symbol")(u => (u.children, u.zFactor, u.target, u.sym)) implicit lazy val decodeFocalHillshade: Decoder[FocalHillshade] = - Decoder.forProduct5[FocalHillshade, List[Expression], Double, Double, Option[Double], Option[TargetCell]]("args", "azimuth", "altitude", "zFactor", "target") { - (args, azimuth, altitude, zFactor, target) => FocalHillshade(args, azimuth, altitude, zFactor, target.getOrElse(TargetCell.All)) + Decoder.forProduct5[FocalHillshade, List[Expression], Double, Double, Option[Double], Option[TargetCell]]("args", + "azimuth", + "altitude", + "zFactor", + "target" + ) { (args, azimuth, altitude, zFactor, target) => + FocalHillshade(args, azimuth, altitude, zFactor, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalHillshade: Encoder[FocalHillshade] = Encoder.forProduct6("args", "azimuth", "altitude", "zFactor", "target", "symbol")(u => @@ -128,116 +133,114 @@ trait MamlCodecInstances extends MamlUtilityCodecs { ) implicit lazy val decodeFocalAspect: Decoder[FocalAspect] = - Decoder.forProduct2[FocalAspect, List[Expression], Option[TargetCell]]("args", "target") { - (args, target) => FocalAspect(args, target.getOrElse(TargetCell.All)) + Decoder.forProduct2[FocalAspect, List[Expression], Option[TargetCell]]("args", "target") { (args, target) => + FocalAspect(args, target.getOrElse(TargetCell.All)) } implicit lazy val encodeFocalAspect: Encoder[FocalAspect] = - Encoder.forProduct3("args", "target", "symbol")(u => - (u.children, u.target, u.sym) - ) + Encoder.forProduct3("args", "target", "symbol")(u => (u.children, u.target, u.sym)) implicit lazy val decodeGreater: Decoder[Greater] = - Decoder.forProduct1("args"){ args: List[Expression] => Greater(args) } + Decoder.forProduct1("args") { args: List[Expression] => Greater(args) } implicit lazy val encodeGreater: Encoder[Greater] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeGreaterOrEqual: Decoder[GreaterOrEqual] = - Decoder.forProduct1("args"){ args: List[Expression] => GreaterOrEqual(args) } + Decoder.forProduct1("args") { args: List[Expression] => GreaterOrEqual(args) } implicit lazy val encodeGreaterOrEqual: Encoder[GreaterOrEqual] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeLesserOrEqual: Decoder[LesserOrEqual] = - Decoder.forProduct1("args"){ args: List[Expression] => LesserOrEqual(args) } + Decoder.forProduct1("args") { args: List[Expression] => LesserOrEqual(args) } implicit lazy val encodeLesserOrEqual: Encoder[LesserOrEqual] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val 
decodeLesser: Decoder[Lesser] = - Decoder.forProduct1("args"){ args: List[Expression] => Lesser(args) } + Decoder.forProduct1("args") { args: List[Expression] => Lesser(args) } implicit lazy val encodeLesser: Encoder[Lesser] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeUnequal: Decoder[Unequal] = - Decoder.forProduct1("args"){ args: List[Expression] => Unequal(args) } + Decoder.forProduct1("args") { args: List[Expression] => Unequal(args) } implicit lazy val encodeUnequal: Encoder[Unequal] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeEqual: Decoder[Equal] = - Decoder.forProduct1("args"){ args: List[Expression] => Equal(args) } + Decoder.forProduct1("args") { args: List[Expression] => Equal(args) } implicit lazy val encodeEqual: Encoder[Equal] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeOr: Decoder[Or] = - Decoder.forProduct1("args"){ args: List[Expression] => Or(args) } + Decoder.forProduct1("args") { args: List[Expression] => Or(args) } implicit lazy val encodeOr: Encoder[Or] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeXor: Decoder[Xor] = - Decoder.forProduct1("args"){ args: List[Expression] => Xor(args) } + Decoder.forProduct1("args") { args: List[Expression] => Xor(args) } implicit lazy val encodeXor: Encoder[Xor] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeAnd: Decoder[And] = - Decoder.forProduct1("args"){ args: List[Expression] => And(args) } + Decoder.forProduct1("args") { args: List[Expression] => And(args) } implicit lazy val encodeAnd: Encoder[And] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeAtan2: Decoder[Atan2] = - Decoder.forProduct1("args"){ args: List[Expression] => Atan2(args) } + Decoder.forProduct1("args") { args: List[Expression] => Atan2(args) } implicit lazy val encodeAtan2: Encoder[Atan2] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeBranch: Decoder[Branch] = - Decoder.forProduct1("args"){ args: List[Expression] => Branch(args) } + Decoder.forProduct1("args") { args: List[Expression] => Branch(args) } implicit lazy val encodeBranch: Encoder[Branch] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeSin: Decoder[Sin] = - Decoder.forProduct1("args"){ args: List[Expression] => Sin(args) } + Decoder.forProduct1("args") { args: List[Expression] => Sin(args) } implicit lazy val encodeSin: Encoder[Sin] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeCos: Decoder[Cos] = - Decoder.forProduct1("args"){ args: List[Expression] => Cos(args) } + Decoder.forProduct1("args") { args: List[Expression] => Cos(args) } implicit lazy val encodeCos: Encoder[Cos] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeTan: Decoder[Tan] = - Decoder.forProduct1("args"){ args: List[Expression] => Tan(args) } + Decoder.forProduct1("args") { args: List[Expression] => Tan(args) } implicit lazy val encodeTan: Encoder[Tan] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeSinh: Decoder[Sinh] = - Decoder.forProduct1("args"){ args: List[Expression] => Sinh(args) } + Decoder.forProduct1("args") { args: List[Expression] => Sinh(args) } implicit lazy val encodeSinh: Encoder[Sinh] = Encoder.forProduct2("args", "symbol")(u => (u.children, 
u.sym)) implicit lazy val decodeCosh: Decoder[Cosh] = - Decoder.forProduct1("args"){ args: List[Expression] => Cosh(args) } + Decoder.forProduct1("args") { args: List[Expression] => Cosh(args) } implicit lazy val encodeCosh: Encoder[Cosh] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeTanh: Decoder[Tanh] = - Decoder.forProduct1("args"){ args: List[Expression] => Tanh(args) } + Decoder.forProduct1("args") { args: List[Expression] => Tanh(args) } implicit lazy val encodeTanh: Encoder[Tanh] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeAsin: Decoder[Asin] = - Decoder.forProduct1("args"){ args: List[Expression] => Asin(args) } + Decoder.forProduct1("args") { args: List[Expression] => Asin(args) } implicit lazy val encodeAsin: Encoder[Asin] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeAcos: Decoder[Acos] = - Decoder.forProduct1("args"){ args: List[Expression] => Acos(args) } + Decoder.forProduct1("args") { args: List[Expression] => Acos(args) } implicit lazy val encodeAcos: Encoder[Acos] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeAtan: Decoder[Atan] = - Decoder.forProduct1("args"){ args: List[Expression] => Atan(args) } + Decoder.forProduct1("args") { args: List[Expression] => Atan(args) } implicit lazy val encodeAtan: Encoder[Atan] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) implicit lazy val decodeRound: Decoder[Round] = - Decoder.forProduct1("args"){ args: List[Expression] => Round(args) } + Decoder.forProduct1("args") { args: List[Expression] => Round(args) } implicit lazy val encodeRound: Encoder[Round] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) @@ -349,8 +352,8 @@ trait MamlCodecInstances extends MamlUtilityCodecs { Encoder.forProduct5("args", "redBand", "greenBand", "blueBand", "symbol")(u => (u.children, u.redBand, u.greenBand, u.blueBand, u.sym)) implicit lazy val decodeAssemble: Decoder[Assemble] = - Decoder.forProduct1[Assemble, List[Expression]]("args") { - (args) => Assemble(args) + Decoder.forProduct1[Assemble, List[Expression]]("args") { args => + Assemble(args) } implicit lazy val encodeAssemble: Encoder[Assemble] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) @@ -363,7 +366,9 @@ trait MamlCodecInstances extends MamlUtilityCodecs { implicit lazy val decodeNormalize: Decoder[Normalize] = Decoder.forProduct6("args", "oldMin", "oldMax", "newMin", "newMax", "band")(Normalize.apply) implicit lazy val encodeNormalize: Encoder[Normalize] = - Encoder.forProduct7("args", "oldMin", "oldMax", "newMin", "newMax", "band", "symbol")(u => (u.children, u.oldMin, u.oldMax, u.newMin, u.newMax, u.band, u.sym)) + Encoder.forProduct7("args", "oldMin", "oldMax", "newMin", "newMax", "band", "symbol")(u => + (u.children, u.oldMin, u.oldMax, u.newMin, u.newMax, u.band, u.sym) + ) implicit lazy val decodeClamp: Decoder[Clamp] = Decoder.forProduct4("args", "min", "max", "band")(Clamp.apply) diff --git a/shared/src/main/scala/ast/codec/MamlUtilityCodecs.scala b/shared/src/main/scala/ast/codec/MamlUtilityCodecs.scala index 65a0a232..e0accaa7 100644 --- a/shared/src/main/scala/ast/codec/MamlUtilityCodecs.scala +++ b/shared/src/main/scala/ast/codec/MamlUtilityCodecs.scala @@ -15,76 +15,79 @@ import java.util.UUID import scala.util.Try trait MamlUtilityCodecs { - implicit val decodeKeyDouble: KeyDecoder[Double] = KeyDecoder.instance[Double] { - (key: String) => 
Try(key.toDouble).toOption + implicit val decodeKeyDouble: KeyDecoder[Double] = KeyDecoder.instance[Double] { (key: String) => + Try(key.toDouble).toOption } - implicit val encodeKeyDouble: KeyEncoder[Double] = KeyEncoder.instance[Double] { - (key: Double) => key.toString + implicit val encodeKeyDouble: KeyEncoder[Double] = KeyEncoder.instance[Double] { (key: Double) => + key.toString } - implicit val decodeKeyUUID: KeyDecoder[UUID] = KeyDecoder.instance[UUID] { - (key: String) => Try(UUID.fromString(key)).toOption + implicit val decodeKeyUUID: KeyDecoder[UUID] = KeyDecoder.instance[UUID] { (key: String) => + Try(UUID.fromString(key)).toOption } - implicit val encodeKeyUUID: KeyEncoder[UUID] = KeyEncoder.instance[UUID] { - (key: UUID) => key.toString + implicit val encodeKeyUUID: KeyEncoder[UUID] = KeyEncoder.instance[UUID] { (key: UUID) => + key.toString } implicit lazy val classBoundaryDecoder: Decoder[ClassBoundaryType] = Decoder[String].emap { - case "lessThan" => Right(LessThan) - case "lessThanOrEqualTo" => Right(LessThanOrEqualTo) - case "exact" => Right(Exact) + case "lessThan" => Right(LessThan) + case "lessThanOrEqualTo" => Right(LessThanOrEqualTo) + case "exact" => Right(Exact) case "greaterThanOrEqualTo" => Right(GreaterThanOrEqualTo) - case "greaterThan" => Right(GreaterThan) - case unrecognized => Left(s"Unable to parse $unrecognized as ClassBoundaryType") + case "greaterThan" => Right(GreaterThan) + case unrecognized => Left(s"Unable to parse $unrecognized as ClassBoundaryType") } implicit lazy val classBoundaryEncoder: Encoder[ClassBoundaryType] = - Encoder.encodeString.contramap[ClassBoundaryType]({ cbType => + Encoder.encodeString.contramap[ClassBoundaryType] { cbType => cbType match { - case LessThan => "lessThan" - case LessThanOrEqualTo => "lessThanOrEqualTo" - case Exact => "exact" + case LessThan => "lessThan" + case LessThanOrEqualTo => "lessThanOrEqualTo" + case Exact => "exact" case GreaterThanOrEqualTo => "greaterThanOrEqualTo" - case GreaterThan => "greaterThan" + case GreaterThan => "greaterThan" case unrecognized => throw new InvalidParameterException(s"'$unrecognized' is not a recognized ClassBoundaryType") } - }) + } implicit val colorRampDecoder: Decoder[ColorRamp] = - Decoder[Vector[Int]].map({ vec => ColorRamp(vec) }) + Decoder[Vector[Int]].map { vec => ColorRamp(vec) } implicit val colorRampEncoder: Encoder[ColorRamp] = new Encoder[ColorRamp] { final def apply(cRamp: ColorRamp): Json = cRamp.colors.toArray.asJson } - implicit val histogramDecoder = Decoder.instance[Histogram]({ curs => + implicit val histogramDecoder = Decoder.instance[Histogram] { curs => curs.get[Map[Double, Int]]("counts") match { case Right(counts) => Right(Histogram(counts)) - case Left(err) => Left(DecodingFailure(s"Unable to parse histogram", curs.history)) + case Left(err) => Left(DecodingFailure(s"Unable to parse histogram", curs.history)) } - }) + } implicit val histogramEncoder: Encoder[Histogram] = - Encoder.forProduct1("counts")(hist => (hist.counts)) + Encoder.forProduct1("counts")(hist => hist.counts) // This won't actually work - NESW neighborhoods will *always* succeed in decoding to Square implicit val neighborhoodDecoder: Decoder[Neighborhood] = Decoder.instance[Neighborhood] { cursor => - cursor.get[String]("type") map { - case "square" => Decoder[Square] - case "circle" => Decoder[Circle] - case "nesw" => Decoder[Nesw] - case "wedge" => Decoder[Wedge] - case "annulus" => Decoder[Annulus] - } flatMap { _.widen(cursor) } + cursor + .get[String]("type") + .map { + case 
"square" => Decoder[Square] + case "circle" => Decoder[Circle] + case "nesw" => Decoder[Nesw] + case "wedge" => Decoder[Wedge] + case "annulus" => Decoder[Annulus] + } + .flatMap { _.widen(cursor) } } implicit val neighborhoodEncoder: Encoder[Neighborhood] = new Encoder[Neighborhood] { final def apply(n: Neighborhood): Json = n match { - case square: Square => square.asJson - case circle: Circle => circle.asJson - case nesw: Nesw => nesw.asJson - case wedge: Wedge => wedge.asJson + case square: Square => square.asJson + case circle: Circle => circle.asJson + case nesw: Nesw => nesw.asJson + case wedge: Wedge => wedge.asJson case annulus: Annulus => annulus.asJson case unrecognized => throw new InvalidParameterException(s"Unrecognized neighborhood: $unrecognized") @@ -92,26 +95,25 @@ trait MamlUtilityCodecs { } implicit val mamlKindDecoder: Decoder[MamlKind] = Decoder[String].emap { - case "img" => Right(MamlKind.Image) - case "int" => Right(MamlKind.Int) - case "double" => Right(MamlKind.Double) - case "geom" => Right(MamlKind.Geom) - case "bool" => Right(MamlKind.Bool) + case "img" => Right(MamlKind.Image) + case "int" => Right(MamlKind.Int) + case "double" => Right(MamlKind.Double) + case "geom" => Right(MamlKind.Geom) + case "bool" => Right(MamlKind.Bool) case unrecognized => Left(s"Unrecognized MamlKind: $unrecognized") } implicit val mamlKindEncoder: Encoder[MamlKind] = Encoder.encodeString.contramap[MamlKind] { - case MamlKind.Image => "img" - case MamlKind.Int => "int" + case MamlKind.Image => "img" + case MamlKind.Int => "int" case MamlKind.Double => "double" - case MamlKind.Geom => "geom" - case MamlKind.Bool => "bool" + case MamlKind.Geom => "geom" + case MamlKind.Bool => "bool" case unrecognized => throw new InvalidParameterException(s"Unrecognized mamlKind: $unrecognized") } - implicit val squareNeighborhoodDecoder: Decoder[Square] = Decoder.forProduct1("extent")(Square.apply) implicit val squareNeighborhoodEncoder: Encoder[Square] = diff --git a/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala b/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala index 26d51d26..f7271a90 100644 --- a/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala +++ b/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala @@ -11,147 +11,147 @@ import java.security.InvalidParameterException trait ExpressionTreeCodec extends MamlCodecInstances { implicit lazy val totalEncoder: Encoder[Expression] = Encoder.instance { - case il @ IntLit(_) => il.asJson - case iv @ IntVar(_) => iv.asJson - case gv @ GeomVar(_) => gv.asJson - case gl @ GeomLit(_) => gl.asJson - case dl @ DblLit(_) => dl.asJson - case dv @ DblVar(_) => dv.asJson - case bl @ BoolLit(_) => bl.asJson - case bv @ BoolVar(_) => bv.asJson + case il @ IntLit(_) => il.asJson + case iv @ IntVar(_) => iv.asJson + case gv @ GeomVar(_) => gv.asJson + case gl @ GeomLit(_) => gl.asJson + case dl @ DblLit(_) => dl.asJson + case dv @ DblVar(_) => dv.asJson + case bl @ BoolLit(_) => bl.asJson + case bv @ BoolVar(_) => bv.asJson case rv @ RasterVar(_) => rv.asJson case rl @ RasterLit(_) => throw new InvalidParameterException("Can't encode raster literal as JSON") - case add @ Addition(_) => add.asJson - case sub @ Subtraction(_) => sub.asJson - case mul @ Multiplication(_) => mul.asJson - case div @ Division(_) => div.asJson - case max @ Max(_) => max.asJson - case min @ Min(_) => min.asJson - case mask @ Masking(_) => mask.asJson - case cls @ Classification(_, _) => cls.asJson - case fmax @ FocalMax(_, _, _) => 
fmax.asJson - case fmin @ FocalMin(_, _, _) => fmin.asJson - case fmean @ FocalMean(_, _, _) => fmean.asJson - case fmed @ FocalMedian(_, _, _) => fmed.asJson - case fmode @ FocalMode(_, _, _) => fmode.asJson - case fsum @ FocalSum(_, _, _) => fsum.asJson - case fstddev @ FocalStdDev(_, _, _) => fstddev.asJson - case fslope @ FocalSlope(_, _, _) => fslope.asJson + case add @ Addition(_) => add.asJson + case sub @ Subtraction(_) => sub.asJson + case mul @ Multiplication(_) => mul.asJson + case div @ Division(_) => div.asJson + case max @ Max(_) => max.asJson + case min @ Min(_) => min.asJson + case mask @ Masking(_) => mask.asJson + case cls @ Classification(_, _) => cls.asJson + case fmax @ FocalMax(_, _, _) => fmax.asJson + case fmin @ FocalMin(_, _, _) => fmin.asJson + case fmean @ FocalMean(_, _, _) => fmean.asJson + case fmed @ FocalMedian(_, _, _) => fmed.asJson + case fmode @ FocalMode(_, _, _) => fmode.asJson + case fsum @ FocalSum(_, _, _) => fsum.asJson + case fstddev @ FocalStdDev(_, _, _) => fstddev.asJson + case fslope @ FocalSlope(_, _, _) => fslope.asJson case fhillshade @ FocalHillshade(_, _, _, _, _) => fhillshade.asJson - case faspect @ FocalAspect(_, _) => faspect.asJson - case imgsel @ ImageSelect(_, _) => imgsel.asJson - case lneg @ LogicalNegation(_) => lneg.asJson - case nneg @ NumericNegation(_) => nneg.asJson - case udfn @ Undefined(_) => udfn.asJson - case dfn @ Defined(_) => dfn.asJson - case abs @ Abs(_) => abs.asJson - case sqrt @ SquareRoot(_) => sqrt.asJson - case log10 @ Log10(_) => log10.asJson - case loge @ LogE(_) => loge.asJson - case ceil @ Ceil(_) => ceil.asJson - case flr @ Floor(_) => flr.asJson - case rnd @ Round(_) => rnd.asJson - case acos @ Acos(_) => acos.asJson - case asin @ Asin(_) => asin.asJson - case tanh @ Tanh(_) => tanh.asJson - case cosh @ Cosh(_) => cosh.asJson - case sinh @ Sinh(_) => sinh.asJson - case tan @ Tan(_) => tan.asJson - case cos @ Cos(_) => cos.asJson - case sin @ Sin(_) => sin.asJson - case branch @ Branch(_) => branch.asJson - case atan @ Atan(_) => atan.asJson - case atan2 @ Atan2(_) => atan2.asJson - case and @ And(_) => and.asJson - case or @ Or(_) => or.asJson - case xor @ Xor(_) => xor.asJson - case gt @ Greater(_) => gt.asJson - case gtoe @ GreaterOrEqual(_) => gtoe.asJson - case equal @ Equal(_) => equal.asJson - case unequal @ Unequal(_) => unequal.asJson - case ltoe @ LesserOrEqual(_) => ltoe.asJson - case lt @ Lesser(_) => lt.asJson - case pow @ Pow(_) => pow.asJson - case sleep @ Sleep(_, _) => sleep.asJson - case rgb @ RGB(_, _, _, _) => rgb.asJson - case assemble @ Assemble(_) => assemble.asJson - case rescale @ Rescale(_, _, _, _) => rescale.asJson - case normalize @ Normalize(_, _, _, _, _, _) => normalize.asJson - case clamp @ Clamp(_, _, _, _) => clamp.asJson + case faspect @ FocalAspect(_, _) => faspect.asJson + case imgsel @ ImageSelect(_, _) => imgsel.asJson + case lneg @ LogicalNegation(_) => lneg.asJson + case nneg @ NumericNegation(_) => nneg.asJson + case udfn @ Undefined(_) => udfn.asJson + case dfn @ Defined(_) => dfn.asJson + case abs @ Abs(_) => abs.asJson + case sqrt @ SquareRoot(_) => sqrt.asJson + case log10 @ Log10(_) => log10.asJson + case loge @ LogE(_) => loge.asJson + case ceil @ Ceil(_) => ceil.asJson + case flr @ Floor(_) => flr.asJson + case rnd @ Round(_) => rnd.asJson + case acos @ Acos(_) => acos.asJson + case asin @ Asin(_) => asin.asJson + case tanh @ Tanh(_) => tanh.asJson + case cosh @ Cosh(_) => cosh.asJson + case sinh @ Sinh(_) => sinh.asJson + case tan @ Tan(_) => 
tan.asJson + case cos @ Cos(_) => cos.asJson + case sin @ Sin(_) => sin.asJson + case branch @ Branch(_) => branch.asJson + case atan @ Atan(_) => atan.asJson + case atan2 @ Atan2(_) => atan2.asJson + case and @ And(_) => and.asJson + case or @ Or(_) => or.asJson + case xor @ Xor(_) => xor.asJson + case gt @ Greater(_) => gt.asJson + case gtoe @ GreaterOrEqual(_) => gtoe.asJson + case equal @ Equal(_) => equal.asJson + case unequal @ Unequal(_) => unequal.asJson + case ltoe @ LesserOrEqual(_) => ltoe.asJson + case lt @ Lesser(_) => lt.asJson + case pow @ Pow(_) => pow.asJson + case sleep @ Sleep(_, _) => sleep.asJson + case rgb @ RGB(_, _, _, _) => rgb.asJson + case assemble @ Assemble(_) => assemble.asJson + case rescale @ Rescale(_, _, _, _) => rescale.asJson + case normalize @ Normalize(_, _, _, _, _, _) => normalize.asJson + case clamp @ Clamp(_, _, _, _) => clamp.asJson } implicit lazy val totalDecoder: Decoder[Expression] = Decoder.instance[Expression] { cursor => - cursor._symbol map { - case "+" => Decoder[Addition] - case "-" => Decoder[Subtraction] - case "*" => Decoder[Multiplication] - case "/" => Decoder[Division] - case "max" => Decoder[Max] - case "min" => Decoder[Min] - case "mask" => Decoder[Masking] - case "**" => Decoder[Pow] - case "<" => Decoder[Lesser] - case "<=" => Decoder[LesserOrEqual] - case "!=" => Decoder[Unequal] - case "=" => Decoder[Equal] - case ">=" => Decoder[GreaterOrEqual] - case ">" => Decoder[Greater] - case "or" => Decoder[Or] - case "xor" => Decoder[Xor] - case "and" => Decoder[And] - case "atan2" => Decoder[Atan2] - case "ifelse" => Decoder[Branch] - case "classify" => Decoder[Classification] - case "sin" => Decoder[Sin] - case "cos" => Decoder[Cos] - case "tan" => Decoder[Tan] - case "sinh" => Decoder[Sinh] - case "cosh" => Decoder[Cosh] - case "tanh" => Decoder[Tanh] - case "asin" => Decoder[Asin] - case "acos" => Decoder[Acos] - case "atan" => Decoder[Atan] - case "round" => Decoder[Round] - case "floor" => Decoder[Floor] - case "Ceil" => Decoder[Ceil] - case "loge" => Decoder[LogE] - case "log10" => Decoder[Log10] - case "sqrt" => Decoder[SquareRoot] - case "abs" => Decoder[Abs] - case "def" => Decoder[Defined] - case "undef" => Decoder[Undefined] - case "nneg" => Decoder[NumericNegation] - case "lneg" => Decoder[LogicalNegation] - case "fmax" => Decoder[FocalMax] - case "fmin" => Decoder[FocalMin] - case "fmean" => Decoder[FocalMean] - case "fmedian" => Decoder[FocalMedian] - case "fmode" => Decoder[FocalMode] - case "fsum" => Decoder[FocalSum] - case "fstddev" => Decoder[FocalStdDev] - case "fslope" => Decoder[FocalSlope] + cursor._symbol.map { + case "+" => Decoder[Addition] + case "-" => Decoder[Subtraction] + case "*" => Decoder[Multiplication] + case "/" => Decoder[Division] + case "max" => Decoder[Max] + case "min" => Decoder[Min] + case "mask" => Decoder[Masking] + case "**" => Decoder[Pow] + case "<" => Decoder[Lesser] + case "<=" => Decoder[LesserOrEqual] + case "!=" => Decoder[Unequal] + case "=" => Decoder[Equal] + case ">=" => Decoder[GreaterOrEqual] + case ">" => Decoder[Greater] + case "or" => Decoder[Or] + case "xor" => Decoder[Xor] + case "and" => Decoder[And] + case "atan2" => Decoder[Atan2] + case "ifelse" => Decoder[Branch] + case "classify" => Decoder[Classification] + case "sin" => Decoder[Sin] + case "cos" => Decoder[Cos] + case "tan" => Decoder[Tan] + case "sinh" => Decoder[Sinh] + case "cosh" => Decoder[Cosh] + case "tanh" => Decoder[Tanh] + case "asin" => Decoder[Asin] + case "acos" => Decoder[Acos] + case "atan" => 
Decoder[Atan] + case "round" => Decoder[Round] + case "floor" => Decoder[Floor] + case "Ceil" => Decoder[Ceil] + case "loge" => Decoder[LogE] + case "log10" => Decoder[Log10] + case "sqrt" => Decoder[SquareRoot] + case "abs" => Decoder[Abs] + case "def" => Decoder[Defined] + case "undef" => Decoder[Undefined] + case "nneg" => Decoder[NumericNegation] + case "lneg" => Decoder[LogicalNegation] + case "fmax" => Decoder[FocalMax] + case "fmin" => Decoder[FocalMin] + case "fmean" => Decoder[FocalMean] + case "fmedian" => Decoder[FocalMedian] + case "fmode" => Decoder[FocalMode] + case "fsum" => Decoder[FocalSum] + case "fstddev" => Decoder[FocalStdDev] + case "fslope" => Decoder[FocalSlope] case "fhillshade" => Decoder[FocalHillshade] - case "faspect" => Decoder[FocalAspect] - case "sel" => Decoder[ImageSelect] - case "int" => Decoder[IntLit] - case "intV" => Decoder[IntVar] - case "dbl" => Decoder[DblLit] - case "dblV" => Decoder[DblVar] - case "bool" => Decoder[BoolLit] - case "boolV" => Decoder[BoolVar] - case "geom" => Decoder[GeomLit] - case "geomV" => Decoder[GeomVar] - case "rasterV" => Decoder[RasterVar] - case "sleep" => Decoder[Sleep] - case "rgb" => Decoder[RGB] - case "assemble" => Decoder[Assemble] - case "rescale" => Decoder[Rescale] - case "normalize" => Decoder[Normalize] - case "clamp" => Decoder[Clamp] + case "faspect" => Decoder[FocalAspect] + case "sel" => Decoder[ImageSelect] + case "int" => Decoder[IntLit] + case "intV" => Decoder[IntVar] + case "dbl" => Decoder[DblLit] + case "dblV" => Decoder[DblVar] + case "bool" => Decoder[BoolLit] + case "boolV" => Decoder[BoolVar] + case "geom" => Decoder[GeomLit] + case "geomV" => Decoder[GeomVar] + case "rasterV" => Decoder[RasterVar] + case "sleep" => Decoder[Sleep] + case "rgb" => Decoder[RGB] + case "assemble" => Decoder[Assemble] + case "rescale" => Decoder[Rescale] + case "normalize" => Decoder[Normalize] + case "clamp" => Decoder[Clamp] } match { case Some(decoder) => decoder.widen(cursor) - case None => Left(DecodingFailure(s"No symbol provided for MAML expression", cursor.history)) + case None => Left(DecodingFailure(s"No symbol provided for MAML expression", cursor.history)) } } } diff --git a/shared/src/main/scala/dsl/Literals.scala b/shared/src/main/scala/dsl/Literals.scala index 07a0bc47..fc6f8d72 100644 --- a/shared/src/main/scala/dsl/Literals.scala +++ b/shared/src/main/scala/dsl/Literals.scala @@ -2,7 +2,6 @@ package com.azavea.maml.dsl import com.azavea.maml.ast._ - trait Literals { implicit def intIsIntLiteral(int: Int): IntLit = IntLit(int) implicit def dblIsDoubleLiteral(dbl: Double): DblLit = DblLit(dbl) diff --git a/shared/src/main/scala/dsl/Operations.scala b/shared/src/main/scala/dsl/Operations.scala index f05b4a8b..1302d9b1 100644 --- a/shared/src/main/scala/dsl/Operations.scala +++ b/shared/src/main/scala/dsl/Operations.scala @@ -2,7 +2,6 @@ package com.azavea.maml.dsl import com.azavea.maml.ast._ - trait Operations { implicit class LocalExpressionMethods[Exp <: Expression](e: Exp) { def +(other: Expression) = Addition(List(e, other)) diff --git a/shared/src/main/scala/dsl/package.scala b/shared/src/main/scala/dsl/package.scala index 87ab9432..c7d04bae 100644 --- a/shared/src/main/scala/dsl/package.scala +++ b/shared/src/main/scala/dsl/package.scala @@ -1,4 +1,3 @@ package com.azavea.maml - package object dsl extends Literals with Operations diff --git a/shared/src/main/scala/error/MamlError.scala b/shared/src/main/scala/error/MamlError.scala index a72e90d5..62ee9892 100644 --- 
a/shared/src/main/scala/error/MamlError.scala +++ b/shared/src/main/scala/error/MamlError.scala @@ -5,24 +5,28 @@ import com.azavea.maml.ast._ import io.circe._ import io.circe.syntax._ - -/** Custom, MAML-specific errors */ +/** + * Custom, MAML-specific errors + */ trait MamlError { def repr: String } - object MamlError { implicit val encodeMamlError: Encoder[MamlError] = Encoder.encodeString.contramap[MamlError](_.repr) } -/** Error to which signifies that a nodes aregument count is incorrect */ +/** + * Error signifying that a node's argument count is incorrect + */ case class IncorrectArgCount(exp: Expression, expectedArgs: Int) extends MamlError { def repr = s"Expected $expectedArgs arguments to ${exp}; instead, found ${exp.children.size}" } -/** Error to use when an unhandled node is encountered during evaluation */ +/** + * Error to use when an unhandled node is encountered during evaluation + */ case class UnhandledCase(exp: Expression, kind: MamlKind) extends MamlError { def repr = s"A branch of Interpreter logic has yet to be implemented for the expression ${exp} and the kind $kind" } diff --git a/shared/src/main/scala/error/package.scala b/shared/src/main/scala/error/package.scala index 69068cfc..5b9349c7 100644 --- a/shared/src/main/scala/error/package.scala +++ b/shared/src/main/scala/error/package.scala @@ -3,7 +3,6 @@ package com.azavea.maml import cats.data._ import cats.data.Validated._ - package object error { type Interpreted[A] = ValidatedNel[MamlError, A] } diff --git a/shared/src/main/scala/util/ClassMap.scala b/shared/src/main/scala/util/ClassMap.scala index 9f556b51..9f65da07 100644 --- a/shared/src/main/scala/util/ClassMap.scala +++ b/shared/src/main/scala/util/ClassMap.scala @@ -6,7 +6,6 @@ import io.circe._ import io.circe.syntax._ import io.circe.generic.JsonCodec - trait ClassBoundaryType case object LessThan extends ClassBoundaryType case object LessThanOrEqualTo extends ClassBoundaryType @@ -14,7 +13,6 @@ case object Exact extends ClassBoundaryType case object GreaterThanOrEqualTo extends ClassBoundaryType case object GreaterThan extends ClassBoundaryType - @JsonCodec case class ClassMap( classifications: Map[Double, Int] diff --git a/shared/src/main/scala/util/ColorRamp.scala b/shared/src/main/scala/util/ColorRamp.scala index 30fce15b..65a27e71 100644 --- a/shared/src/main/scala/util/ColorRamp.scala +++ b/shared/src/main/scala/util/ColorRamp.scala @@ -1,5 +1,3 @@ package com.azavea.maml.util - case class ColorRamp(colors: Vector[Int]) - diff --git a/shared/src/main/scala/util/Geometry.scala b/shared/src/main/scala/util/Geometry.scala index 11b05dbc..1da42703 100644 --- a/shared/src/main/scala/util/Geometry.scala +++ b/shared/src/main/scala/util/Geometry.scala @@ -2,7 +2,6 @@ package com.azavea.maml.util import io.circe.generic.JsonCodec - @JsonCodec case class MamlPoint(x: Double, y: Double) @@ -11,4 +10,3 @@ case class MamlPolygon(points: Array[MamlPoint]) @JsonCodec case class MamlMultiPolygon(polygons: Array[MamlPolygon]) - diff --git a/shared/src/main/scala/util/Histogram.scala b/shared/src/main/scala/util/Histogram.scala index 2890a7d0..b57da389 100644 --- a/shared/src/main/scala/util/Histogram.scala +++ b/shared/src/main/scala/util/Histogram.scala @@ -1,5 +1,3 @@ package com.azavea.maml.util - case class Histogram(counts: Map[Double, Int]) - diff --git a/shared/src/main/scala/util/Neighborhood.scala b/shared/src/main/scala/util/Neighborhood.scala index 465b78a2..a11157de 100644 --- a/shared/src/main/scala/util/Neighborhood.scala +++ 
b/shared/src/main/scala/util/Neighborhood.scala @@ -1,10 +1,8 @@ package com.azavea.maml.util - trait Neighborhood case class Square(extent: Int) extends Neighborhood case class Circle(radius: Double) extends Neighborhood case class Nesw(extent: Int) extends Neighborhood case class Wedge(radius: Double, startAngle: Double, endAngle: Double) extends Neighborhood case class Annulus(innerRadius: Double, outerRadius: Double) extends Neighborhood - diff --git a/shared/src/test/scala/ast/Generators.scala b/shared/src/test/scala/ast/Generators.scala index b2f42a0f..d59547c2 100644 --- a/shared/src/test/scala/ast/Generators.scala +++ b/shared/src/test/scala/ast/Generators.scala @@ -17,19 +17,21 @@ object Generators { } yield src def genBinaryOpAST(depth: Int) = for { - constructor <- Gen.lzy(Gen.oneOf( - Addition.apply _, - Subtraction.apply _, - Multiplication.apply _, - Division.apply _, - Max.apply _, - Min.apply _, - Lesser.apply _, - LesserOrEqual.apply _, - Equal.apply _, - GreaterOrEqual.apply _, - Greater.apply _ - )) + constructor <- Gen.lzy( + Gen.oneOf( + Addition.apply _, + Subtraction.apply _, + Multiplication.apply _, + Division.apply _, + Max.apply _, + Min.apply _, + Lesser.apply _, + LesserOrEqual.apply _, + Equal.apply _, + GreaterOrEqual.apply _, + Greater.apply _ + ) + ) args <- containerOfN[List, Expression](2, genExpression(depth)) } yield constructor(args) @@ -42,30 +44,32 @@ object Generators { } yield Masking(args) def genFocalOpAST(depth: Int) = for { - constructor <- Gen.lzy(Gen.oneOf( - FocalMax.apply _, - FocalMin.apply _, - FocalStdDev.apply _, - FocalMean.apply _, - FocalMedian.apply _, - FocalMode.apply _, - FocalSum.apply _ - )) - args <- containerOfN[List, Expression](1, genExpression(depth)) + constructor <- Gen.lzy( + Gen.oneOf( + FocalMax.apply _, + FocalMin.apply _, + FocalStdDev.apply _, + FocalMean.apply _, + FocalMedian.apply _, + FocalMode.apply _, + FocalSum.apply _ + ) + ) + args <- containerOfN[List, Expression](1, genExpression(depth)) neighborhood <- Gen.oneOf( - Square(123), - Circle(123.4), - Nesw(123), - Wedge(42.2, 45.1, 51.3), - Annulus(123.0, 123.4) - ) + Square(123), + Circle(123.4), + Nesw(123), + Wedge(42.2, 45.1, 51.3), + Annulus(123.0, 123.4) + ) } yield constructor(args, neighborhood, TargetCell.All) def genOpAST(depth: Int) = Gen.frequency( - (5 -> genBinaryOpAST(depth)), - (2 -> genMaskingAST(depth)), - (1 -> genClassificationAST(depth)), - (2 -> genFocalOpAST(depth)) + 5 -> genBinaryOpAST(depth), + 2 -> genMaskingAST(depth), + 1 -> genClassificationAST(depth), + 2 -> genFocalOpAST(depth) ) /* We are forced to manually control flow in this generator to prevent stack overflows @@ -73,5 +77,5 @@ object Generators { */ def genExpression(depth: Int = 1): Gen[Expression] = if (depth >= 100) genScalarSourceAST - else Gen.frequency((1 -> genBinaryOpAST(depth + 1)), (1 -> genScalarSourceAST)) + else Gen.frequency(1 -> genBinaryOpAST(depth + 1), 1 -> genScalarSourceAST) } diff --git a/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala b/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala index 9d614c9a..ddfc0ace 100644 --- a/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala +++ b/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala @@ -10,13 +10,13 @@ import cats.syntax.either._ import org.scalacheck.Prop.forAll import org.scalatest._ import org.scalatest.prop._ +import propspec._ - -class ExpressionTreeCodecSpec extends PropSpec with Checkers with ExpressionTreeCodec { 
+class ExpressionTreeCodecSpec extends AnyPropSpec with ExpressionTreeCodec { @transient private[this] lazy val logger = getLogger property("bijective serialization on whole tree") { - check(forAll(Generators.genExpression()) { (ast: Expression) => + forAll(Generators.genExpression()) { (ast: Expression) => logger.debug(s"Attempting to encode AST: $ast") val encoded = ast.asJson.noSpaces logger.debug(s"Encoded AST: $encoded") @@ -29,6 +29,6 @@ class ExpressionTreeCodecSpec extends PropSpec with Checkers with ExpressionTree logger.debug(f)(f.toString) fail(f) } - }) + } } } diff --git a/shared/src/test/scala/ast/kind/KindSpec.scala b/shared/src/test/scala/ast/kind/KindSpec.scala index cfa5750e..6f7a8d83 100644 --- a/shared/src/test/scala/ast/kind/KindSpec.scala +++ b/shared/src/test/scala/ast/kind/KindSpec.scala @@ -4,37 +4,43 @@ import com.azavea.maml.ast._ import com.azavea.maml.util._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers -class KindSpec extends FunSpec with Matchers { +class KindSpec extends AnyFunSpec with Matchers { it("Typecheck a valid focal tree") { FocalMax(List(RasterVar("test")), Square(1)) } it("Should correctly determine the output type for a foldable operation (image)") { - Max(List(IntLit(42), RasterVar("test"), IntLit(51))).kind should be (MamlKind.Image) + Max(List(IntLit(42), RasterVar("test"), IntLit(51))).kind should be(MamlKind.Image) } it("Should correctly determine the output type for a foldable operation (scalar)") { - Max(List(IntLit(42), IntLit(51))).kind should be (MamlKind.Int) + Max(List(IntLit(42), IntLit(51))).kind should be(MamlKind.Int) } it("Should correctly determine RGB output type for a foldable operation (scalar)") { - RGB(List(RasterVar("test1"), RasterVar("test2"), RasterVar("test3"))).kind should be (MamlKind.Image) + RGB(List(RasterVar("test1"), RasterVar("test2"), RasterVar("test3"))).kind should be(MamlKind.Image) } it("Should correctly determine RGB with Rescale output type for a foldable operation (scalar)") { - RGB(List(Rescale(RasterVar("test1") :: Nil, 10, 20), Rescale(RasterVar("test2") :: Nil, 30, 40), Rescale(RasterVar("test3") :: Nil, 50, 60))).kind should be (MamlKind.Image) + RGB( + List(Rescale(RasterVar("test1") :: Nil, 10, 20), Rescale(RasterVar("test2") :: Nil, 30, 40), Rescale(RasterVar("test3") :: Nil, 50, 60)) + ).kind should be(MamlKind.Image) } it("Should correctly determine RGB with Clamp output type for a foldable operation (scalar)") { - RGB(List( - Rescale(Addition(Clamp(RasterVar("test1") :: Nil, 10, 20) :: DblLit(15D) :: Nil) :: Nil, 10, 20), - Rescale(Addition(Clamp(RasterVar("test2") :: Nil, 10, 20) :: DblLit(15D) :: Nil) :: Nil, 30, 40), - Rescale(Addition(Clamp(RasterVar("test3") :: Nil, 10, 20) :: DblLit(15D) :: Nil) :: Nil, 50, 60) - )).kind should be (MamlKind.Image) + RGB( + List( + Rescale(Addition(Clamp(RasterVar("test1") :: Nil, 10, 20) :: DblLit(15d) :: Nil) :: Nil, 10, 20), + Rescale(Addition(Clamp(RasterVar("test2") :: Nil, 10, 20) :: DblLit(15d) :: Nil) :: Nil, 30, 40), + Rescale(Addition(Clamp(RasterVar("test3") :: Nil, 10, 20) :: DblLit(15d) :: Nil) :: Nil, 50, 60) + ) + ).kind should be(MamlKind.Image) } it("Should correctly determine Assemble output type for a foldable operation") { - Assemble(List(RasterVar("test1"), RasterVar("test2"), RasterVar("test3"))).kind should be (MamlKind.Image) + Assemble(List(RasterVar("test1"), RasterVar("test2"), RasterVar("test3"))).kind should be(MamlKind.Image) } } diff --git a/spark/build.sbt 
b/spark/build.sbt deleted file mode 100644 index 6c54de28..00000000 --- a/spark/build.sbt +++ /dev/null @@ -1,5 +0,0 @@ -name := "maml-spark" - -fork in Test := false - -parallelExecution in Test := false diff --git a/spark/src/main/scala/eval/RDDInterpreter.scala b/spark/src/main/scala/eval/RDDInterpreter.scala index 5b4ee852..9b1a82bb 100644 --- a/spark/src/main/scala/eval/RDDInterpreter.scala +++ b/spark/src/main/scala/eval/RDDInterpreter.scala @@ -12,7 +12,6 @@ import cats.data.{NonEmptyList => NEL, _} import scala.reflect.ClassTag - object RDDInterpreter { def DEFAULT = NaiveInterpreter( diff --git a/spark/src/main/scala/eval/RDDResult.scala b/spark/src/main/scala/eval/RDDResult.scala index e05b8803..eb667851 100644 --- a/spark/src/main/scala/eval/RDDResult.scala +++ b/spark/src/main/scala/eval/RDDResult.scala @@ -13,11 +13,10 @@ import geotrellis.layer._ import scala.reflect.ClassTag - case class RDDResult(res: TileLayerRDD[SpatialKey]) extends Result { def as[T](implicit ct: ClassTag[T]): Interpreted[T] = { val cls = ct.runtimeClass - if (classOf[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] isAssignableFrom cls) + if (classOf[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]].isAssignableFrom(cls)) Valid(res.asInstanceOf[T]) else Invalid(NEL.of(DivergingTypes(cls.getName, List("SpatialRDD")))) diff --git a/spark/src/main/scala/eval/directive/RDDOpDirectives.scala b/spark/src/main/scala/eval/directive/RDDOpDirectives.scala index d3ff4338..2a27bece 100644 --- a/spark/src/main/scala/eval/directive/RDDOpDirectives.scala +++ b/spark/src/main/scala/eval/directive/RDDOpDirectives.scala @@ -21,7 +21,6 @@ import geotrellis.vector._ import org.apache.spark.rdd._ - object RDDOpDirectives { private def doubleResults(grouped: Map[MamlKind, Seq[Result]]): Interpreted[List[Double]] = grouped.getOrElse(MamlKind.Double, List.empty).map(_.as[Double]).toList.sequence @@ -32,11 +31,15 @@ object RDDOpDirectives { private def spatialRDDResults(grouped: Map[MamlKind, Seq[Result]]): Interpreted[List[TileLayerRDD[SpatialKey]]] = grouped(MamlKind.Image).map(_.as[TileLayerRDD[SpatialKey]]).toList.sequence - /** Some sugar to wrap a common pattern. */ - def unary(f: RDD[(SpatialKey,Tile)] => RDD[(SpatialKey,Tile)], r: TileLayerRDD[SpatialKey]): Interpreted[Result] = + /** + * Some sugar to wrap a common pattern. + */ + def unary(f: RDD[(SpatialKey, Tile)] => RDD[(SpatialKey, Tile)], r: TileLayerRDD[SpatialKey]): Interpreted[Result] = Valid(RDDResult(r.withContext(f(_)))) - /** Perform a binary operation on RDDs, while preserving any metadata they had. */ + /** + * Perform a binary operation on RDDs, while preserving any metadata they had. + */ private def binary( fn: (RDD[(SpatialKey, Tile)], RDD[(SpatialKey, Tile)]) => RDD[(SpatialKey, Tile)], rdd1: TileLayerRDD[SpatialKey], @@ -45,9 +48,9 @@ object RDDOpDirectives { TileLayerRDD(fn(rdd1, rdd2), rdd1.metadata.combine(rdd2.metadata)) } - /** No, `ri` and `ir` are not the same thing, since order is significant for - * subtraction and division. - */ + /** + * No, `ri` and `ir` are not the same thing, since order is significant for subtraction and division. 
+ */ private def reduce( ri: (RDD[(SpatialKey, Tile)], Int) => RDD[(SpatialKey, Tile)], ir: (Int, RDD[(SpatialKey, Tile)]) => RDD[(SpatialKey, Tile)], @@ -57,54 +60,56 @@ object RDDOpDirectives { res1: Result, res2: Result ): Result = (res1, res2) match { - case (RDDResult(r1), RDDResult(r2)) => RDDResult(binary(rr, r1, r2)) - case (RDDResult(rdd), IntResult(int)) => RDDResult(rdd.withContext(ri(_, int))) - case (IntResult(int), RDDResult(rdd)) => RDDResult(rdd.withContext(ir(int, _))) + case (RDDResult(r1), RDDResult(r2)) => RDDResult(binary(rr, r1, r2)) + case (RDDResult(rdd), IntResult(int)) => RDDResult(rdd.withContext(ri(_, int))) + case (IntResult(int), RDDResult(rdd)) => RDDResult(rdd.withContext(ir(int, _))) case (RDDResult(rdd), DoubleResult(double)) => RDDResult(rdd.withContext(rd(_, double))) case (DoubleResult(double), RDDResult(rdd)) => RDDResult(rdd.withContext(dr(double, _))) } - /** Sugar for a common pattern with the unary math operations. */ + /** + * Sugar for a common pattern with the unary math operations. + */ private def mathy( fr: RDD[(SpatialKey, Tile)] => RDD[(SpatialKey, Tile)], fi: Int => Result, fd: Double => Result, res: Result ): Interpreted[Result] = res match { - case RDDResult(r) => unary(fr, r) - case IntResult(i) => Valid(fi(i)) + case RDDResult(r) => unary(fr, r) + case IntResult(i) => Valid(fi(i)) case DoubleResult(d) => Valid(fd(d)) } /* --- FOLDABLE EXPRESSIONS --- */ - val addition = Directive { case (a@Addition(_), childResults) => + val addition = Directive { case (a @ Addition(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ + _}, {_ +: _}, {_ + _}, {_ +: _}, {_ + _}, res1, res2) + reduce({ _ + _ }, { _ +: _ }, { _ + _ }, { _ +: _ }, { _ + _ }, res1, res2) } Valid(results) } - val subtraction = Directive { case (a@Subtraction(_), childResults) => + val subtraction = Directive { case (a @ Subtraction(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ - _}, {_ -: _}, {_ - _}, {_ -: _}, {_ - _}, res1, res2) + reduce({ _ - _ }, { _ -: _ }, { _ - _ }, { _ -: _ }, { _ - _ }, res1, res2) } Valid(results) } - val multiplication = Directive { case (a@Multiplication(_), childResults) => + val multiplication = Directive { case (a @ Multiplication(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ * _}, {_ *: _}, {_ * _}, {_ *: _}, {_ * _}, res1, res2) + reduce({ _ * _ }, { _ *: _ }, { _ * _ }, { _ *: _ }, { _ * _ }, res1, res2) } Valid(results) } - val division = Directive { case (a@Division(_), childResults) => + val division = Directive { case (a @ Division(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ / _}, {_ /: _}, {_ / _}, {_ /: _}, {_ / _}, res1, res2) + reduce({ _ / _ }, { _ /: _ }, { _ / _ }, { _ /: _ }, { _ / _ }, res1, res2) } Valid(results) @@ -112,7 +117,7 @@ object RDDOpDirectives { val max = Directive { case (Max(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_.localMax(_)}, {(i,r) => r.localMax(i)}, {_.localMax(_)}, {(d,r) => r.localMax(d)}, {_.localMax(_)}, res1, res2) + reduce({ _.localMax(_) }, { (i, r) => r.localMax(i) }, { _.localMax(_) }, { (d, r) => r.localMax(d) }, { _.localMax(_) }, res1, res2) } Valid(results) @@ -120,7 +125,7 @@ object RDDOpDirectives { val min = Directive { case (Min(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_.localMin(_)}, {(i,r) => r.localMin(i)}, 
{_.localMin(_)}, {(d,r) => r.localMin(d)}, {_.localMin(_)}, res1, res2) + reduce({ _.localMin(_) }, { (i, r) => r.localMin(i) }, { _.localMin(_) }, { (d, r) => r.localMin(d) }, { _.localMin(_) }, res1, res2) } Valid(results) @@ -128,7 +133,7 @@ object RDDOpDirectives { val or = Directive { case (Or(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ | _}, {_ |: _}, {_ | d2i(_)}, {d2i(_) |: _}, {_ | _}, res1, res2) + reduce({ _ | _ }, { _ |: _ }, { _ | d2i(_) }, { d2i(_) |: _ }, { _ | _ }, res1, res2) } Valid(results) @@ -136,7 +141,7 @@ object RDDOpDirectives { val and = Directive { case (And(_), childResults) => val results: Result = childResults.reduce { (res1, res2) => - reduce({_ & _}, {_ &: _}, {_ & d2i(_)}, {d2i(_) &: _}, {_ localAnd _}, res1, res2) + reduce({ _ & _ }, { _ &: _ }, { _ & d2i(_) }, { d2i(_) &: _ }, { _ localAnd _ }, res1, res2) } Valid(results) @@ -148,7 +153,7 @@ object RDDOpDirectives { } val results: Result = childResults.reduce { (res1, res2) => - reduce({_ ^ _}, {_ ^: _}, {_ ^ d2i(_)}, {d2i(_) ^: _}, rr, res1, res2) + reduce({ _ ^ _ }, { _ ^: _ }, { _ ^ d2i(_) }, { d2i(_) ^: _ }, rr, res1, res2) } Valid(results) @@ -158,43 +163,44 @@ object RDDOpDirectives { val equalTo = Directive { case (Equal(_), res1 :: res2 :: Nil) => val results: Result = - reduce( - {_.localEqual(_)}, {(i,r) => r.localEqual(i)}, - {_.localEqual(_)}, {(d,r) => r.localEqual(d)}, - {_.localEqual(_)}, res1, res2) + reduce({ _.localEqual(_) }, { (i, r) => r.localEqual(i) }, { _.localEqual(_) }, { (d, r) => r.localEqual(d) }, { _.localEqual(_) }, res1, res2) Valid(results) } val unequalTo = Directive { case (Unequal(_), res1 :: res2 :: Nil) => val results: Result = - reduce( - {_.localUnequal(_)}, {(i,r) => r.localUnequal(i)}, - {_.localUnequal(_)}, {(d,r) => r.localUnequal(d)}, - {_.localUnequal(_)}, res1, res2) + reduce({ _.localUnequal(_) }, + { (i, r) => r.localUnequal(i) }, + { _.localUnequal(_) }, + { (d, r) => r.localUnequal(d) }, + { _.localUnequal(_) }, + res1, + res2 + ) Valid(results) } val lessThan = Directive { case (Lesser(_), res1 :: res2 :: Nil) => - Valid(reduce({_ < _}, {_ <<: _}, {_ < _}, {_ <<: _}, {_ < _}, res1, res2)) + Valid(reduce({ _ < _ }, { _ <<: _ }, { _ < _ }, { _ <<: _ }, { _ < _ }, res1, res2)) } val lessThanOrEqualTo = Directive { case (LesserOrEqual(_), res1 :: res2 :: Nil) => - Valid(reduce({_ <= _}, {_ <=: _}, {_ <= _}, {_ <=: _}, {_ <= _}, res1, res2)) + Valid(reduce({ _ <= _ }, { _ <=: _ }, { _ <= _ }, { _ <=: _ }, { _ <= _ }, res1, res2)) } val greaterThan = Directive { case (Greater(_), res1 :: res2 :: Nil) => - Valid(reduce({_ > _}, {_ >>: _}, {_ > _}, {_ >>: _}, {_ > _}, res1, res2)) + Valid(reduce({ _ > _ }, { _ >>: _ }, { _ > _ }, { _ >>: _ }, { _ > _ }, res1, res2)) } val greaterThanOrEqualTo = Directive { case (GreaterOrEqual(_), res1 :: res2 :: Nil) => - Valid(reduce({_ >= _}, {_ >=: _}, {_ >= _}, {_ >=: _}, {_ >= _}, res1, res2)) + Valid(reduce({ _ >= _ }, { _ >=: _ }, { _ >= _ }, { _ >=: _ }, { _ >= _ }, res1, res2)) } val masking = Directive { - case (Masking(_), RDDResult(r) :: GeomResult(g: Polygon) :: Nil) => Valid(RDDResult(r.mask(g))) - case (Masking(_), GeomResult(g: Polygon) :: RDDResult(r) :: Nil) => Valid(RDDResult(r.mask(g))) + case (Masking(_), RDDResult(r) :: GeomResult(g: Polygon) :: Nil) => Valid(RDDResult(r.mask(g))) + case (Masking(_), GeomResult(g: Polygon) :: RDDResult(r) :: Nil) => Valid(RDDResult(r.mask(g))) case (Masking(_), RDDResult(r) :: GeomResult(g: MultiPolygon) :: Nil) => 
Valid(RDDResult(r.mask(g))) case (Masking(_), GeomResult(g: MultiPolygon) :: RDDResult(r) :: Nil) => Valid(RDDResult(r.mask(g))) } @@ -210,9 +216,9 @@ object RDDOpDirectives { val results: Result = childResults match { case (RDDResult(r1) :: RDDResult(r2) :: Nil) => - RDDResult(binary({ (a,b) => a.combineValues(b) { (t1,t2) => t1.combineDouble(t2)(math.atan2) }}, r1, r2)) - case (RDDResult(r) :: IntResult(i) :: Nil) => RDDResult(ri(r, i)) - case (IntResult(i) :: RDDResult(r) :: Nil) => RDDResult(ri(r, i)) + RDDResult(binary({ (a, b) => a.combineValues(b) { (t1, t2) => t1.combineDouble(t2)(math.atan2) } }, r1, r2)) + case (RDDResult(r) :: IntResult(i) :: Nil) => RDDResult(ri(r, i)) + case (IntResult(i) :: RDDResult(r) :: Nil) => RDDResult(ri(r, i)) case (RDDResult(r) :: DoubleResult(d) :: Nil) => RDDResult(rd(r, d)) case (DoubleResult(d) :: RDDResult(r) :: Nil) => RDDResult(rd(r, d)) } @@ -221,11 +227,11 @@ object RDDOpDirectives { } val pow = Directive { case (Pow(_), res1 :: res2 :: Nil) => - val f: (RDD[(SpatialKey, Tile)], RDD[(SpatialKey, Tile)]) => RDD[(SpatialKey, Tile)] = { (a,b) => - a.combineValues(b) { (t1,t2) => t1.combineDouble(t2)(math.pow) } + val f: (RDD[(SpatialKey, Tile)], RDD[(SpatialKey, Tile)]) => RDD[(SpatialKey, Tile)] = { (a, b) => + a.combineValues(b) { (t1, t2) => t1.combineDouble(t2)(math.pow) } } - Valid(reduce({_ localPow _}, { (i,r) => r.localPow(i) }, { _ localPow _ }, { (d,r) => r.localPow(d) }, f, res1, res2)) + Valid(reduce({ _ localPow _ }, { (i, r) => r.localPow(i) }, { _ localPow _ }, { (d, r) => r.localPow(d) }, f, res1, res2)) } /* --- UNARY EXPRESSIONS --- */ @@ -235,33 +241,33 @@ object RDDOpDirectives { } val sin = Directive { case (Sin(_), res :: Nil) => - mathy({_.localMapDouble(math.sin(_))}, { i => DoubleResult(math.sin(i.toDouble)) }, { d => DoubleResult(math.sin(d)) }, res) + mathy({ _.localMapDouble(math.sin(_)) }, { i => DoubleResult(math.sin(i.toDouble)) }, { d => DoubleResult(math.sin(d)) }, res) } val cos = Directive { case (Cos(_), res :: Nil) => - mathy({_.localMapDouble(math.cos(_))}, { i => DoubleResult(math.cos(i.toDouble)) }, { d => DoubleResult(math.cos(d)) }, res) + mathy({ _.localMapDouble(math.cos(_)) }, { i => DoubleResult(math.cos(i.toDouble)) }, { d => DoubleResult(math.cos(d)) }, res) } val tan = Directive { case (Tan(_), res :: Nil) => - mathy({_.localMapDouble(math.tan(_))}, { i => DoubleResult(math.tan(i.toDouble)) }, { d => DoubleResult(math.tan(d)) }, res) + mathy({ _.localMapDouble(math.tan(_)) }, { i => DoubleResult(math.tan(i.toDouble)) }, { d => DoubleResult(math.tan(d)) }, res) } val sinh = Directive { case (Sinh(_), res :: Nil) => - mathy({_.localMapDouble(math.sinh(_))}, { i => DoubleResult(math.sinh(i.toDouble)) }, { d => DoubleResult(math.sinh(d)) }, res) + mathy({ _.localMapDouble(math.sinh(_)) }, { i => DoubleResult(math.sinh(i.toDouble)) }, { d => DoubleResult(math.sinh(d)) }, res) } val cosh = Directive { case (Cosh(_), res :: Nil) => - mathy({_.localMapDouble(math.cosh(_))}, { i => DoubleResult(math.cosh(i.toDouble)) }, { d => DoubleResult(math.cosh(d)) }, res) + mathy({ _.localMapDouble(math.cosh(_)) }, { i => DoubleResult(math.cosh(i.toDouble)) }, { d => DoubleResult(math.cosh(d)) }, res) } val tanh = Directive { case (Tanh(_), res :: Nil) => - mathy({_.localMapDouble(math.tanh(_))}, { i => DoubleResult(math.tanh(i.toDouble)) }, { d => DoubleResult(math.tanh(d)) }, res) + mathy({ _.localMapDouble(math.tanh(_)) }, { i => DoubleResult(math.tanh(i.toDouble)) }, { d => DoubleResult(math.tanh(d)) }, res) } 
val asin = Directive { case (Asin(_), res :: Nil) => - mathy({_.localMapDouble(math.asin(_))}, { i => DoubleResult(math.asin(i.toDouble)) }, { d => DoubleResult(math.asin(d)) }, res) + mathy({ _.localMapDouble(math.asin(_)) }, { i => DoubleResult(math.asin(i.toDouble)) }, { d => DoubleResult(math.asin(d)) }, res) } val acos = Directive { case (Acos(_), res :: Nil) => - mathy({_.localMapDouble(math.acos(_))}, { i => DoubleResult(math.acos(i.toDouble)) }, { d => DoubleResult(math.acos(d)) }, res) + mathy({ _.localMapDouble(math.acos(_)) }, { i => DoubleResult(math.acos(i.toDouble)) }, { d => DoubleResult(math.acos(d)) }, res) } val atan = Directive { case (Atan(_), res :: Nil) => - mathy({_.localMapDouble(math.atan(_))}, { i => DoubleResult(math.atan(i.toDouble)) }, { d => DoubleResult(math.atan(d)) }, res) + mathy({ _.localMapDouble(math.atan(_)) }, { i => DoubleResult(math.atan(i.toDouble)) }, { d => DoubleResult(math.atan(d)) }, res) } val floor = Directive { case (Floor(_), res :: Nil) => @@ -271,7 +277,7 @@ object RDDOpDirectives { mathy({ _.localCeil }, { IntResult(_) }, { d => IntResult(math.ceil(d).toInt) }, res) } - val loge = Directive { case (LogE(_), res :: Nil) => + val loge = Directive { case (LogE(_), res :: Nil) => mathy({ _.localLog }, { i => DoubleResult(math.log(i)) }, { d => DoubleResult(math.log(d)) }, res) } val log10 = Directive { case (Log10(_), res :: Nil) => diff --git a/spark/src/main/scala/eval/directive/RDDSourceDirectives.scala b/spark/src/main/scala/eval/directive/RDDSourceDirectives.scala index 67fd1256..5bba9f17 100644 --- a/spark/src/main/scala/eval/directive/RDDSourceDirectives.scala +++ b/spark/src/main/scala/eval/directive/RDDSourceDirectives.scala @@ -13,15 +13,12 @@ import cats.data.Validated._ import scala.util.Try - object RDDSourceDirectives { - val rddLiteral = Directive { - case (rl@RasterLit(rdd), _) => - Try(rdd.asInstanceOf[TileLayerRDD[SpatialKey]]).toOption match { - case Some(rasterRdd) => Valid(RDDResult(rasterRdd)) - case None => - Invalid(NEL.of(NonEvaluableNode(rl, Some("Unable to treat raster literal contents as type TileLayerRDD[SpatialKey]]")))) - } + val rddLiteral = Directive { case (rl @ RasterLit(rdd), _) => + Try(rdd.asInstanceOf[TileLayerRDD[SpatialKey]]).toOption match { + case Some(rasterRdd) => Valid(RDDResult(rasterRdd)) + case None => + Invalid(NEL.of(NonEvaluableNode(rl, Some("Unable to treat raster literal contents as type TileLayerRDD[SpatialKey]]")))) + } } } - diff --git a/spark/src/test/scala/RDDOpDirectivesSpec.scala b/spark/src/test/scala/RDDOpDirectivesSpec.scala index bf7940ef..47da4370 100644 --- a/spark/src/test/scala/RDDOpDirectivesSpec.scala +++ b/spark/src/test/scala/RDDOpDirectivesSpec.scala @@ -15,6 +15,8 @@ import cats._ import cats.data.{NonEmptyList => NEL, _} import cats.data.Validated._ import org.scalatest._ +import org.scalatest.funspec.AnyFunSpec +import org.scalatest.matchers.should.Matchers import geotrellis.raster._ import geotrellis.spark._ @@ -23,17 +25,14 @@ import geotrellis.spark.testkit._ import scala.reflect._ - -class RDDOpDirectivesSpec extends FunSpec - with Matchers - with TestEnvironment { +class RDDOpDirectivesSpec extends AnyFunSpec with Matchers with TestEnvironment { val interpreter = RDDInterpreter.DEFAULT implicit class TypeRefinement(self: Interpreted[Result]) { def as[T: ClassTag]: Interpreted[T] = self match { - case Valid(r) => r.as[T] - case i@Invalid(_) => i + case Valid(r) => r.as[T] + case i @ Invalid(_) => i } } @@ -42,8 +41,8 @@ class RDDOpDirectivesSpec extends FunSpec 
it("Should interpret and evaluate spatial RDDs") { interpreter(RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(rdd, result) - case i@Invalid(_) => fail(s"$i") + case Valid(result) => rastersEqual(rdd, result) + case i @ Invalid(_) => fail(s"$i") } } @@ -54,9 +53,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) + RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should add an int and an spatial RDD together") { @@ -64,9 +63,9 @@ class RDDOpDirectivesSpec extends FunSpec createTileLayerRDD(IntArrayTile(1 until 11 toArray, 2, 5), TileLayout(1, 1, 2, 5)) interpreter(IntLit(1) + RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should add a double and an spatial RDD together") { @@ -76,18 +75,18 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(DblLit(1.0) + RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should subtract two spatial RDDs") { val expected = createTileLayerRDD(IntArrayTile.fill(0, 2, 5), TileLayout(1, 1, 2, 5)) interpreter(RasterLit(rdd) - RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should subtract an int from the spatial RDD") { @@ -97,9 +96,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) - IntLit(1)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should subtract an spatial RDD from a double") { @@ -109,9 +108,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(DblLit(1.0) - RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should multiply two spatial RDDs") { @@ -121,9 +120,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) * RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should multiply an spatial RDD by an int") { @@ -133,8 +132,8 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) * IntLit(3)).as[ContextRDD[SpatialKey, Tile, 
TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") } } @@ -145,9 +144,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(DblLit(5.0) * RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should divide two spatial RDDs") { @@ -157,9 +156,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) / RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should divide an spatial RDD by an int") { @@ -169,9 +168,9 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(RasterLit(rdd) / IntLit(1)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } it("Should divide a double by an spatial RDD") { @@ -181,8 +180,8 @@ class RDDOpDirectivesSpec extends FunSpec } interpreter(DblLit(5.0) / RasterLit(rdd)).as[ContextRDD[SpatialKey, Tile, TileLayerMetadata[SpatialKey]]] match { - case Valid(result) => rastersEqual(result, expected) - case i@Invalid(_) => println(s"$i") - } + case Valid(result) => rastersEqual(result, expected) + case i @ Invalid(_) => println(s"$i") + } } }
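The order-sensitive dispatch in the `reduce` helper of RDDOpDirectives above is the core pattern behind these local operations: separate raster-scalar (`ri`/`rd`) and scalar-raster (`ir`/`dr`) functions are threaded through because subtraction and division are not commutative. What follows is a minimal standalone sketch of that dispatch, using hypothetical simplified stand-ins (`Res`, `GridRes`, `NumRes`) rather than the project's actual `Result`/`TileLayerRDD` hierarchy, so it can be compiled and run on its own.

// Hypothetical, simplified stand-ins for the Result hierarchy used above -- not the
// project's Result/TileLayerRDD types; a "grid" here is just a vector of cell values.
sealed trait Res
case class GridRes(cells: Vector[Double]) extends Res
case class NumRes(value: Double) extends Res

object ReduceSketch {
  // gn (grid op number) and ng (number op grid) are deliberately separate arguments,
  // mirroring `reduce` above: subtraction and division are order-sensitive.
  def reduce(
    gg: (Vector[Double], Vector[Double]) => Vector[Double],
    gn: (Vector[Double], Double) => Vector[Double],
    ng: (Double, Vector[Double]) => Vector[Double],
    res1: Res,
    res2: Res
  ): Res = (res1, res2) match {
    case (GridRes(a), GridRes(b)) => GridRes(gg(a, b))
    case (GridRes(a), NumRes(n))  => GridRes(gn(a, n))
    case (NumRes(n), GridRes(a))  => GridRes(ng(n, a))
    case (NumRes(a), NumRes(b))   => NumRes(gg(Vector(a), Vector(b)).head)
  }

  def main(args: Array[String]): Unit = {
    def subtract(r1: Res, r2: Res): Res =
      reduce(
        (a, b) => a.zip(b).map { case (x, y) => x - y },
        (a, n) => a.map(_ - n),
        (n, a) => a.map(n - _),
        r1,
        r2
      )

    val grid = GridRes(Vector(1.0, 2.0, 3.0))
    println(subtract(grid, NumRes(1)))  // GridRes(Vector(0.0, 1.0, 2.0))
    println(subtract(NumRes(1), grid))  // GridRes(Vector(0.0, -1.0, -2.0))
  }
}

Run as-is, the two println calls show that grid - 1 and 1 - grid diverge, which is exactly why `reduce` cannot collapse `ri`/`ir` (or `rd`/`dr`) into a single function.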