From 7a3d38075e8e348d5ed77f2d485cd6e169413fe8 Mon Sep 17 00:00:00 2001 From: Grigory Pomadchin Date: Mon, 29 Apr 2019 16:23:48 -0400 Subject: [PATCH 1/6] Bump project version up --- jvm/version.sbt | 2 +- version.sbt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/jvm/version.sbt b/jvm/version.sbt index 0d3bc31e..dfd7e193 100644 --- a/jvm/version.sbt +++ b/jvm/version.sbt @@ -1 +1 @@ -version in ThisBuild := "0.3.3" +version in ThisBuild := "0.3.4-SNAPSHOT" diff --git a/version.sbt b/version.sbt index f45dd2d1..dfd7e193 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "0.3.3-SNAPSHOT" +version in ThisBuild := "0.3.4-SNAPSHOT" From f92a30b1ca6154b2b3994597685935aada13c6f1 Mon Sep 17 00:00:00 2001 From: Nathan Zimmerman Date: Tue, 30 Apr 2019 10:36:59 -0400 Subject: [PATCH 2/6] Fix codec spec for 2.12 (#95) --- CHANGELOG.md | 3 +++ .../scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bf507c3a..933be80b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add STRTA and migrate to CircleCI [#93](https://github.com/geotrellis/maml/pull/93) - Add changelog and pull request template [#96](https://github.com/geotrellis/maml/pull/96) +### Changed +- Fixed 2.12 compilation in tests [#95](https://github.com/geotrellis/maml/pull/95) + ## [0.3.2] - 2019-04-17 ### Added - Add hillshade [#77](https://github.com/geotrellis/maml/pull/77) diff --git a/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala b/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala index e0d34820..07697f6b 100644 --- a/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala +++ b/shared/src/test/scala/ast/codec/tree/MamlExpressionTreeCodecSpec.scala @@ -12,7 +12,7 @@ import org.scalatest._ import org.scalatest.prop._ -class ExpressionTreeCodecSpec extends PropSpec with Checkers with LazyLogging { +class ExpressionTreeCodecSpec extends PropSpec with Checkers with LazyLogging with ExpressionTreeCodec { property("bijective serialization on whole tree") { check(forAll(Generators.genExpression()) { (ast: Expression) => logger.debug(s"Attempting to encode AST: $ast") From 81197d4a3d43be11ca1480288599ccae429fe81d Mon Sep 17 00:00:00 2001 From: James Santucci Date: Wed, 15 May 2019 17:23:20 -0400 Subject: [PATCH 3/6] Add parallel interpreter (#101) * Make metals happy * Include metals files in gitignore * .map .sequence => .traverse * First cut -- ParallelInterpreter * Update changelog * Prove consistency with NaiveInterpreter * Fix evaluation (thanks, tests!) * scalafmt * Add target monad to interpreter so ParallelInterpreter can extend it Necessary for downstream use in e.g. 
geotrellis-server * Add sleep expression and use it to test parallelism * Don't know how to interpret sleep by default * Make test look better * ParallelInterpreter -> ConcurrentInterpreter * Add actually parallel interpreter + tests --- .gitignore | 3 + .scalafmt.conf | 1 + CHANGELOG.md | 1 + .../scala/eval/ConcurrentInterpreter.scala | 58 +++ jvm/src/main/scala/eval/Interpreter.scala | 4 +- .../main/scala/eval/NaiveInterpreter.scala | 8 +- .../main/scala/eval/ParallelInterpreter.scala | 65 ++++ .../main/scala/eval/ScopedInterpreter.scala | 6 +- .../scala/eval/directive/OpDirectives.scala | 7 + .../scala/eval/ConcurrentEvaluationSpec.scala | 342 ++++++++++++++++++ .../scala/eval/ParallelEvaluationSpec.scala | 342 ++++++++++++++++++ project/build.properties | 2 +- shared/src/main/scala/ast/Expression.scala | 8 +- .../scala/ast/codec/MamlCodecInstances.scala | 7 + .../ast/codec/tree/ExpressionTreeCodec.scala | 2 + 15 files changed, 844 insertions(+), 12 deletions(-) create mode 100644 .scalafmt.conf create mode 100644 jvm/src/main/scala/eval/ConcurrentInterpreter.scala create mode 100644 jvm/src/main/scala/eval/ParallelInterpreter.scala create mode 100644 jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala create mode 100644 jvm/src/test/scala/eval/ParallelEvaluationSpec.scala diff --git a/.gitignore b/.gitignore index 61a6b2f2..2cc8ced3 100644 --- a/.gitignore +++ b/.gitignore @@ -48,3 +48,6 @@ nohup.out site/ .metadata/ + +.metals +.bloop \ No newline at end of file diff --git a/.scalafmt.conf b/.scalafmt.conf new file mode 100644 index 00000000..311c2de2 --- /dev/null +++ b/.scalafmt.conf @@ -0,0 +1 @@ +version=2.0.0-RC4 \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 933be80b..e7939431 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add README [#92](https://github.com/geotrellis/maml/pull/92) - Add STRTA and migrate to CircleCI [#93](https://github.com/geotrellis/maml/pull/93) - Add changelog and pull request template [#96](https://github.com/geotrellis/maml/pull/96) +- Added `ParallelInterpreter` [#101](https://github.com/geotrellis/maml/pull/101) ### Changed - Fixed 2.12 compilation in tests [#95](https://github.com/geotrellis/maml/pull/95) diff --git a/jvm/src/main/scala/eval/ConcurrentInterpreter.scala b/jvm/src/main/scala/eval/ConcurrentInterpreter.scala new file mode 100644 index 00000000..e4538a1f --- /dev/null +++ b/jvm/src/main/scala/eval/ConcurrentInterpreter.scala @@ -0,0 +1,58 @@ +package com.azavea.maml.eval + +import com.azavea.maml.ast._ +import com.azavea.maml.error._ +import com.azavea.maml.eval.directive._ + +import cats._ +import cats.implicits._ +import cats.data.Validated._ +import cats.data.{NonEmptyList => NEL, _} +import cats.effect.{Concurrent, Fiber} + +import scala.reflect.ClassTag + +class ConcurrentInterpreter[F[_]](directives: List[Directive])( + implicit Conc: Concurrent[F] +) extends Interpreter[F] { + def apply(exp: Expression): F[Interpreted[Result]] = { + val children = evalInF(exp) + val out = children map { + _.andThen({ childRes => + instructions(exp, childRes) + }) + } + out + } + + def evalInF(expression: Expression): F[Interpreted[List[Result]]] = { + val fibsF: F[List[Fiber[F, Interpreted[Result]]]] = + expression.children traverse { expr => + Conc.start(apply(expr)) + } + fibsF flatMap { _.traverse { _.join } } map { _.sequence } + } + + val fallbackDirective: Directive = { + case (exp, res) => 
Invalid(NEL.of(UnhandledCase(exp, exp.kind))) + } + + def prependDirective(directive: Directive) = + new ConcurrentInterpreter[F](directive +: directives) + + def appendDirective(directive: Directive) = + new ConcurrentInterpreter[F](directives :+ directive) + + def instructions( + expression: Expression, + children: List[Result] + ): Interpreted[Result] = + directives + .reduceLeft(_ orElse _) + .orElse(fallbackDirective)((expression, children)) +} + +object ConcurrentInterpreter { + def DEFAULT[T[_]: Concurrent] = + new ConcurrentInterpreter[T](NaiveInterpreter.DEFAULT.directives) +} diff --git a/jvm/src/main/scala/eval/Interpreter.scala b/jvm/src/main/scala/eval/Interpreter.scala index a11b830e..41db3f7b 100644 --- a/jvm/src/main/scala/eval/Interpreter.scala +++ b/jvm/src/main/scala/eval/Interpreter.scala @@ -11,8 +11,8 @@ import cats.data.{NonEmptyList => NEL, _} import scala.reflect.ClassTag -trait Interpreter { - def apply(exp: Expression): Interpreted[Result] +trait Interpreter[F[_]] { + def apply(exp: Expression): F[Interpreted[Result]] } object Interpreter { diff --git a/jvm/src/main/scala/eval/NaiveInterpreter.scala b/jvm/src/main/scala/eval/NaiveInterpreter.scala index 131268f7..71053eaa 100644 --- a/jvm/src/main/scala/eval/NaiveInterpreter.scala +++ b/jvm/src/main/scala/eval/NaiveInterpreter.scala @@ -12,17 +12,17 @@ import cats.data.{NonEmptyList => NEL, _} import scala.reflect.ClassTag -case class NaiveInterpreter(directives: List[Directive]) extends Interpreter { +case class NaiveInterpreter(directives: List[Directive]) extends Interpreter[Id] { def apply(exp: Expression): Interpreted[Result] = { - val children: Interpreted[List[Result]] = exp.children.map(apply).sequence + val children: Interpreted[List[Result]] = exp.children.traverse(apply) children.andThen({ childRes => instructions(exp, childRes) }) } - def prependDirective(directive: Directive): Interpreter = + def prependDirective(directive: Directive): Interpreter[Id] = NaiveInterpreter(directive +: directives) - def appendDirective(directive: Directive): Interpreter = + def appendDirective(directive: Directive): Interpreter[Id] = NaiveInterpreter(directives :+ directive) val fallbackDirective: Directive = diff --git a/jvm/src/main/scala/eval/ParallelInterpreter.scala b/jvm/src/main/scala/eval/ParallelInterpreter.scala new file mode 100644 index 00000000..08cb5423 --- /dev/null +++ b/jvm/src/main/scala/eval/ParallelInterpreter.scala @@ -0,0 +1,65 @@ +package com.azavea.maml.eval + +import com.azavea.maml.ast._ +import com.azavea.maml.error._ +import com.azavea.maml.eval.directive._ + +import cats._ +import cats.implicits._ +import cats.data.Validated._ +import cats.data.{NonEmptyList => NEL, _} +import cats.effect.ContextShift + +import scala.reflect.ClassTag + +class ParallelInterpreter[F[_]: Monad, G[_]](directives: List[Directive])( + implicit Par: Parallel[F, G], + contextShift: ContextShift[F] +) extends Interpreter[F] { + def apply(exp: Expression): F[Interpreted[Result]] = { + val children = evalInF(exp) + val out = children map { + _.andThen({ childRes => + instructions(exp, childRes) + }) + } + out + } + + def evalInF( + expression: Expression + )(implicit contextShift: ContextShift[F]): F[Interpreted[List[Result]]] = { + val resultsF: F[List[Interpreted[Result]]] = + expression.children parTraverse { expr => + apply(expr) + } + resultsF map { _.sequence } + } + + val fallbackDirective: Directive = { + case (exp, res) => Invalid(NEL.of(UnhandledCase(exp, exp.kind))) + } + + def prependDirective(directive: 
Directive) = + new ParallelInterpreter[F, G](directive +: directives) + + def appendDirective(directive: Directive) = + new ParallelInterpreter[F, G](directives :+ directive) + + def instructions( + expression: Expression, + children: List[Result] + ): Interpreted[Result] = + directives + .reduceLeft(_ orElse _) + .orElse(fallbackDirective)((expression, children)) +} + +object ParallelInterpreter { + def DEFAULT[T[_], U[_]]( + implicit P: Parallel[T, U], + M: Monad[T], + contextShift: ContextShift[T] + ) = + new ParallelInterpreter[T, U](NaiveInterpreter.DEFAULT.directives) +} diff --git a/jvm/src/main/scala/eval/ScopedInterpreter.scala b/jvm/src/main/scala/eval/ScopedInterpreter.scala index a613a56e..3a86ab70 100644 --- a/jvm/src/main/scala/eval/ScopedInterpreter.scala +++ b/jvm/src/main/scala/eval/ScopedInterpreter.scala @@ -10,7 +10,7 @@ import cats.data.{NonEmptyList => NEL, _} import geotrellis.raster.GridBounds -trait ScopedInterpreter[Scope] extends Interpreter { +trait ScopedInterpreter[Scope] extends Interpreter[Id] { def scopeFor(exp: Expression, previous: Option[Scope]): Scope def appendDirective(directive: ScopedDirective[Scope]): ScopedInterpreter[Scope] def prependDirective(directive: ScopedDirective[Scope]): ScopedInterpreter[Scope] @@ -20,10 +20,10 @@ trait ScopedInterpreter[Scope] extends Interpreter { def apply(exp: Expression): Interpreted[Result] = { def eval(exp: Expression, maybeScope: Option[Scope] = None): Interpreted[Result] = { val currentScope = scopeFor(exp, maybeScope) - val children: Interpreted[List[Result]] = exp.children.map({ childTree => + val children: Interpreted[List[Result]] = exp.children.traverse({ childTree => val childScope = scopeFor(childTree, Some(currentScope)) eval(childTree, Some(childScope)) - }).sequence + }) children.andThen({ childResult => instructions(exp, childResult, currentScope) }) } eval(exp) diff --git a/jvm/src/main/scala/eval/directive/OpDirectives.scala b/jvm/src/main/scala/eval/directive/OpDirectives.scala index 12296e72..2a963a34 100644 --- a/jvm/src/main/scala/eval/directive/OpDirectives.scala +++ b/jvm/src/main/scala/eval/directive/OpDirectives.scala @@ -14,6 +14,7 @@ import Validated._ import geotrellis.vector._ import geotrellis.raster.{Tile, isData} +import scala.concurrent.duration._ import scala.util.Try @@ -348,6 +349,12 @@ object OpDirectives { Valid(results) } + /** Sleeping */ + val sleep = Directive { case (Sleep(n, _), childResults) => + Thread.sleep(n * 1000) + Valid(childResults.head) + } + /** Tile-specific Operations */ val masking = Directive { case (mask@Masking(_), childResults) => ((childResults(0), childResults(1)) match { diff --git a/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala b/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala new file mode 100644 index 00000000..c1be2db6 --- /dev/null +++ b/jvm/src/test/scala/eval/ConcurrentEvaluationSpec.scala @@ -0,0 +1,342 @@ +package com.azavea.maml.eval + +import com.azavea.maml.ast._ +import com.azavea.maml.ast.codec.tree.ExpressionTreeCodec +import com.azavea.maml.dsl._ +import com.azavea.maml.error._ +import com.azavea.maml.eval._ +import com.azavea.maml.eval.tile._ +import com.azavea.maml.eval.directive.SourceDirectives._ +import com.azavea.maml.eval.directive.OpDirectives._ +import com.azavea.maml.ast.codec.tree.ExpressionTreeCodec +import com.azavea.maml.util.Square + +import io.circe._ +import io.circe.syntax._ +import geotrellis.raster._ +import geotrellis.vector._ +import geotrellis.proj4.WebMercator +import cats._ +import 
cats.data.{NonEmptyList => NEL, _} +import cats.effect._ +import Validated._ +import org.scalatest._ + +import scala.reflect._ + +import org.scalatest._ + +import scala.concurrent.ExecutionContext.Implicits.global +import java.time.Instant + +class ConcurrentEvaluationSpec + extends FunSpec + with Matchers + with ExpressionTreeCodec { + implicit val cs = IO.contextShift(global) + val interpreter = ConcurrentInterpreter.DEFAULT[IO].prependDirective(sleep) + + implicit def tileIsTileLiteral( + tile: Tile + ): RasterLit[ProjectedRaster[MultibandTile]] = + RasterLit( + ProjectedRaster( + MultibandTile(tile), + Extent(0, 0, 0.05, 0.05), + WebMercator + ) + ) + + implicit class TypeRefinement(self: Interpreted[Result]) { + def as[T: ClassTag]: Interpreted[T] = self match { + case Valid(r) => r.as[T] + case i @ Invalid(_) => i + } + } + + it("should take less time than the total duration of its leaves") { + val sleepDuration = 3L + val expr = Addition(List( + Sleep(sleepDuration, List(IntLit(1))), + Sleep(sleepDuration, List(IntLit(1))))) + val now1 = Instant.now.toEpochMilli + interpreter(expr).unsafeRunSync.as[Int] should be(Valid(2)) + val now2 = Instant.now.toEpochMilli + val duration = (now2 - now1) / 1000 + duration should be < (2 * sleepDuration) + } + + it("Should interpret and evaluate to Boolean literals") { + interpreter(BoolLit(true)).unsafeRunSync.as[Boolean] should be(Valid(true)) + interpreter(false).unsafeRunSync.as[Boolean] should be(Valid(false)) + interpreter(true).unsafeRunSync.as[Boolean] should be(Valid(true)) + } + + it("Should interpret and evaluate to Int literals") { + interpreter(IntLit(42)).unsafeRunSync.as[Int] should be(Valid(42)) + interpreter(IntLit(4200)).unsafeRunSync.as[Int] should be(Valid(4200)) + } + + it("Should interpret and evaluate to double literals") { + interpreter(DblLit(42.0)).unsafeRunSync.as[Double] should be(Valid(42.0)) + interpreter(DblLit(4200.0123)).unsafeRunSync.as[Double] should be( + Valid(4200.0123) + ) + } + + it("Should interpret and evaluate addition with scalars") { + interpreter(IntLit(42) + DblLit(42)).unsafeRunSync.as[Double] should be( + Valid(84.0) + ) + } + + it("Should interpret and evaluate multiplication with scalars") { + interpreter(IntLit(2) * DblLit(42)).unsafeRunSync.as[Double] should be( + Valid(84.0) + ) + } + + it("Should interpret and evaluate division with scalars") { + interpreter(DblLit(20) / DblLit(2) / DblLit(2)).unsafeRunSync + .as[Double] should be(Valid(5.0)) + } + + it("Should interpret and evaluate comparisions with scalars") { + interpreter(DblLit(20) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(19) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(29) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(29) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) === DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) === DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) === DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) >= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) >= 
DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) >= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + + interpreter(DblLit(20) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(19) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + } + + it("Should interpret and evaluate ndvi") { + interpreter((DblLit(5) - DblLit(2)) / (DblLit(5) + DblLit(2))).unsafeRunSync + .as[Double] match { + case Valid(x) => x should be(0.42857 +- 0.001) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile addition") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) + IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(2) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile subtraction") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) - IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile multiplication") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) * IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(1, 0) should be(4) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile division") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) / IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(1, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("should interpret and evaluate tile comparison") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 2, 2), + IntArrayTile(2 to 5 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } 
+ interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 2, 2), + IntArrayTile(1 to 4 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 2, 2), + IntArrayTile(0 to 3 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter(FocalSlope(List(IntArrayTile(1 to 100 toArray, 10, 10)))).unsafeRunSync + .as[MultibandTile] match { + case Valid(t) => t.bands.head.get(5, 5) should be(10) + case i @ Invalid(_) => fail(s"$i") + } + + /** The hillshade test is a bit more involved than some of the above + * See http://bit.ly/Qj0YPg for more information about the proper interpretation + * of hillshade values + **/ + val hillshadeTile = + IntArrayTile( + Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, + 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), + 5, + 5 + ) + val hillshadeE = + Extent(0, 0, 25, 25) + val hillshadeProjectedRaster = + ProjectedRaster( + Raster(MultibandTile(hillshadeTile), hillshadeE), + WebMercator + ) + + interpreter( + FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(2, 2) should be(77) + case i @ Invalid(_) => fail(s"$i") + } + } +} diff --git a/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala b/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala new file mode 100644 index 00000000..0fc7389d --- /dev/null +++ b/jvm/src/test/scala/eval/ParallelEvaluationSpec.scala @@ -0,0 +1,342 @@ +package com.azavea.maml.eval + +import com.azavea.maml.ast._ +import com.azavea.maml.ast.codec.tree.ExpressionTreeCodec +import com.azavea.maml.dsl._ +import com.azavea.maml.error._ +import com.azavea.maml.eval._ +import com.azavea.maml.eval.tile._ +import com.azavea.maml.eval.directive.SourceDirectives._ +import com.azavea.maml.eval.directive.OpDirectives._ +import com.azavea.maml.ast.codec.tree.ExpressionTreeCodec +import 
com.azavea.maml.util.Square + +import io.circe._ +import io.circe.syntax._ +import geotrellis.raster._ +import geotrellis.vector._ +import geotrellis.proj4.WebMercator +import cats._ +import cats.data.{NonEmptyList => NEL, _} +import cats.effect._ +import Validated._ +import org.scalatest._ + +import scala.reflect._ + +import org.scalatest._ + +import scala.concurrent.ExecutionContext.Implicits.global +import java.time.Instant + +class ParallelEvaluationSpec + extends FunSpec + with Matchers + with ExpressionTreeCodec { + implicit val cs = IO.contextShift(global) + val interpreter = ParallelInterpreter.DEFAULT[IO, IO.Par].prependDirective(sleep) + + implicit def tileIsTileLiteral( + tile: Tile + ): RasterLit[ProjectedRaster[MultibandTile]] = + RasterLit( + ProjectedRaster( + MultibandTile(tile), + Extent(0, 0, 0.05, 0.05), + WebMercator + ) + ) + + implicit class TypeRefinement(self: Interpreted[Result]) { + def as[T: ClassTag]: Interpreted[T] = self match { + case Valid(r) => r.as[T] + case i @ Invalid(_) => i + } + } + + it("should take less time than the total duration of its leaves") { + val sleepDuration = 3L + val expr = Addition(List( + Sleep(sleepDuration, List(IntLit(1))), + Sleep(sleepDuration, List(IntLit(1))))) + val now1 = Instant.now.toEpochMilli + interpreter(expr).unsafeRunSync.as[Int] should be(Valid(2)) + val now2 = Instant.now.toEpochMilli + val duration = (now2 - now1) / 1000 + duration should be < (2 * sleepDuration) + } + + it("Should interpret and evaluate to Boolean literals") { + interpreter(BoolLit(true)).unsafeRunSync.as[Boolean] should be(Valid(true)) + interpreter(false).unsafeRunSync.as[Boolean] should be(Valid(false)) + interpreter(true).unsafeRunSync.as[Boolean] should be(Valid(true)) + } + + it("Should interpret and evaluate to Int literals") { + interpreter(IntLit(42)).unsafeRunSync.as[Int] should be(Valid(42)) + interpreter(IntLit(4200)).unsafeRunSync.as[Int] should be(Valid(4200)) + } + + it("Should interpret and evaluate to double literals") { + interpreter(DblLit(42.0)).unsafeRunSync.as[Double] should be(Valid(42.0)) + interpreter(DblLit(4200.0123)).unsafeRunSync.as[Double] should be( + Valid(4200.0123) + ) + } + + it("Should interpret and evaluate addition with scalars") { + interpreter(IntLit(42) + DblLit(42)).unsafeRunSync.as[Double] should be( + Valid(84.0) + ) + } + + it("Should interpret and evaluate multiplication with scalars") { + interpreter(IntLit(2) * DblLit(42)).unsafeRunSync.as[Double] should be( + Valid(84.0) + ) + } + + it("Should interpret and evaluate division with scalars") { + interpreter(DblLit(20) / DblLit(2) / DblLit(2)).unsafeRunSync + .as[Double] should be(Valid(5.0)) + } + + it("Should interpret and evaluate comparisions with scalars") { + interpreter(DblLit(20) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(19) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(29) < DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(29) <= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) === DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) === DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) === 
DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + + interpreter(DblLit(20) >= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + interpreter(DblLit(19) >= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) >= DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + + interpreter(DblLit(20) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(19) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(false) + ) + interpreter(DblLit(29) > DblLit(20)).unsafeRunSync.as[Boolean] should be( + Valid(true) + ) + } + + it("Should interpret and evaluate ndvi") { + interpreter((DblLit(5) - DblLit(2)) / (DblLit(5) + DblLit(2))).unsafeRunSync + .as[Double] match { + case Valid(x) => x should be(0.42857 +- 0.001) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile addition") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) + IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(2) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile subtraction") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) - IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile multiplication") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) * IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(1, 0) should be(4) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("Should interpret and evaluate tile division") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) / IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(1, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + } + + it("should interpret and evaluate tile comparison") { + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) < IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) <= IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 
2, 2), + IntArrayTile(2 to 5 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 2, 2), + IntArrayTile(1 to 4 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + Equal( + List( + IntArrayTile(1 to 4 toArray, 2, 2), + IntArrayTile(0 to 3 toArray, 2, 2) + ) + ) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) >= IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(2 to 5 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(1 to 4 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(0) + case i @ Invalid(_) => fail(s"$i") + } + interpreter( + IntArrayTile(1 to 4 toArray, 2, 2) > IntArrayTile(0 to 3 toArray, 2, 2) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(0, 0) should be(1) + case i @ Invalid(_) => fail(s"$i") + } + interpreter(FocalSlope(List(IntArrayTile(1 to 100 toArray, 10, 10)))).unsafeRunSync + .as[MultibandTile] match { + case Valid(t) => t.bands.head.get(5, 5) should be(10) + case i @ Invalid(_) => fail(s"$i") + } + + /** The hillshade test is a bit more involved than some of the above + * See http://bit.ly/Qj0YPg for more information about the proper interpretation + * of hillshade values + **/ + val hillshadeTile = + IntArrayTile( + Array(0, 0, 0, 0, 0, 0, 2450, 2461, 2483, 0, 0, 2452, 2461, 2483, 0, 0, + 2447, 2455, 2477, 0, 0, 0, 0, 0, 0), + 5, + 5 + ) + val hillshadeE = + Extent(0, 0, 25, 25) + val hillshadeProjectedRaster = + ProjectedRaster( + Raster(MultibandTile(hillshadeTile), hillshadeE), + WebMercator + ) + + interpreter( + FocalHillshade(List(RasterLit(hillshadeProjectedRaster)), 315, 45) + ).unsafeRunSync.as[MultibandTile] match { + case Valid(t) => t.bands.head.get(2, 2) should be(77) + case i @ Invalid(_) => fail(s"$i") + } + } +} diff --git a/project/build.properties b/project/build.properties index c091b86c..133a8f19 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.16 +sbt.version=0.13.17 diff --git a/shared/src/main/scala/ast/Expression.scala b/shared/src/main/scala/ast/Expression.scala index 26efc218..95f62d09 100644 --- a/shared/src/main/scala/ast/Expression.scala +++ b/shared/src/main/scala/ast/Expression.scala @@ -20,7 +20,7 @@ object Expression { case None => 
Invalid(NEL.of(NoVariableBinding(v, params))) } case _ => - subExpr.children.map(eval(_)).sequence.map(subExpr.withChildren) + subExpr.children.traverse(eval(_)).map(subExpr.withChildren) } eval(expr) } @@ -34,7 +34,7 @@ sealed abstract class Expression(val sym: String) extends Product with Serializa def withChildren(newChildren: List[Expression]): Expression def bind(args: Map[String, Literal]): Interpreted[Expression] = - children.map(_.bind(args)).sequence.map(this.withChildren) + children.traverse(_.bind(args)).map(this.withChildren) } case class Addition(children: List[Expression]) extends Expression("+") with FoldableExpression { @@ -349,3 +349,7 @@ case class RasterVar(name: String) extends Expression("rasterV") with Variable { val kind = MamlKind.Image } +case class Sleep(seconds: Long, children: List[Expression]) extends Expression("sleep") with UnaryExpression { + val kindDerivation = UnaryExpression.imageOrScalar + def withChildren(newChildren: List[Expression]): Expression = copy(children = newChildren) +} diff --git a/shared/src/main/scala/ast/codec/MamlCodecInstances.scala b/shared/src/main/scala/ast/codec/MamlCodecInstances.scala index b295f720..0d5819fd 100644 --- a/shared/src/main/scala/ast/codec/MamlCodecInstances.scala +++ b/shared/src/main/scala/ast/codec/MamlCodecInstances.scala @@ -50,6 +50,13 @@ trait MamlCodecInstances extends MamlUtilityCodecs { implicit lazy val encodeMasking: Encoder[Masking] = Encoder.forProduct2("args", "symbol")(u => (u.children, u.sym)) + implicit lazy val decoderSleep: Decoder[Sleep] = + Decoder.forProduct2("seconds", "args"){ + (seconds: Long, args: List[Expression]) => Sleep(seconds, args) + } + implicit lazy val encoderSleep: Encoder[Sleep] = + Encoder.forProduct2("seconds", "args")(u => (u.seconds, u.children)) + implicit lazy val decodePow: Decoder[Pow] = Decoder.forProduct1("args"){ args: List[Expression] => Pow(args) } implicit lazy val encodePow: Encoder[Pow] = diff --git a/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala b/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala index fc324603..b4e938d3 100644 --- a/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala +++ b/shared/src/main/scala/ast/codec/tree/ExpressionTreeCodec.scala @@ -76,6 +76,7 @@ trait ExpressionTreeCodec extends MamlCodecInstances { case ltoe @ LesserOrEqual(_) => ltoe.asJson case lt @ Lesser(_) => lt.asJson case pow @ Pow(_) => pow.asJson + case sleep @ Sleep(_, _) => sleep.asJson } implicit lazy val totalDecoder: Decoder[Expression] = Decoder.instance[Expression] { cursor => @@ -139,6 +140,7 @@ trait ExpressionTreeCodec extends MamlCodecInstances { case "geom" => Decoder[GeomLit] case "geomV" => Decoder[GeomVar] case "rasterV" => Decoder[RasterVar] + case "sleep" => Decoder[Sleep] } match { case Some(decoder) => decoder.widen(cursor) case None => Left(DecodingFailure(s"No symbol provided for MAML expression", cursor.history)) From b1d33ff66e41a92bd3f57034bf5f591acdf89cfc Mon Sep 17 00:00:00 2001 From: Rocky Breslow Date: Wed, 22 May 2019 10:22:48 -0400 Subject: [PATCH 4/6] Add automated releases to Sonatype Nexus through CI (#98) * Add automated releases to Sonatype Nexus through CI * Add GitHub issue template for releases * Temporarily deploy my branch * Add changelog entry * add .jvmopts * fixup! add .jvmopts * only build master on tagged commits * fixup! 
only build master on tagged commits * Add master back * add required tag filters to pass tags to required jobs * update comment about value of version --- .circleci/config.yml | 86 ++++++++++++++++--- .github/ISSUE_TEMPLATE/release.md | 31 +++++++ .jvmopts | 1 + CHANGELOG.md | 1 + build.sbt | 138 +++++++++++++++++++----------- project/build.properties | 2 +- project/plugins.sbt | 29 +++---- scripts/cipublish | 27 ++++++ version.sbt | 1 - 9 files changed, 236 insertions(+), 80 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/release.md create mode 100644 .jvmopts create mode 100755 scripts/cipublish delete mode 100644 version.sbt diff --git a/.circleci/config.yml b/.circleci/config.yml index 09a3e4fc..b26a85e0 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,9 +1,9 @@ aliases: - &restore_sbt_cache - key: sbt-{{ checksum "/tmp/scala_version" }}-cache + key: sbt-cache-{{ checksum "/tmp/scala_version" }} - &save_sbt_cache - key: sbt-{{ checksum "/tmp/scala_version" }}-cache-{{ epoch }} + key: sbt-cache-{{ checksum "/tmp/scala_version" }}-{{ epoch }} paths: - "~/.ivy2/cache" - "~/.sbt" @@ -18,25 +18,87 @@ aliases: command: ./scripts/cibuild - save_cache: *save_sbt_cache + - &run_cipublish + - checkout + - run: echo "${SCALA_VERSION}" > /tmp/scala_version + - restore_cache: *restore_sbt_cache + - run: + name: "Import signing key" + command: | + echo "${GPG_KEY}" | base64 -d > signing_key.asc && \ + gpg --batch \ + --passphrase "${GPG_PASSPHRASE}" \ + --import signing_key.asc + - run: + name: Executing cipublish + command: ./scripts/cipublish + + # Build environments + - &openjdk8-scala2_11_12-nodelts_environment + docker: + - image: circleci/openjdk:8-stretch-node + environment: + SCALA_VERSION: 2.11.12 + + - &openjdk8-scala2_12_8-nodelts_environment + docker: + - image: circleci/openjdk:8-stretch-node + environment: + SCALA_VERSION: 2.12.8 + version: 2 workflows: version: 2 build: jobs: - - "openjdk8-scala2.11.12-nodelts" - - "openjdk8-scala2.12.8-nodelts" + - "openjdk8-scala2.11.12-nodelts": + filters: # required since `openjdk8-scala2.11.12-nodelts_deploy` has tag filters AND requires `openjdk8-scala2.11.12-nodelts` + tags: + only: + - /^(.*)$/ + - "openjdk8-scala2.12.8-nodelts": + filters: # required since `openjdk8-scala2.12.8-nodelts_deploy` has tag filters AND requires `openjdk8-scala2.12.8-nodelts` + tags: + only: + - /^(.*)$/ + - "openjdk8-scala2.11.12-nodelts_deploy": + requires: + - "openjdk8-scala2.11.12-nodelts" + filters: + tags: + only: + - /^(.*)$/ + branches: + only: + - master + - develop + - feature/jrb/automated-releases-to-sonatype + - "openjdk8-scala2.12.8-nodelts_deploy": + requires: + - "openjdk8-scala2.12.8-nodelts" + filters: + tags: + only: + - /^(.*)$/ + branches: + only: + - master + - develop + - feature/jrb/automated-releases-to-sonatype jobs: "openjdk8-scala2.11.12-nodelts": - docker: - - image: circleci/openjdk:8-stretch-node - environment: - SCALA_VERSION: 2.11.12 + <<: *openjdk8-scala2_11_12-nodelts_environment steps: *run_cibuild "openjdk8-scala2.12.8-nodelts": - docker: - - image: circleci/openjdk:8-stretch-node - environment: - SCALA_VERSION: 2.12.8 + <<: *openjdk8-scala2_12_8-nodelts_environment steps: *run_cibuild + + "openjdk8-scala2.11.12-nodelts_deploy": + <<: *openjdk8-scala2_11_12-nodelts_environment + steps: *run_cipublish + + "openjdk8-scala2.12.8-nodelts_deploy": + <<: *openjdk8-scala2_12_8-nodelts_environment + steps: *run_cipublish diff --git a/.github/ISSUE_TEMPLATE/release.md b/.github/ISSUE_TEMPLATE/release.md new file 
mode 100644 index 00000000..de05db84 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/release.md @@ -0,0 +1,31 @@ +--- +name: Release +about: When ready to cut a release +title: Release X.Y.Z +labels: release +assignees: '' + +--- + +- [ ] Start a new release branch: +```bash +$ git flow release start X.Y.Z +``` +- [ ] Rotate `CHANGELOG.md` (following [Keep a Changelog](https://keepachangelog.com/) principles) +- [ ] Ensure outstanding changes are committed: +```bash +$ git status # Is the git staging area clean? +$ git add CHANGELOG.md +$ git commit -m "X.Y.Z" +``` +- [ ] Publish the release branch: +```bash +$ git flow release publish X.Y.Z +``` +- [ ] Ensure that CI checks pass +- [ ] Finish and publish the release branch: + - When prompted, keep default commit messages + - Use `X.Y.Z` as the tag message +```bash +$ git flow release finish -p X.Y.Z +``` diff --git a/.jvmopts b/.jvmopts new file mode 100644 index 00000000..56aefe05 --- /dev/null +++ b/.jvmopts @@ -0,0 +1 @@ +-Xmx4G diff --git a/CHANGELOG.md b/CHANGELOG.md index e7939431..91ca58b8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Add STRTA and migrate to CircleCI [#93](https://github.com/geotrellis/maml/pull/93) - Add changelog and pull request template [#96](https://github.com/geotrellis/maml/pull/96) - Added `ParallelInterpreter` [#101](https://github.com/geotrellis/maml/pull/101) +- Add automated releases to Sonatype Nexus through CI [#98](https://github.com/geotrellis/maml/pull/98) ### Changed - Fixed 2.12 compilation in tests [#95](https://github.com/geotrellis/maml/pull/95) diff --git a/build.sbt b/build.sbt index 016506ce..1f863a86 100644 --- a/build.sbt +++ b/build.sbt @@ -1,22 +1,102 @@ +import xerial.sbt.Sonatype._ + import com.scalapenos.sbt.prompt.SbtPrompt.autoImport._ promptTheme := com.scalapenos.sbt.prompt.PromptThemes.ScalapenosTheme -/** Project configurations */ +val commonSettings = Seq( + // We are overriding the default behavior of sbt-git which, by default, + // only appends the `-SNAPSHOT` suffix if there are uncommitted + // changes in the workspace. 
+ version := { + // Avoid Cyclic reference involving error + if (git.gitCurrentTags.value.isEmpty || git.gitUncommittedChanges.value) + git.gitDescribedVersion.value.get + "-SNAPSHOT" + else + git.gitDescribedVersion.value.get + }, + scalaVersion := "2.11.12", + crossScalaVersions := Seq("2.11.12", "2.12.8"), + resolvers ++= Seq( + Resolver.sonatypeRepo("releases"), + "locationtech-releases" at "https://repo.locationtech.org/content/groups/releases", + "locationtech-snapshots" at "https://repo.locationtech.org/content/groups/snapshots" + ), + addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full), + scalacOptions := Seq( + "-deprecation", + "-unchecked", + "-feature", + "-language:implicitConversions", + "-language:reflectiveCalls", + "-language:higherKinds", + "-language:postfixOps", + "-language:existentials", + "-language:experimental.macros", + "-feature", + "-Ypatmat-exhaust-depth", "100" + ) +) + +lazy val noPublishSettings = Seq( + publish := {}, + publishLocal := {}, + publishArtifact := false +) + +lazy val publishSettings = Seq( + organization := "com.azavea.geotrellis", + organizationName := "GeoTrellis", + organizationHomepage := Some(new URL("https://geotrellis.io/")), + description := "MAML is used to create a declarative structure that describes a combination of map algebra operations.", + publishArtifact in Test := false +) ++ sonatypeSettings ++ credentialSettings + +lazy val sonatypeSettings = Seq( + publishMavenStyle := true, + + sonatypeProfileName := "com.azavea", + sonatypeProjectHosting := Some(GitHubHosting(user="geotrellis", repository="maml", email="systems@azavea.com")), + developers := List( + Developer(id = "moradology", name = "Nathan Zimmerman", email = "nzimmerman@azavea.com", url = url("https://github.com/moradology")), + Developer(id = "echeipesh", name = "Eugene Cheipesh", email = "echeipesh@azavea.com", url = url("https://github.com/echeipesh")), + Developer(id = "lossyrob", name = "Rob Emanuele", email = "remanuele@azavea.com", url = url("https://github.com/lossyrob")) + ), + licenses := Seq("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.txt")), + + publishTo := sonatypePublishTo.value +) + +lazy val credentialSettings = Seq( + credentials += Credentials( + "GnuPG Key ID", + "gpg", + System.getenv().get("GPG_KEY_ID"), + "ignored" + ), + + credentials += Credentials( + "Sonatype Nexus Repository Manager", + "oss.sonatype.org", + System.getenv().get("SONATYPE_USERNAME"), + System.getenv().get("SONATYPE_PASSWORD") + ) +) + lazy val root = project.in(file(".")) + .settings(commonSettings) + .settings(publishSettings) // these settings are needed to release all aggregated modules under this root module + .settings(noPublishSettings) // this is to exclue the root module itself from being published .aggregate(mamlJs, mamlJvm, mamlSpark) - .settings(commonSettings:_*) - .settings( - licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0.txt")) - ).enablePlugins(ScalaJSPlugin) + .enablePlugins(ScalaJSPlugin) val circeVer = "0.11.1" val circeOpticsVer = "0.11.0" val gtVer = "3.0.0-M3" lazy val maml = crossProject.in(file(".")) - .settings(publishSettings:_*) - .settings(commonSettings:_*) + .settings(commonSettings) + .settings(publishSettings) .settings( libraryDependencies ++= Seq( "org.scalacheck" %% "scalacheck" % "1.13.4" % "test", @@ -40,51 +120,13 @@ lazy val maml = crossProject.in(file(".")) lazy val mamlJvm = maml.jvm lazy val mamlJs = maml.js lazy val mamlSpark = project.in(file("spark")) 
- .dependsOn(mamlJvm) + .settings(commonSettings) + .settings(publishSettings) .settings( libraryDependencies ++= Seq( "org.locationtech.geotrellis" %% "geotrellis-spark-testkit" % gtVer % "test", "org.apache.spark" %% "spark-core" % "2.4.0" % "provided", "org.apache.spark" %% "spark-sql" % "2.4.0" % "provided" ) - ).settings(publishSettings:_*) - .settings(commonSettings:_*) - - -/** Common settings */ -lazy val publishSettings = - Seq( - bintrayOrganization := Some("azavea"), - bintrayRepository := "maven", - bintrayVcsUrl := Some("https://github.com/geotrellis/maml.git"), - publishMavenStyle := true, - publishArtifact in Test := false, - pomIncludeRepository := { _ => false }, - homepage := Some(url("https://geotrellis.github.io/maml")) - ) - -val commonSettings = Seq( - organization := "com.azavea", - licenses += ("Apache-2.0", url("http://www.apache.org/licenses/LICENSE-2.0.txt")), - scalaVersion := "2.11.12", - crossScalaVersions := Seq("2.11.12", "2.12.7"), - resolvers ++= Seq( - Resolver.sonatypeRepo("releases"), - "locationtech-releases" at "https://repo.locationtech.org/content/groups/releases", - "locationtech-snapshots" at "https://repo.locationtech.org/content/groups/snapshots" - ), - addCompilerPlugin("org.scalamacros" % "paradise" % "2.1.0" cross CrossVersion.full), - scalacOptions := Seq( - "-deprecation", - "-unchecked", - "-feature", - "-language:implicitConversions", - "-language:reflectiveCalls", - "-language:higherKinds", - "-language:postfixOps", - "-language:existentials", - "-language:experimental.macros", - "-feature", - "-Ypatmat-exhaust-depth", "100" ) -) + .dependsOn(mamlJvm) diff --git a/project/build.properties b/project/build.properties index 133a8f19..c0bab049 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.17 +sbt.version=1.2.8 diff --git a/project/plugins.sbt b/project/plugins.sbt index 75c7a1ca..32d6b1ee 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,28 +1,21 @@ -resolvers ++= Seq( - Classpaths.sbtPluginReleases, - Opts.resolver.sonatypeReleases -) +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.4.0") -resolvers += Classpaths.typesafeResolver - -addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.1") - -addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.1.10") - -addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5") +addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9") addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.2") -addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.25") +addSbtPlugin("org.scala-js" % "sbt-scalajs" % "0.6.27") -addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "5.1.0") +addSbtPlugin("io.spray" % "sbt-revolver" % "0.9.1") -addSbtPlugin("io.spray" % "sbt-revolver" % "0.8.0") +addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.3.19") -addSbtPlugin("com.typesafe.sbt" % "sbt-native-packager" % "1.2.0") - -addSbtPlugin("com.47deg" % "sbt-microsites" % "0.6.1") +addSbtPlugin("com.47deg" % "sbt-microsites" % "0.9.0") addSbtPlugin("com.scalapenos" % "sbt-prompt" % "1.0.2") -addSbtPlugin("com.github.gseitz" % "sbt-release" % "1.0.11") +addSbtPlugin("org.xerial.sbt" % "sbt-sonatype" % "2.5") + +addSbtPlugin("io.crashbox" % "sbt-gpg" % "0.2.0") + +addSbtPlugin("com.typesafe.sbt" % "sbt-git" % "1.0.0") diff --git a/scripts/cipublish b/scripts/cipublish new file mode 100755 index 00000000..41b73c08 --- /dev/null +++ b/scripts/cipublish @@ -0,0 +1,27 @@ +#!/bin/bash + +set -e + +if [[ -n "${MAML_DEBUG}" ]]; then + set -x 
+fi + +function usage() { + echo -n \ + "Usage: $(basename "$0") +Publish artifacts to Sonatype. +" +} + +if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then + if [[ "${1:-}" == "--help" ]]; then + usage + else + echo "Publishing artifacts to Sonatype" + if [[ -n "${CIRCLE_TAG}" ]]; then + ./sbt "++${SCALA_VERSION:-2.11.12}" publish sonatypeRelease + else + ./sbt "++${SCALA_VERSION:-2.11.12}" publish + fi + fi +fi diff --git a/version.sbt b/version.sbt deleted file mode 100644 index dfd7e193..00000000 --- a/version.sbt +++ /dev/null @@ -1 +0,0 @@ -version in ThisBuild := "0.3.4-SNAPSHOT" From 6b40e430351d73d191f06c6ae229bf96bc1e9a4a Mon Sep 17 00:00:00 2001 From: Rocky Breslow Date: Thu, 23 May 2019 11:36:35 -0400 Subject: [PATCH 5/6] Reconcile change log for 0.3.3 release (#102) * Reconcile change log for 0.3.3 release * Add change log entry * remove leftover branch gates --- .circleci/config.yml | 2 -- CHANGELOG.md | 15 ++++++++++----- 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index b26a85e0..2cd1e28f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -72,7 +72,6 @@ workflows: only: - master - develop - - feature/jrb/automated-releases-to-sonatype - "openjdk8-scala2.12.8-nodelts_deploy": requires: - "openjdk8-scala2.12.8-nodelts" @@ -84,7 +83,6 @@ workflows: only: - master - develop - - feature/jrb/automated-releases-to-sonatype jobs: "openjdk8-scala2.11.12-nodelts": diff --git a/CHANGELOG.md b/CHANGELOG.md index 91ca58b8..24ee574d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,14 +6,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### Added -- Add README [#92](https://github.com/geotrellis/maml/pull/92) -- Add STRTA and migrate to CircleCI [#93](https://github.com/geotrellis/maml/pull/93) -- Add changelog and pull request template [#96](https://github.com/geotrellis/maml/pull/96) - Added `ParallelInterpreter` [#101](https://github.com/geotrellis/maml/pull/101) - Add automated releases to Sonatype Nexus through CI [#98](https://github.com/geotrellis/maml/pull/98) -### Changed +### Fixed - Fixed 2.12 compilation in tests [#95](https://github.com/geotrellis/maml/pull/95) +- Reconcile change log for 0.3.3 release [#102](https://github.com/geotrellis/maml/pull/102) + +## [0.3.3] - 2019-04-29 +### Added +- Add README [#92](https://github.com/geotrellis/maml/pull/92) +- Add STRTA and migrate to CircleCI [#93](https://github.com/geotrellis/maml/pull/93) +- Add changelog and pull request template [#96](https://github.com/geotrellis/maml/pull/96) ## [0.3.2] - 2019-04-17 ### Added @@ -25,5 +29,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Mask on LazyMultibandRasters directly [#89](https://github.com/geotrellis/maml/pull/89) - Assume correct extents in binary ops to avoid floating point errors [#91](https://github.com/geotrellis/maml/pull/91) -[Unreleased]: https://github.com/geotrellis/maml/compare/0.3.2...HEAD +[Unreleased]: https://github.com/geotrellis/maml/compare/0.3.3...HEAD +[0.3.3]: https://github.com/geotrellis/maml/compare/0.3.2...0.3.3 [0.3.2]: https://github.com/geotrellis/maml/compare/v0.2.2...0.3.2 From 02af3e823e222062393ee5cc89cdbb263e9724bd Mon Sep 17 00:00:00 2001 From: Rocky Breslow Date: Thu, 23 May 2019 11:45:40 -0400 Subject: [PATCH 6/6] 0.4.0 --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 24ee574d..6141ddec 100644 --- a/CHANGELOG.md 
+++ b/CHANGELOG.md @@ -6,6 +6,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [Unreleased] ### Added + +## [0.4.0] - 2019-05-23 +### Added - Added `ParallelInterpreter` [#101](https://github.com/geotrellis/maml/pull/101) - Add automated releases to Sonatype Nexus through CI [#98](https://github.com/geotrellis/maml/pull/98) @@ -29,6 +32,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Mask on LazyMultibandRasters directly [#89](https://github.com/geotrellis/maml/pull/89) - Assume correct extents in binary ops to avoid floating point errors [#91](https://github.com/geotrellis/maml/pull/91) -[Unreleased]: https://github.com/geotrellis/maml/compare/0.3.3...HEAD +[Unreleased]: https://github.com/geotrellis/maml/compare/0.4.0...HEAD +[0.4.0]: https://github.com/geotrellis/maml/compare/0.3.3...0.4.0 [0.3.3]: https://github.com/geotrellis/maml/compare/0.3.2...0.3.3 [0.3.2]: https://github.com/geotrellis/maml/compare/v0.2.2...0.3.2
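
A minimal sketch of driving the ConcurrentInterpreter added in PATCH 3/6, following the pattern in ConcurrentEvaluationSpec above. The example object name is illustrative only; the AST nodes (Addition, IntLit, Sleep), the sleep directive, ConcurrentInterpreter.DEFAULT, and the cats-effect IO/ContextShift setup are all taken from the specs in that patch.

import cats.effect.{ContextShift, IO}

import com.azavea.maml.ast.{Addition, IntLit, Sleep}
import com.azavea.maml.eval.ConcurrentInterpreter
import com.azavea.maml.eval.directive.OpDirectives.sleep

import scala.concurrent.ExecutionContext.Implicits.global

// Illustrative object name; mirrors the setup in ConcurrentEvaluationSpec.
object MamlConcurrentExample extends App {
  // Concurrent[IO] needs a ContextShift so the interpreter can start a fiber per child expression.
  implicit val cs: ContextShift[IO] = IO.contextShift(global)

  // DEFAULT reuses the NaiveInterpreter directives; Sleep is not handled by default,
  // so its directive is prepended explicitly, exactly as in the spec.
  val interpreter = ConcurrentInterpreter.DEFAULT[IO].prependDirective(sleep)

  // Two 3-second Sleep children are evaluated concurrently before the Addition,
  // so wall-clock time is roughly 3s rather than 6s.
  val expr = Addition(List(Sleep(3L, List(IntLit(1))), Sleep(3L, List(IntLit(1)))))

  // Prints the Interpreted[Result]; in the spec, calling .as[Int] on this result yields Valid(2).
  println(interpreter(expr).unsafeRunSync)
}

ParallelInterpreter.DEFAULT[IO, IO.Par].prependDirective(sleep) is driven the same way, per ParallelEvaluationSpec.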