diff --git a/.locationtech/deploy-213.sh b/.locationtech/deploy-213.sh new file mode 100755 index 0000000000..1d42b7c527 --- /dev/null +++ b/.locationtech/deploy-213.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +set -Eeuo pipefail +set -x + +./sbt -213 "project macros" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project vector" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project proj4" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project raster" publish -no-colors -J-Drelease=locationtech +# && ./sbt -213 "project spark" publish -no-colors -J-Drelease=locationtech \ +# && ./sbt -213 "project spark-pipeline" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project s3" publish -no-colors -J-Drelease=locationtech +# && ./sbt -213 "project s3-spark" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project accumulo" publish -no-colors -J-Drelease=locationtech +# && ./sbt -213 "project accumulo-spark" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project hbase" publish -no-colors -J-Drelease=locationtech +# && ./sbt -213 "project hbase-spark" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project cassandra" publish -no-colors -J-Drelease=locationtech +# && ./sbt -213 "project cassandra-spark" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project geotools" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project shapefile" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project layer" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project store" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project util" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project vectortile" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project raster-testkit" publish -no-colors -J-Drelease=locationtech \ + && ./sbt -213 "project vector-testkit" publish 
-no-colors -J-Drelease=locationtech +# && ./sbt -213 "project spark-testkit" publish -no-colors -J-Drelease=locationtech \ + ./sbt -213 "project gdal" publish -no-colors -J-Drelease=locationtech diff --git a/build.sbt b/build.sbt index 4c285279de..f6505061df 100644 --- a/build.sbt +++ b/build.sbt @@ -1,8 +1,7 @@ import sbt.Keys._ -ThisBuild / scalaVersion := "2.12.13" ThisBuild / organization := "org.locationtech.geotrellis" -ThisBuild / crossScalaVersions := List("2.12.13", "2.11.12") +ThisBuild / scalaVersion := Settings.scala212 lazy val root = Project("geotrellis", file(".")) .aggregate( diff --git a/layer/src/main/scala/geotrellis/layer/buffer/BufferTiles.scala b/layer/src/main/scala/geotrellis/layer/buffer/BufferTiles.scala index 82b61aab2d..5d8994038a 100644 --- a/layer/src/main/scala/geotrellis/layer/buffer/BufferTiles.scala +++ b/layer/src/main/scala/geotrellis/layer/buffer/BufferTiles.scala @@ -84,7 +84,7 @@ trait BufferTiles { addSlice(SpatialKey(col+1, row+1), TopLeft) addSlice(SpatialKey(col-1, row+1), TopRight) - parts + parts.toSeq } def bufferWithNeighbors[ diff --git a/proj4/src/main/scala/geotrellis/proj4/io/wkt/WKT.scala b/proj4/src/main/scala/geotrellis/proj4/io/wkt/WKT.scala index 65ceaa9d1a..cef6bac519 100644 --- a/proj4/src/main/scala/geotrellis/proj4/io/wkt/WKT.scala +++ b/proj4/src/main/scala/geotrellis/proj4/io/wkt/WKT.scala @@ -22,7 +22,7 @@ import scala.io.Source object WKT { private val wktResourcePath = "/proj4/wkt/epsg.properties" - lazy val parsed: Map[Int, WktCS] = records.mapValues(WKTParser.apply) + lazy val parsed: Map[Int, WktCS] = records.mapValues(WKTParser.apply).toMap lazy val projections: Set[WktCS] = parsed.values.toSet lazy val records: Map[Int, String] = parseWktEpsgResource diff --git a/proj4/src/test/scala/geotrellis/proj4/GenerateTestCases.scala b/proj4/src/test/scala/geotrellis/proj4/GenerateTestCases.scala index 8dfc5104ef..21d6837004 100644 --- a/proj4/src/test/scala/geotrellis/proj4/GenerateTestCases.scala 
+++ b/proj4/src/test/scala/geotrellis/proj4/GenerateTestCases.scala @@ -43,7 +43,7 @@ object GenerateTestCases { .filter { _ startsWith "<" } .map { s => s.tail.take(s.indexOf('>') - 1) } .filterNot { _ == "4326" } - .to[Vector] + .toVector } val output = new java.io.FileWriter("proj4/src/test/resources/proj4-epsg.csv"); diff --git a/proj4/src/test/scala/geotrellis/proj4/MetaCRSTestFileReader.scala b/proj4/src/test/scala/geotrellis/proj4/MetaCRSTestFileReader.scala index a07d64dfb1..1f9a051c4f 100644 --- a/proj4/src/test/scala/geotrellis/proj4/MetaCRSTestFileReader.scala +++ b/proj4/src/test/scala/geotrellis/proj4/MetaCRSTestFileReader.scala @@ -49,7 +49,7 @@ object MetaCRSTestFileReader { .filter(r => r.nonEmpty && !r.head.startsWith("#")) .drop(1) .map(parseTest) - .to[List] + .toList } private def parseTest(cols: Array[String]): MetaCRSTestCase = { diff --git a/project/CrossCompileAutoPlugin.scala b/project/CrossCompileAutoPlugin.scala new file mode 100644 index 0000000000..46f3d082fb --- /dev/null +++ b/project/CrossCompileAutoPlugin.scala @@ -0,0 +1,27 @@ +import sbt._ +import sbt.Keys._ + +object CrossCompileAutoPlugin extends AutoPlugin { + + override def trigger: sbt.PluginTrigger = allRequirements + + override def projectSettings: Seq[Def.Setting[_]] = + Seq( + libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, 13)) => Seq.empty + case Some((2, 11 | 12)) => Seq( + compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full), + "org.scala-lang.modules" %% "scala-collection-compat" % "2.4.2" + ) + case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}") + }), + Compile / scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, 13)) => Seq( + "-Ymacro-annotations", // replaces paradise in 2.13 + "-Wconf:cat=deprecation&msg=Auto-application:silent" // there are many of these, silence until fixed + ) + case Some((2, 11 | 12)) => 
Seq("-Ypartial-unification") // required by Cats + case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}") + }) + ) +} \ No newline at end of file diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 149682e7f5..765b0360d9 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -18,7 +18,6 @@ import sbt._ object Version { val geotools = "24.2" - val spire = "0.13.0" val accumulo = "1.9.3" val cassandra = "3.7.2" val hbase = "2.2.5" @@ -34,11 +33,13 @@ object Version { import sbt.Keys._ object Dependencies { - private def ver(for211: String, for212: String) = Def.setting { + + private def ver(for211: String, for212: String, for213: Option[String] = None) = Def.setting { CrossVersion.partialVersion(scalaVersion.value) match { case Some((2, 11)) => for211 case Some((2, 12)) => for212 - case _ => sys.error("not good") + case Some((2, 13)) => for213.getOrElse(for212) + case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}") } } @@ -67,6 +68,15 @@ object Dependencies { def scalaReflect(version: String) = "org.scala-lang" % "scala-reflect" % version + def spire(module: String) = Def.setting { + CrossVersion.partialVersion(scalaVersion.value) match { + case Some((2, 11)) => "org.spire-math" %% "spire" % "0.13.0" + case Some((2, 12)) => "org.spire-math" %% "spire" % "0.13.0" // 0.17.0 exists for 2.12 + case Some((2, 13)) => "org.typelevel" %% "spire" % "0.17.0" + case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}") + } + } + val sparkCore = "org.apache.spark" %% "spark-core" % Version.spark val sparkSql = "org.apache.spark" %% "spark-sql" % Version.spark val pureconfig = "com.github.pureconfig" %% "pureconfig" % "0.14.0" @@ -77,8 +87,6 @@ object Dependencies { val jts = "org.locationtech.jts" % "jts-core" % "1.17.1" val proj4j = "org.locationtech.proj4j" % "proj4j" % "1.1.1" val openCSV = "com.opencsv" % "opencsv" % "5.3" - val 
spire = "org.spire-math" %% "spire" % Version.spire - val spireMacro = "org.spire-math" %% "spire-macros" % Version.spire val apacheIO = "commons-io" % "commons-io" % "2.8.0" val apacheLang3 = "org.apache.commons" % "commons-lang3" % "3.12.0" val apacheMath = "org.apache.commons" % "commons-math3" % "3.6.1" diff --git a/project/Settings.scala b/project/Settings.scala index e5e493c6e1..6fb1b3670c 100644 --- a/project/Settings.scala +++ b/project/Settings.scala @@ -17,7 +17,7 @@ import Dependencies._ import GTBenchmarkPlugin.Keys._ import sbt._ -import sbt.Keys._ +import sbt.Keys.{crossScalaVersions, _} import sbtassembly.AssemblyPlugin.autoImport._ import com.typesafe.tools.mima.plugin.MimaKeys._ import de.heikoseeberger.sbtheader.{CommentStyle, FileType} @@ -40,12 +40,19 @@ object Settings { val all = external ++ local } + lazy val scala211 = "2.11.12" + lazy val scala212 = "2.12.13" + lazy val scala213 = "2.13.5" + + lazy val crossScalaVersionsAll = List(scala213, scala212, scala211) + lazy val crossScalaVersionsSparkOnly = List(scala212, scala211) + lazy val noForkInTests = Seq( Test / fork := false, Test / parallelExecution := false ) - val commonScalacOptions = Seq( + lazy val commonScalacOptions = Seq( "-deprecation", "-unchecked", "-feature", @@ -56,16 +63,17 @@ object Settings { "-language:existentials", "-language:experimental.macros", "-feature", - "-Ypartial-unification", // required by Cats // "-Yrangepos", // required by SemanticDB compiler plugin // "-Ywarn-unused-import", // required by `RemoveUnused` rule - "-target:jvm-1.8") + "-target:jvm-1.8" + ) lazy val commonSettings = Seq( description := "geographic data processing library for high performance applications", licenses := Seq("Apache-2.0" -> url("http://www.apache.org/licenses/LICENSE-2.0.html")), homepage := Some(url("https://geotrellis.io")), scmInfo := Some(ScmInfo(url("https://github.com/locationtech/geotrellis"), "scm:git:git@github.com:locationtech/geotrellis.git")), + crossScalaVersions 
:= crossScalaVersionsAll, scalacOptions ++= commonScalacOptions, publishMavenStyle := true, Test / publishArtifact := false, @@ -93,7 +101,6 @@ object Settings { ).filter(_.asFile.canRead).map(Credentials(_)), addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full), - addCompilerPlugin("org.scalamacros" %% "paradise" % "2.1.1" cross CrossVersion.full), addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.10" cross CrossVersion.full), pomExtra := ( @@ -121,7 +128,11 @@ object Settings { existingText.flatMap(_ => existingText.map(_.trim)).getOrElse(newText) } } ) - ) + ), + evictionWarningOptions in update := EvictionWarningOptions.default + .withWarnTransitiveEvictions(false) + .withWarnDirectEvictions(false) + .withWarnScalaVersionEviction(false) ) lazy val accumulo = Seq( @@ -144,6 +155,7 @@ object Settings { lazy val `accumulo-spark` = Seq( name := "geotrellis-accumulo-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( accumuloCore exclude("org.jboss.netty", "netty") @@ -166,6 +178,7 @@ object Settings { ) ++ commonSettings ++ noForkInTests lazy val bench = Seq( + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies += sl4jnop, jmhIterations := Some(5), jmhTimeUnit := None, // Each benchmark should determing the appropriate time unit. 
@@ -196,6 +209,7 @@ object Settings { lazy val `cassandra-spark` = Seq( name := "geotrellis-cassandra-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( cassandraDriverCore excludeAll( @@ -223,6 +237,7 @@ object Settings { lazy val `doc-examples` = Seq( name := "geotrellis-doc-examples", + crossScalaVersions := crossScalaVersionsSparkOnly, scalacOptions ++= commonScalacOptions, libraryDependencies ++= Seq( sparkCore, @@ -376,6 +391,7 @@ object Settings { lazy val `hbase-spark` = Seq( name := "geotrellis-hbase-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( hadoopClient % Provided, sparkCore % Provided, @@ -399,7 +415,7 @@ object Settings { name := "geotrellis-macros", Compile / sourceGenerators += (Compile / sourceManaged).map(Boilerplate.genMacro).taskValue, libraryDependencies ++= Seq( - spireMacro, + spire("spire-macros").value, scalaReflect(scalaVersion.value) ) ) ++ commonSettings @@ -410,7 +426,8 @@ object Settings { mdocOut := new File("website/docs"), mdocVariables := Map( "VERSION" -> (ThisBuild / version).value - ) + ), + crossScalaVersions := crossScalaVersionsSparkOnly ) lazy val proj4 = Seq( @@ -430,7 +447,7 @@ object Settings { name := "geotrellis-raster", libraryDependencies ++= Seq( squants, - monocle("core").value, + monocle("core").value, monocle("macro").value, scalaXml, scalaURI.value, @@ -497,6 +514,7 @@ object Settings { lazy val `s3-spark` = Seq( name := "geotrellis-s3-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( hadoopClient % Provided, sparkCore % Provided, @@ -533,6 +551,7 @@ object Settings { lazy val spark = Seq( name := "geotrellis-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( sparkCore % Provided, hadoopClient % Provided, @@ -556,7 +575,8 @@ object Settings { lazy val `spark-pipeline` = Seq( name := "geotrellis-spark-pipeline", - libraryDependencies ++= Seq( + 
crossScalaVersions := crossScalaVersionsSparkOnly, + libraryDependencies ++= Seq( circe("generic-extras").value, hadoopClient % Provided, sparkCore % Provided, @@ -587,6 +607,7 @@ object Settings { lazy val `spark-testkit` = Seq( name := "geotrellis-spark-testkit", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( hadoopClient % Provided, sparkCore % Provided, @@ -600,7 +621,7 @@ object Settings { libraryDependencies ++= Seq( log4s, scalaj, - spire, + spire("spire").value, scalatest % Test ) ) ++ commonSettings @@ -608,11 +629,12 @@ object Settings { lazy val vector = Seq( name := "geotrellis-vector", libraryDependencies ++= Seq( + scalaReflect(scalaVersion.value), jts, shapeless, pureconfig, - circe("core").value, - circe("generic").value, + circe("core").value, + circe("generic").value, circe("parser").value, cats("core").value, apacheMath, @@ -669,7 +691,7 @@ object Settings { uzaygezenCore, scalaXml, apacheLang3, - fs2("core").value, + fs2("core").value, fs2("io").value, cats("effect").value, scalatest % Test @@ -691,6 +713,7 @@ object Settings { lazy val `gdal-spark` = Seq( name := "geotrellis-gdal-spark", + crossScalaVersions := crossScalaVersionsSparkOnly, libraryDependencies ++= Seq( gdalWarp, hadoopClient % Provided, diff --git a/publish/publish-to-sonatype-213.sh b/publish/publish-to-sonatype-213.sh new file mode 100755 index 0000000000..2340740b7f --- /dev/null +++ b/publish/publish-to-sonatype-213.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +# Publish to sonatype for all supported scala version 2.13 + +set -Eeuo pipefail +set -x + +./sbt -213 publishSigned -no-colors -J-Drelease=sonatype diff --git a/raster/src/main/scala/geotrellis/raster/GridBounds.scala b/raster/src/main/scala/geotrellis/raster/GridBounds.scala index 2df888e99f..c7427214ae 100644 --- a/raster/src/main/scala/geotrellis/raster/GridBounds.scala +++ b/raster/src/main/scala/geotrellis/raster/GridBounds.scala @@ -162,7 +162,7 @@ case class 
GridBounds[@specialized(Int, Long) N: Integral]( if(overlapRowMax < rowMax) { result += GridBounds(overlapColMin, overlapRowMax + 1, overlapColMax, rowMax) } - result + result.toSeq } /** diff --git a/raster/src/main/scala/geotrellis/raster/Implicits.scala b/raster/src/main/scala/geotrellis/raster/Implicits.scala index 7bb98f8db6..72ba5e88cd 100644 --- a/raster/src/main/scala/geotrellis/raster/Implicits.scala +++ b/raster/src/main/scala/geotrellis/raster/Implicits.scala @@ -17,40 +17,40 @@ package geotrellis.raster import geotrellis.vector.Point -import geotrellis.vector._ +import geotrellis.vector.withExtraPointMethods import geotrellis.util.{MethodExtensions, np} object Implicits extends Implicits trait Implicits - extends costdistance.Implicits - with crop.Implicits - with density.Implicits - with distance.Implicits - with equalization.Implicits - with hydrology.Implicits - with interpolation.Implicits - with io.json.Implicits - with mapalgebra.focal.Implicits - with mapalgebra.focal.hillshade.Implicits - with mapalgebra.local.Implicits - with mapalgebra.zonal.Implicits - with mask.Implicits - with matching.Implicits - with merge.Implicits - with prototype.Implicits - with rasterize.Implicits - with regiongroup.Implicits - with render.Implicits - with reproject.Implicits - with resample.Implicits - with sigmoidal.Implicits - with split.Implicits - with summary.Implicits - with summary.polygonal.Implicits - with transform.Implicits - with vectorize.Implicits - with viewshed.Implicits { + extends geotrellis.raster.costdistance.Implicits + with geotrellis.raster.crop.Implicits + with geotrellis.raster.density.Implicits + with geotrellis.raster.distance.Implicits + with geotrellis.raster.equalization.Implicits + with geotrellis.raster.hydrology.Implicits + with geotrellis.raster.interpolation.Implicits + with geotrellis.raster.io.json.Implicits + with geotrellis.raster.mapalgebra.focal.Implicits + with geotrellis.raster.mapalgebra.focal.hillshade.Implicits + with 
geotrellis.raster.mapalgebra.local.Implicits + with geotrellis.raster.mapalgebra.zonal.Implicits + with geotrellis.raster.mask.Implicits + with geotrellis.raster.matching.Implicits + with geotrellis.raster.merge.Implicits + with geotrellis.raster.prototype.Implicits + with geotrellis.raster.rasterize.Implicits + with geotrellis.raster.regiongroup.Implicits + with geotrellis.raster.render.Implicits + with geotrellis.raster.reproject.Implicits + with geotrellis.raster.resample.Implicits + with geotrellis.raster.sigmoidal.Implicits + with geotrellis.raster.split.Implicits + with geotrellis.raster.summary.Implicits + with geotrellis.raster.summary.polygonal.Implicits + with geotrellis.raster.transform.Implicits + with geotrellis.raster.vectorize.Implicits + with geotrellis.raster.viewshed.Implicits { // Implicit method extension for core types @@ -84,7 +84,7 @@ trait Implicits def assertEqualDimensions(): Unit = if(Set(rs.map(_.dimensions)).size != 1) { val dimensions = rs.map(_.dimensions).toSeq - throw new GeoAttrsError("Cannot combine tiles with different dimensions." + + throw GeoAttrsError("Cannot combine tiles with different dimensions." + s"$dimensions are not all equal") } } @@ -92,7 +92,7 @@ trait Implicits implicit class TileTupleExtensions(t: (Tile, Tile)) { def assertEqualDimensions(): Unit = if(t._1.dimensions != t._2.dimensions) { - throw new GeoAttrsError("Cannot combine rasters with different dimensions." + + throw GeoAttrsError("Cannot combine rasters with different dimensions." 
+ s"${t._1.dimensions} does not match ${t._2.dimensions}") } } diff --git a/raster/src/main/scala/geotrellis/raster/rasterize/polygon/FractionalRasterizer.scala b/raster/src/main/scala/geotrellis/raster/rasterize/polygon/FractionalRasterizer.scala index 105769b839..418558cae4 100644 --- a/raster/src/main/scala/geotrellis/raster/rasterize/polygon/FractionalRasterizer.scala +++ b/raster/src/main/scala/geotrellis/raster/rasterize/polygon/FractionalRasterizer.scala @@ -30,7 +30,7 @@ object FractionalRasterizer { private type Segment = (Double, Double, Double, Double) - private def polygonToEdges(poly: Polygon, re: RasterExtent): Seq[Segment] = { + private def polygonToEdges(poly: Polygon, re: RasterExtent): mutable.ArrayBuffer[Segment] = { val arrayBuffer = mutable.ArrayBuffer.empty[Segment] diff --git a/raster/src/main/scala/geotrellis/raster/rasterize/polygon/PolygonRasterizer.scala b/raster/src/main/scala/geotrellis/raster/rasterize/polygon/PolygonRasterizer.scala index df3281c705..5bda5390f4 100644 --- a/raster/src/main/scala/geotrellis/raster/rasterize/polygon/PolygonRasterizer.scala +++ b/raster/src/main/scala/geotrellis/raster/rasterize/polygon/PolygonRasterizer.scala @@ -72,7 +72,7 @@ object PolygonRasterizer { private def intervalDifference(a : List[Interval], b: Interval) : List[Interval] = a.flatMap(intervalDifference(_,b)) - private def mergeIntervals(sortedIntervals : Seq[Interval]) : Array[Double] = { + private def mergeIntervals(sortedIntervals : mutable.ListBuffer[Interval]) : Array[Double] = { if (sortedIntervals.length > 0) { val head = sortedIntervals.head val stack = mutable.Stack(head._1, head._2) diff --git a/raster/src/main/scala/geotrellis/raster/render/ColorMap.scala b/raster/src/main/scala/geotrellis/raster/render/ColorMap.scala index 0db67cdb8f..452546d7d0 100644 --- a/raster/src/main/scala/geotrellis/raster/render/ColorMap.scala +++ b/raster/src/main/scala/geotrellis/raster/render/ColorMap.scala @@ -16,11 +16,11 @@ package 
geotrellis.raster.render -import geotrellis.raster._ import geotrellis.raster.histogram.Histogram import spire.syntax.cfor._ import spire.std.any._ import _root_.io.circe._ +import geotrellis.raster.{ArrayTile, IntCellType, Tile, d2i, i2d, isNoData} import scala.util.Try diff --git a/raster/src/main/scala/geotrellis/raster/render/JpgRenderMethods.scala b/raster/src/main/scala/geotrellis/raster/render/JpgRenderMethods.scala index ac5a8e8a32..27f1ceaf09 100644 --- a/raster/src/main/scala/geotrellis/raster/render/JpgRenderMethods.scala +++ b/raster/src/main/scala/geotrellis/raster/render/JpgRenderMethods.scala @@ -16,8 +16,8 @@ package geotrellis.raster.render -import geotrellis.raster._ -import geotrellis.raster.render.jpg._ +import geotrellis.raster.Tile +import geotrellis.raster.render.jpg.{JpgEncoder, Settings} import geotrellis.util.MethodExtensions diff --git a/spark/src/main/scala/geotrellis/spark/Implicits.scala b/spark/src/main/scala/geotrellis/spark/Implicits.scala index 3cf7a02658..a257b0dc85 100644 --- a/spark/src/main/scala/geotrellis/spark/Implicits.scala +++ b/spark/src/main/scala/geotrellis/spark/Implicits.scala @@ -35,39 +35,39 @@ import scala.reflect.ClassTag object Implicits extends Implicits trait Implicits - extends buffer.Implicits + extends geotrellis.spark.buffer.Implicits with CrsFormats with StoreCodecs - with clip.Implicits - with costdistance.Implicits - with crop.Implicits - with density.Implicits - with distance.Implicits - with equalization.Implicits - with filter.Implicits - with join.Implicits - with knn.Implicits - with mapalgebra.focal.hillshade.Implicits - with mapalgebra.focal.Implicits - with mapalgebra.Implicits - with mapalgebra.local.Implicits - with mapalgebra.local.temporal.Implicits - with mapalgebra.zonal.Implicits - with mask.Implicits - with matching.Implicits - with merge.Implicits - with partition.Implicits - with regrid.Implicits - with reproject.Implicits - with resample.Implicits - with rasterize.Implicits - with 
sigmoidal.Implicits - with split.Implicits - with stitch.Implicits - with summary.Implicits - with tiling.Implicits - with timeseries.Implicits - with viewshed.Implicits + with geotrellis.spark.clip.Implicits + with geotrellis.spark.costdistance.Implicits + with geotrellis.spark.crop.Implicits + with geotrellis.spark.density.Implicits + with geotrellis.spark.distance.Implicits + with geotrellis.spark.equalization.Implicits + with geotrellis.spark.filter.Implicits + with geotrellis.spark.join.Implicits + with geotrellis.spark.knn.Implicits + with geotrellis.spark.mapalgebra.focal.hillshade.Implicits + with geotrellis.spark.mapalgebra.focal.Implicits + with geotrellis.spark.mapalgebra.Implicits + with geotrellis.spark.mapalgebra.local.Implicits + with geotrellis.spark.mapalgebra.local.temporal.Implicits + with geotrellis.spark.mapalgebra.zonal.Implicits + with geotrellis.spark.mask.Implicits + with geotrellis.spark.matching.Implicits + with geotrellis.spark.merge.Implicits + with geotrellis.spark.partition.Implicits + with geotrellis.spark.regrid.Implicits + with geotrellis.spark.reproject.Implicits + with geotrellis.spark.resample.Implicits + with geotrellis.spark.rasterize.Implicits + with geotrellis.spark.sigmoidal.Implicits + with geotrellis.spark.split.Implicits + with geotrellis.spark.stitch.Implicits + with geotrellis.spark.summary.Implicits + with geotrellis.spark.tiling.Implicits + with geotrellis.spark.timeseries.Implicits + with geotrellis.spark.viewshed.Implicits with Serializable { /** Auto wrap a partitioner when something is requestion an Option[Partitioner]; diff --git a/spark/src/main/scala/geotrellis/spark/mapalgebra/local/temporal/package.scala b/spark/src/main/scala/geotrellis/spark/mapalgebra/local/temporal/package.scala index 3a7fddfeb8..d5d46bdd2e 100644 --- a/spark/src/main/scala/geotrellis/spark/mapalgebra/local/temporal/package.scala +++ b/spark/src/main/scala/geotrellis/spark/mapalgebra/local/temporal/package.scala @@ -16,17 +16,17 @@ 
package geotrellis.spark.mapalgebra.local -import geotrellis.layer._ -import geotrellis.raster._ +import geotrellis.layer.{SpatialComponent, TemporalComponent, SpatialKey, TemporalKey} import geotrellis.layer.mapalgebra.local.temporal.LocalTemporalStatistics +import geotrellis.raster.Tile import geotrellis.util._ import org.apache.spark.Partitioner import org.apache.spark.rdd.RDD import jp.ne.opt.chronoscala.Imports._ -import java.time._ +import java.time._ import scala.reflect.ClassTag diff --git a/spark/src/main/scala/geotrellis/spark/package.scala b/spark/src/main/scala/geotrellis/spark/package.scala index 73dd8b0a0f..00a241b198 100644 --- a/spark/src/main/scala/geotrellis/spark/package.scala +++ b/spark/src/main/scala/geotrellis/spark/package.scala @@ -17,8 +17,7 @@ package geotrellis import geotrellis.layer.{Metadata, TileLayerMetadata} -import geotrellis.raster._ -import geotrellis.layer._ +import geotrellis.raster.{Raster, Tile, MultibandTile} import org.apache.spark.rdd._ package object spark extends Implicits { diff --git a/store/src/main/scala-2.13/compact/FS2Utils.scala b/store/src/main/scala-2.13/compact/FS2Utils.scala new file mode 100644 index 0000000000..293c3b45ef --- /dev/null +++ b/store/src/main/scala-2.13/compact/FS2Utils.scala @@ -0,0 +1,21 @@ +/* + * Copyright 2019 Azavea + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package geotrellis.store.compact + +object FS2Utils { + def fromIterator[F[_]] = fs2.Stream.fromIterator[F] +} diff --git a/store/src/main/scala/geotrellis/store/AttributeStore.scala b/store/src/main/scala/geotrellis/store/AttributeStore.scala index fa03a6d63e..079d75f4fc 100644 --- a/store/src/main/scala/geotrellis/store/AttributeStore.scala +++ b/store/src/main/scala/geotrellis/store/AttributeStore.scala @@ -56,7 +56,7 @@ trait AttributeStore extends AttributeCaching with LayerAttributeStore { * This function should be re-implemented by AttributeStore subclasses so that * catalogs with large numbers of layers can be queried efficiently. */ - def layersWithZoomLevels: Map[String, Seq[Int]] = layerIds.groupBy(_.name).mapValues(_.map(_.zoom)) + def layersWithZoomLevels: Map[String, Seq[Int]] = layerIds.groupBy(_.name).mapValues(_.map(_.zoom)).toMap /** Return a sequence of available zoom levels for a named layer. * diff --git a/store/src/main/scala/geotrellis/store/hadoop/formats/FilterMapFileInputFormat.scala b/store/src/main/scala/geotrellis/store/hadoop/formats/FilterMapFileInputFormat.scala index c5d788b2fc..91970232ae 100644 --- a/store/src/main/scala/geotrellis/store/hadoop/formats/FilterMapFileInputFormat.scala +++ b/store/src/main/scala/geotrellis/store/hadoop/formats/FilterMapFileInputFormat.scala @@ -115,8 +115,8 @@ class FilterMapFileInputFormat() extends FileInputFormat[BigIntWritable, BytesWr val possibleMatches = FilterMapFileInputFormat - .mapFileRanges(dataFileStatus.asScala.map(_.getPath.getParent), conf) - .filter { case (file, iMin, iMax) => + .mapFileRanges(dataFileStatus.asScala.map(_.getPath.getParent).toSeq, conf) + .filter { case (_, iMin: BigInt, iMax: BigInt) => // both file ranges and query ranges are sorted, use in-sync traversal while (it.hasNext && it.head._2 < iMin) it.next if (it.hasNext) iMin <= it.head._2 && (iMax == -1 || it.head._1 <= iMax) diff --git a/vector/src/main/scala/geotrellis/vector/SpatialIndex.scala 
b/vector/src/main/scala/geotrellis/vector/SpatialIndex.scala index 377c560fb9..904e580f46 100644 --- a/vector/src/main/scala/geotrellis/vector/SpatialIndex.scala +++ b/vector/src/main/scala/geotrellis/vector/SpatialIndex.scala @@ -100,7 +100,7 @@ class SpatialIndex[T](val measure: Measure = Measure.Euclidean) extends Serializ } def pointsInExtent(extent: Extent): Vector[T] = - pointsInExtentAsIterable(extent).to[Vector] + pointsInExtentAsIterable(extent).toVector def pointsInExtentAsJavaList(extent: Extent): java.util.List[T] = rtree.query(new Envelope(extent.xmin, extent.xmax, extent.ymin, extent.ymax)).asInstanceOf[java.util.List[T]] diff --git a/vector/src/main/scala/geotrellis/vector/triangulation/DelaunayTriangulation.scala b/vector/src/main/scala/geotrellis/vector/triangulation/DelaunayTriangulation.scala index fda9a46878..6b81a1d031 100644 --- a/vector/src/main/scala/geotrellis/vector/triangulation/DelaunayTriangulation.scala +++ b/vector/src/main/scala/geotrellis/vector/triangulation/DelaunayTriangulation.scala @@ -501,7 +501,7 @@ case class DelaunayTriangulation( removeIncidentEdge(vi) } - private def removeVertexAndFill(vi: Int, tris: Map[(Int, Int, Int), HalfEdge[Int, Int]], bnd: Option[Int]): Seq[Int] = { + private def removeVertexAndFill(vi: Int, tris: Map[(Int, Int, Int), HalfEdge[Int, Int]], bnd: Option[Int]): ListBuffer[Int] = { val exteriorRing = ListBuffer.empty[Int] decoupleVertex(vi) diff --git a/vector/src/main/scala/geotrellis/vector/voronoi/VoronoiDiagram.scala b/vector/src/main/scala/geotrellis/vector/voronoi/VoronoiDiagram.scala index c4772c5fbe..ca293fc9a3 100644 --- a/vector/src/main/scala/geotrellis/vector/voronoi/VoronoiDiagram.scala +++ b/vector/src/main/scala/geotrellis/vector/voronoi/VoronoiDiagram.scala @@ -66,7 +66,7 @@ object VoronoiDiagram { private final val EPSILON = 1e-10 - private def cellBoundsNew(het: HalfEdgeTable, verts: Int => Coordinate, extent: Extent)(incidentEdge: Int): Seq[CellBound] = { + private def 
cellBoundsNew(het: HalfEdgeTable, verts: Int => Coordinate, extent: Extent)(incidentEdge: Int): ListBuffer[CellBound] = { import het._ var e = incidentEdge @@ -118,7 +118,7 @@ object VoronoiDiagram { l } - private def cellExtentIntersection(het: HalfEdgeTable, verts: Int => Coordinate, incidentEdge: Int)(cell: Seq[CellBound], extent: Extent) = { + private def cellExtentIntersection(het: HalfEdgeTable, verts: Int => Coordinate, incidentEdge: Int)(cell: ListBuffer[CellBound], extent: Extent) = { val Extent(xmin, ymin, xmax, ymax) = extent val expts = ListBuffer((xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)).map{ case (x, y) => new Coordinate(x, y) } @@ -185,7 +185,7 @@ object VoronoiDiagram { None } else { clippedCorners += clippedCorners.head - val poly = Polygon(clippedCorners.map(Point(_))) + val poly = Polygon(clippedCorners.map(Point(_)).toSeq) Some(poly) } } diff --git a/vectortile/src/main/scala/geotrellis/vectortile/internal/Command.scala b/vectortile/src/main/scala/geotrellis/vectortile/internal/Command.scala index d16574d039..aca6fa930b 100644 --- a/vectortile/src/main/scala/geotrellis/vectortile/internal/Command.scala +++ b/vectortile/src/main/scala/geotrellis/vectortile/internal/Command.scala @@ -68,7 +68,7 @@ private[vectortile] case object ClosePath extends Command /** Contains convenience functions for handling [[Command]]s. */ private[vectortile] object Command { /** Attempt to parse a list of Command/Parameter Integers. 
*/ - def commands(cmds: Seq[Int]): ListBuffer[Command] = { + def commands(cmds: Seq[Int]): Seq[Command] = { @tailrec def work(cmds: Seq[Int], curr: ListBuffer[Command]): ListBuffer[Command] = cmds match { case Nil => curr case ns => (parseCmd(ns.head): @unchecked) match { @@ -102,7 +102,7 @@ private[vectortile] object Command { } } - work(cmds, new ListBuffer[Command]) + work(cmds, new ListBuffer[Command]).toSeq } /** Convert a list of parsed Commands back into their original Command diff --git a/vectortile/src/main/scala/geotrellis/vectortile/internal/package.scala b/vectortile/src/main/scala/geotrellis/vectortile/internal/package.scala index 72d1231ac0..9bc3a9ad16 100644 --- a/vectortile/src/main/scala/geotrellis/vectortile/internal/package.scala +++ b/vectortile/src/main/scala/geotrellis/vectortile/internal/package.scala @@ -199,7 +199,7 @@ package object internal { ): Seq[Command] = { def work(lines: Array[LineString]): Seq[Command] = { var curs: (Int, Int) = (0, 0) - var buff = new ListBuffer[Command] + val buff = new ListBuffer[Command] lines.foreach({l => val diffs: Array[(Int, Int)] = collapse( @@ -313,7 +313,7 @@ package object internal { buff.appendAll(Seq(MoveTo(Array(diffs.head)), LineTo(diffs.tail), ClosePath)) }) - buff + buff.toSeq } poly match {