Commit
Add Scala 2.13 crosscompilation
pomadchin committed Apr 8, 2021
1 parent 04748ff commit c1d54b8
Showing 33 changed files with 297 additions and 270 deletions.
10 changes: 5 additions & 5 deletions .circleci/config.yml
@@ -159,7 +159,7 @@ workflows:
- common:
matrix:
parameters:
scala-version: ["2.12.13"]
scala-version: ["2.12.13", "2.13.5"]
test-set: ["1", "2"]
filters:
branches:
@@ -170,7 +170,7 @@
- cassandra:
matrix:
parameters:
scala-version: ["2.12.13"]
scala-version: ["2.12.13", "2.13.5"]
filters:
branches:
only: /.*/
@@ -180,7 +180,7 @@
- s3:
matrix:
parameters:
scala-version: ["2.12.13"]
scala-version: ["2.12.13", "2.13.5"]
filters:
branches:
only: /.*/
@@ -190,7 +190,7 @@
- hbase:
matrix:
parameters:
scala-version: ["2.12.13"]
scala-version: ["2.12.13", "2.13.5"]
filters:
branches:
only: /.*/
@@ -213,7 +213,7 @@
- hbase
matrix:
parameters:
scala-version: ["2.12.13"]
scala-version: ["2.12.13", "2.13.5"]
filters:
branches:
only: master
2 changes: 1 addition & 1 deletion build.sbt
@@ -2,7 +2,7 @@ import sbt.Keys._

ThisBuild / scalaVersion := "2.12.13"
ThisBuild / organization := "org.locationtech.geotrellis"
ThisBuild / crossScalaVersions := List("2.12.13")
ThisBuild / crossScalaVersions := List("2.12.13", "2.13.5")

lazy val root = Project("geotrellis", file("."))
.aggregate(
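With both versions listed in crossScalaVersions, sbt can build every module against 2.12.13 and 2.13.5. A minimal sketch of how the cross-build is typically driven (the sbt commands in the comments are standard sbt usage, not part of this diff):

    // pin the whole build to one declared version for a single run:  sbt "++2.13.5" test
    // or run a task once per version in crossScalaVersions:          sbt "+test"
    ThisBuild / scalaVersion       := "2.12.13"                 // default for a plain `sbt compile`
    ThisBuild / crossScalaVersions := List("2.12.13", "2.13.5") // versions picked up by `+` and `++`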
@@ -105,8 +105,11 @@ object ClipToGridExamples {
}
}
}
.reduceByKey { case (Feature(poly1, (accum1, count1)), Feature(poly2, (accum2, count2))) =>
Feature(poly1, (accum1 + accum2, count1 + count2))
.reduceByKey { (l, r) =>
(l, r) match {
case (Feature(poly1, (accum1, count1)), Feature(poly2, (accum2, count2))) =>
Feature(poly1, (accum1 + accum2, count1 + count2))
}
}
.map { case (_, feature) =>
// We no longer need the UUID; also compute the mean
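The reduceByKey argument changes from a pattern-matching function literal to an explicit match inside a two-argument function, presumably to keep 2.13's stricter handling of case-literal Function2 arguments happy. A behavior-equivalent sketch using plain (sum, count) tuples instead of Feature (the RDD name is illustrative):

    // merge per-key accumulators: add the running sums and the counts
    val totals = valuesByKey.reduceByKey { (l, r) =>
      (l, r) match {
        case ((sum1, count1), (sum2, count2)) => (sum1 + sum2, count1 + count2)
      }
    }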
@@ -93,8 +93,8 @@ object LandsatMultibandRDDExample {
// Round the center coordinates in case there's any floating point errors
val center =
(
BigDecimal(x).setScale(5, RoundingMode.HALF_UP).doubleValue(),
BigDecimal(y).setScale(5, RoundingMode.HALF_UP).doubleValue()
BigDecimal(x).setScale(5, RoundingMode.HALF_UP).doubleValue,
BigDecimal(y).setScale(5, RoundingMode.HALF_UP).doubleValue
)

// Get the scene ID from the path
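The empty parentheses on doubleValue are dropped; presumably the () form is deprecated on 2.13, while the parameterless call compiles on both versions. A minimal sketch of the adjusted rounding (the coordinate value is illustrative):

    import scala.math.BigDecimal.RoundingMode
    // parameterless doubleValue works on 2.12.13 and 2.13.5 alike
    val roundedX = BigDecimal(-122.4194155).setScale(5, RoundingMode.HALF_UP).doubleValue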
@@ -23,8 +23,6 @@ import geotrellis.spark._
import geotrellis.spark.testkit._
import geotrellis.spark.testkit.TestEnvironment

import org.joda.time._

import org.scalatest.funsuite.AnyFunSuite
import org.scalatest.matchers.should.Matchers

14 changes: 8 additions & 6 deletions project/Dependencies.scala
@@ -34,10 +34,10 @@ object Version {
import sbt.Keys._

object Dependencies {
private def ver(for212: String, for213: Option[String] = None) = Def.setting {
private def ver(for212: String, for213: String) = Def.setting {
CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 12)) => for212
case Some((2, 13)) if for213.nonEmpty => for213.get
case Some((2, 13)) => for213
case _ => sys.error("not good")
}
}
@@ -61,10 +61,12 @@ object Dependencies {
"co.fs2" %% s"fs2-$module" % "2.5.3"
}

def apacheSpark(module: String) = Def.setting {
"org.apache.spark" %% s"spark-$module" % ver("3.0.1", "3.2.0-SNAPSHOT").value
}
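The fixed sparkCore/sparkSql values below are replaced by this version-aware helper: the 2.12 build stays on Spark 3.0.1, while the 2.13 build resolves a 3.2.0-SNAPSHOT (presumably why the apache-snapshots resolver is added in project/Settings.scala). Modules consume it as in this sketch, mirroring the Settings.scala hunks further down:

    // .value resolves the setting against the module's scalaVersion:
    // "3.0.1" on 2.12.13, "3.2.0-SNAPSHOT" on 2.13.5
    libraryDependencies ++= Seq(
      apacheSpark("core").value % Provided,
      apacheSpark("sql").value  % Test
    )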

def scalaReflect(version: String) = "org.scala-lang" % "scala-reflect" % version

val sparkCore = "org.apache.spark" %% "spark-core" % Version.spark
val sparkSql = "org.apache.spark" %% "spark-sql" % Version.spark
val pureconfig = "com.github.pureconfig" %% "pureconfig" % "0.14.0"
val log4s = "org.log4s" %% "log4s" % "1.9.0"
val scalatest = "org.scalatest" %% "scalatest" % "3.2.5"
@@ -73,8 +75,8 @@ object Dependencies {
val jts = "org.locationtech.jts" % "jts-core" % "1.17.1"
val proj4j = "org.locationtech.proj4j" % "proj4j" % "1.1.1"
val openCSV = "com.opencsv" % "opencsv" % "5.3"
val spire = "org.spire-math" %% "spire" % Version.spire
val spireMacro = "org.spire-math" %% "spire-macros" % Version.spire
val spire = "org.typelevel" %% "spire" % Version.spire
val spireMacro = "org.typelevel" %% "spire-macros" % Version.spire
val apacheIO = "commons-io" % "commons-io" % "2.8.0"
val apacheLang3 = "org.apache.commons" % "commons-lang3" % "3.12.0"
val apacheMath = "org.apache.commons" % "commons-math3" % "3.6.1"
63 changes: 40 additions & 23 deletions project/Settings.scala
@@ -29,14 +29,15 @@ import java.io.File

object Settings {
object Repositories {
val apacheSnapshots = "apache-snapshots" at "https://repository.apache.org/content/repositories/snapshots/"
val eclipseReleases = "eclipse-releases" at "https://repo.eclipse.org/content/groups/releases"
val osgeoReleases = "osgeo-releases" at "https://repo.osgeo.org/repository/release/"
val geosolutions = "geosolutions" at "https://maven.geo-solutions.it/"
val ivy2Local = Resolver.file("local", file(Path.userHome.absolutePath + "/.ivy2/local"))(Resolver.ivyStylePatterns)
val mavenLocal = Resolver.mavenLocal
val maven = DefaultMavenRepository
val local = Seq(ivy2Local, mavenLocal)
val external = Seq(osgeoReleases, maven, eclipseReleases, geosolutions)
val external = Seq(osgeoReleases, maven, eclipseReleases, geosolutions, apacheSnapshots)
val all = external ++ local
}

@@ -56,7 +57,7 @@ object Settings {
"-language:existentials",
"-language:experimental.macros",
"-feature",
"-Ypartial-unification", // required by Cats
// "-Ypartial-unification", // required by Cats
// "-Yrangepos", // required by SemanticDB compiler plugin
// "-Ywarn-unused-import", // required by `RemoveUnused` rule
"-target:jvm-1.8")
@@ -93,9 +94,25 @@
).filter(_.asFile.canRead).map(Credentials(_)),

addCompilerPlugin("org.typelevel" %% "kind-projector" % "0.11.3" cross CrossVersion.full),
addCompilerPlugin("org.scalamacros" %% "paradise" % "2.1.1" cross CrossVersion.full),
addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.10" cross CrossVersion.full),

libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Seq.empty
case Some((2, 12)) => Seq(
compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full),
"org.scala-lang.modules" %% "scala-collection-compat" % "2.4.2"
)
case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}")
}),
Compile / scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Seq(
"-Ymacro-annotations" // replaces paradise in 2.13
// "-Wconf:cat=deprecation&msg=Auto-application:silent" // there are many of these, silence until fixed
)
case Some((2, 12)) => Seq("-Ypartial-unification") // required by Cats
case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}")
}),
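Macro annotations need the paradise compiler plugin on 2.12 but are built into the 2.13 compiler behind -Ymacro-annotations, and scala-collection-compat backports the 2.13 collections API, so it is only added on 2.12; hence the version split above. A small hedged illustration of the kind of code these settings enable (circe's @JsonCodec macro annotation, assuming circe-generic is on the classpath; the case class is made up):

    import io.circe.generic.JsonCodec
    // expands via macro paradise on 2.12 and via -Ymacro-annotations on 2.13
    @JsonCodec case class LayerTag(name: String, value: Int)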

pomExtra := (
<developers>
<developer>
@@ -149,8 +166,8 @@
exclude("org.jboss.netty", "netty")
exclude("org.apache.hadoop", "hadoop-client"),
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
console / initialCommands :=
@@ -203,8 +220,8 @@
ExclusionRule("org.slf4j"), ExclusionRule("com.typesafe.akka")
) exclude("org.apache.hadoop", "hadoop-client"),
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
console / initialCommands :=
@@ -225,9 +242,9 @@
name := "geotrellis-doc-examples",
scalacOptions ++= commonScalacOptions,
libraryDependencies ++= Seq(
sparkCore,
apacheSpark("core").value,
scalatest % Test,
sparkSql % Test
apacheSpark("sql").value % Test
)
)

@@ -320,8 +337,8 @@
scalaArm,
kryoSerializers exclude("com.esotericsoftware", "kryo"),
kryoShaded,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
assembly / assemblyMergeStrategy := {
@@ -378,8 +395,8 @@
name := "geotrellis-hbase-spark",
libraryDependencies ++= Seq(
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
console / initialCommands :=
@@ -499,8 +516,8 @@
name := "geotrellis-s3-spark",
libraryDependencies ++= Seq(
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
mimaPreviousArtifacts := Set(
@@ -534,9 +551,9 @@
lazy val spark = Seq(
name := "geotrellis-spark",
libraryDependencies ++= Seq(
sparkCore % Provided,
apacheSpark("core").value % Provided,
hadoopClient % Provided,
sparkSql % Test,
apacheSpark("sql").value % Test,
scalatest % Test
),
mimaPreviousArtifacts := Set(
@@ -559,8 +576,8 @@
libraryDependencies ++= Seq(
circe("generic-extras").value,
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
assembly / test := {},
@@ -589,8 +606,8 @@
name := "geotrellis-spark-testkit",
libraryDependencies ++= Seq(
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Provided,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Provided,
scalatest
)
) ++ commonSettings
@@ -694,8 +711,8 @@
libraryDependencies ++= Seq(
gdalWarp,
hadoopClient % Provided,
sparkCore % Provided,
sparkSql % Test,
apacheSpark("core").value % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
Test / fork := true,
4 changes: 4 additions & 0 deletions publish/publish-to-sonatype-213.sh
@@ -0,0 +1,4 @@
#!/bin/bash
# Publish to Sonatype for Scala 2.13

./sbt -213 publishSigned -no-colors -J-Drelease=sonatype
2 changes: 1 addition & 1 deletion publish/publish-to-sonatype.sh
@@ -1,3 +1,3 @@
#!/bin/bash

./publish/publish-to-sonatype-212.sh
./publish/publish-to-sonatype-212.sh # && ./publish/publish-to-sonatype-213.sh
@@ -368,7 +368,10 @@ object IndexedColorMap {
)
/** Converts a ColorMap to sequence of short triplets in encoding expected by GeoTiff 'Palette' color space.*/
def toTiffPalette(cm: ColorMap): Seq[(Short, Short, Short)] =
fromColorMap(cm).colors.map(c => (upsample(c.red), upsample(c.green), upsample(c.blue)))
fromColorMap(cm).colors.map { c =>
val rgba = RGBA(c)
(upsample(rgba.red), upsample(rgba.green), upsample(rgba.blue))
}

/** Flattens the given colormap into an indexed variant, throwing away any defined boundaries. */
def fromColorMap(cm: ColorMap) = new IndexedColorMap(cm.colors)
24 changes: 12 additions & 12 deletions raster/src/main/scala/geotrellis/raster/render/ColorRamp.scala
@@ -38,14 +38,14 @@ class ColorRamp(val colors: Vector[Int]) extends Serializable {
* gradient (default 255)
*/
def setAlphaGradient(start: Int = 0, stop: Int = 0xFF): ColorRamp = {
val alphas = ColorRamp.chooseColors(Vector(start, stop), colors.length).map(_.alpha)
val alphas = ColorRamp.chooseColors(Vector(start, stop), colors.length).map(RGBA(_).alpha)

val newColors =
colors
.zip(alphas)
.map { case (color, a) =>
val (r, g, b) = color.unzipRGB
RGBA(r, g, b, a).int
val (r, g, b) = RGBA(color).unzipRGB
RGBA.fromRGBA(r, g, b, a).int
}

ColorRamp(newColors)
@@ -56,8 +56,8 @@
val newColors =
colors
.map { color =>
val (r, g, b) = color.unzipRGB
RGBA(r, g, b, a).int
val (r, g, b) = RGBA(color).unzipRGB
RGBA.fromRGBA(r, g, b, a).int
}

ColorRamp(newColors)
@@ -68,8 +68,8 @@
val newColors =
colors
.map { color =>
val (r, g, b) = color.unzipRGB
RGBA(r, g, b, alphaPct).int
val (r, g, b) = RGBA(color).unzipRGB
RGBA.fromRGBA(r, g, b, alphaPct).int
}

ColorRamp(newColors)
@@ -206,10 +206,10 @@ object ColorRamp {
private def getColorSequence(n: Int)(getRanges: (Int => Int) => Array[Int]): Vector[Int] = n match {
case n if n < 1 => Vector.empty[Int]
case _ => {
val unzipR = { color: Int => color.red }
val unzipG = { color: Int => color.green }
val unzipB = { color: Int => color.blue }
val unzipA = { color: Int => color.alpha }
val unzipR = { color: Int => RGBA(color).red }
val unzipG = { color: Int => RGBA(color).green }
val unzipB = { color: Int => RGBA(color).blue }
val unzipA = { color: Int => RGBA(color).alpha }
val rs = getRanges(unzipR)
val gs = getRanges(unzipG)
val bs = getRanges(unzipB)
@@ -218,7 +218,7 @@
val theColors = new Array[Int](n)
var i = 0
while (i < n) {
theColors(i) = RGBA(rs(i), gs(i), bs(i), as(i))
theColors(i) = RGBA.fromRGBA(rs(i), gs(i), bs(i), as(i)).int
i += 1
}
theColors.toVector
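Throughout the render code the old implicit Int-to-RGBA enrichment (color.red, color.unzipRGB, RGBA(r, g, b, a)) is replaced with explicit wrapping and a named factory, presumably because the RGBA syntax was reworked for the 2.13 build. A sketch of the new call pattern as used above (the packed color value is illustrative):

    val packed: Int = 0x3366CCFF                       // RGBA channels packed into an Int
    val rgba        = RGBA(packed)                     // explicit wrap replaces the implicit enrichment
    val (r, g, b)   = rgba.unzipRGB                    // channel access now goes through the wrapper
    val halfAlpha   = RGBA.fromRGBA(r, g, b, 0x80).int // named factory replaces the old RGBA(r, g, b, a)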