Skip to content

Commit

Permalink
Adjust Raster and Spark tests
Browse files Browse the repository at this point in the history
  • Loading branch information
pomadchin committed Apr 8, 2021
1 parent c1d54b8 commit 0c91881
Show file tree
Hide file tree
Showing 6 changed files with 18 additions and 15 deletions.
15 changes: 9 additions & 6 deletions project/Settings.scala
Original file line number Diff line number Diff line change
Expand Up @@ -97,18 +97,15 @@ object Settings {
addCompilerPlugin("org.scalameta" % "semanticdb-scalac" % "4.4.10" cross CrossVersion.full),

libraryDependencies ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Seq.empty
case Some((2, 13)) => Nil
case Some((2, 12)) => Seq(
compilerPlugin("org.scalamacros" % "paradise" % "2.1.1" cross CrossVersion.full),
"org.scala-lang.modules" %% "scala-collection-compat" % "2.4.2"
)
case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}")
}),
Compile / scalacOptions ++= (CrossVersion.partialVersion(scalaVersion.value) match {
case Some((2, 13)) => Seq(
"-Ymacro-annotations" // replaces paradise in 2.13
// "-Wconf:cat=deprecation&msg=Auto-application:silent" // there are many of these, silence until fixed
)
case Some((2, 13)) => Seq("-Ymacro-annotations") // replaces paradise in 2.13
case Some((2, 12)) => Seq("-Ypartial-unification") // required by Cats
case x => sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}")
}),
Expand Down Expand Up @@ -141,6 +138,12 @@ object Settings {
)
)

/** Extra dependencies required for Spark modules, keyed by Scala binary version.
  * On 2.13 Spark needs the external parallel-collections module (the parallel
  * collections were removed from the standard library after 2.12); on 2.12
  * nothing extra is required.
  */
lazy val sparkCompatDependencies = Def.setting {
  CrossVersion.partialVersion(scalaVersion.value) match {
    case Some((2, 13)) =>
      // Spark relies on this module for parallel-collections compatibility on 2.13.
      Seq("org.scala-lang.modules" %% "scala-parallel-collections" % "0.2.0")
    case Some((2, 12)) =>
      Nil
    case x =>
      sys.error(s"Encountered unsupported Scala version ${x.getOrElse("undefined")}")
  }
}

lazy val accumulo = Seq(
name := "geotrellis-accumulo",
libraryDependencies ++= Seq(
Expand Down Expand Up @@ -555,7 +558,7 @@ object Settings {
hadoopClient % Provided,
apacheSpark("sql").value % Test,
scalatest % Test
),
) ++ sparkCompatDependencies.value,
mimaPreviousArtifacts := Set(
"org.locationtech.geotrellis" %% "geotrellis-spark" % Version.previousVersion
),
Expand Down
2 changes: 1 addition & 1 deletion publish/publish-to-sonatype-213.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/bin/bash
# Publish to sonatype for all supported scala version 2.12
# Publish to sonatype for all supported scala version 2.13

./sbt -213 publishSigned -no-colors -J-Drelease=sonatype
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,9 @@ class ZonalHistogramSpec extends AnyFunSpec
}

val expected =
zoneValues.toMap.mapValues { list =>
list.distinct
.map { v => (v, list.filter(_ == v).length) }
zoneValues.toMap.map { case (k, list) =>
k -> list.distinct
.map { v => (v, list.count(_ == v)) }
.toMap
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -56,9 +56,9 @@ class ZonalStatisticsSpec extends AnyFunSpec with Matchers with RasterMatchers w
}

val expected =
zoneValues.toMap.mapValues { list =>
list.distinct
.map { v => (v, list.filter(_ == v).length) }
zoneValues.toMap.map { case (k, list) =>
k -> list.distinct
.map { v => (v, list.count(_ == v)) }
.toMap
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class ColorSpec extends AnyFunSpec with Matchers {
describe("RGBA value class") {
it("should be able to create RGB values") {
// an RGB constructor should create an RGBA with a fully opaque A
RGB(1, 2, 3) should be (RGBA.fromRGBA(1, 2, 3, 255))
RGB(1, 2, 3) should be (RGBA.fromRGBA(1, 2, 3, 255).int)

// we need to be able to convert from RGBA to ARGB for the current jpg writer implementation
RGB(1, 2, 3) should be (RGBA.fromRGBA(2, 3, 255, 1).toARGB)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,6 @@ abstract class CombineMethods[K: ClassTag, V: ClassTag] extends MethodExtensions
val union = self.sparkContext.union(self :: others.toList)
partitioner
.fold(union.groupByKey(Partitioner.defaultPartitioner(self, others.toSeq: _*)))(union.groupByKey(_))
.mapValues { case tiles => f(tiles) }
.mapValues { tiles => f(tiles) }
}
}

0 comments on commit 0c91881

Please sign in to comment.