Support for Spark 2.4.3 #238

Open · wants to merge 5 commits into master
.travis.yml (8 changes: 4 additions & 4 deletions)
@@ -8,12 +8,12 @@ cache:
 matrix:
   include:
   - jdk: oraclejdk8
-    scala: 2.11.8
+    scala: 2.11.12
     env: TEST_HADOOP_VERSION="2.7.3" TEST_SPARK_VERSION="2.3.1"
   - jdk: oraclejdk8
-    scala: 2.11.8
-    env: TEST_HADOOP_VERSION="2.2.0" TEST_SPARK_VERSION="2.1.0"
+    scala: 2.11.12
+    env: TEST_HADOOP_VERSION="2.7.7" TEST_SPARK_VERSION="2.4.3"
 script:
-  - sbt -Dhadoop.testVersion=$TEST_HADOOP_VERSION -Dspark.testVersion=$TEST_SPARK_VERSION ++$TRAVIS_SCALA_VERSION coverage test
+  - sbt -Dhadoop.testVersion=$TEST_HADOOP_VERSION -Dspark.testVersion=$TEST_SPARK_VERSION ++$TRAVIS_SCALA_VERSION clean coverage test coverageReport
 after_success:
   - bash <(curl -s https://codecov.io/bash)
build.sbt (31 changes: 21 additions & 10 deletions)
@@ -4,11 +4,11 @@ version := "1.0.6-SNAPSHOT"

organization := "harsha2010"

scalaVersion := "2.11.8"
scalaVersion := "2.11.12"

crossScalaVersions := Seq("2.11.8")
crossScalaVersions := Seq("2.11.12")

sparkVersion := "2.3.1"
sparkVersion := "2.4.3"

scalacOptions += "-optimize"

@@ -18,20 +18,30 @@ testSparkVersion := sys.props.get("spark.testVersion").getOrElse(sparkVersion.value)

 val testHadoopVersion = settingKey[String]("The version of Hadoop to test against.")
 
-testHadoopVersion := sys.props.getOrElse("hadoop.testVersion", "2.7.3")
+testHadoopVersion := sys.props.getOrElse("hadoop.testVersion", "2.7.7")
 
 sparkComponents := Seq("core", "sql")
 
 libraryDependencies ++= Seq(
-  "commons-io" % "commons-io" % "2.4",
-  "com.google.guava" % "guava" % "14.0.1" % "provided",
+  "commons-io" % "commons-io" % "2.6",
+  "com.google.guava" % "guava" % "27.1-jre",
   "org.slf4j" % "slf4j-api" % "1.7.16" % "provided",
-  "com.lihaoyi" % "fastparse_2.11" % "0.4.3" % "provided",
-  "org.scalatest" %% "scalatest" % "2.2.1" % "test",
+  "com.lihaoyi" %% "fastparse" % "0.4.4" % "provided",
+  "org.scalatest" %% "scalatest" % "3.0.5" % "test",
   "com.vividsolutions" % "jts" % "1.13" % "test",
-  "com.esri.geometry" % "esri-geometry-api" % "1.2.1"
+  "com.esri.geometry" % "esri-geometry-api" % "1.2.1",
+  "com.fasterxml.jackson.core" % "jackson-core" % "2.9.8",
+  "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.8",
+  "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.8",
+  "com.fasterxml.jackson.jaxrs" % "jackson-jaxrs-base" % "2.9.8",
+  "com.fasterxml.jackson.jaxrs" % "jackson-jaxrs-json-provider" % "2.9.8",
+  "com.fasterxml.jackson.module" % "jackson-module-paranamer" % "2.9.8",
+  "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.8"
+
 )
 
+libraryDependencies += "org.scala-lang" % "scala-compiler" % scalaVersion.value % "provided"
+
 libraryDependencies ++= Seq(
   "org.apache.hadoop" % "hadoop-client" % testHadoopVersion.value % "test",
   "org.apache.spark" %% "spark-core" % testSparkVersion.value % "test" exclude("org.apache.hadoop", "hadoop-client"),
@@ -81,7 +91,8 @@ spName := "harsha2010/magellan"

 parallelExecution in Test := false
 
-ScoverageSbtPlugin.ScoverageKeys.coverageHighlighting := {
+
+scoverage.ScoverageKeys.coverageHighlighting := {
   if (scalaBinaryVersion.value == "2.10") false
   else true
 }
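Note on the Jackson additions above: jackson-module-scala verifies at registration time that the jackson-core and jackson-databind versions on the classpath fall within its supported range, so every Jackson artifact is listed at a single version (2.9.8). Below is a minimal sketch of another way to keep those versions aligned, assuming sbt 0.13.x (where dependencyOverrides takes a Set[ModuleID]); it is not part of this PR, and the 2.9.8 pin is simply carried over from the diff above rather than dictated by Spark itself.

    // Not part of this PR: pin transitive Jackson artifacts to one version via overrides.
    dependencyOverrides ++= Set(
      "com.fasterxml.jackson.core" % "jackson-core" % "2.9.8",
      "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.8",
      "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.8",
      "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.8"
    )

Unlike explicit libraryDependencies entries, overrides only affect the versions of artifacts that are already pulled in transitively.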
project/assembly.sbt (2 changes: 1 addition & 1 deletion)
@@ -1 +1 @@
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.13.0")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.9")
project/build.properties (2 changes: 1 addition & 1 deletion)
@@ -13,4 +13,4 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-sbt.version=0.13.6
+sbt.version=0.13.18
project/plugins.sbt (4 changes: 1 addition & 3 deletions)
@@ -1,7 +1,5 @@
resolvers += "Spark Package Main Repo" at "https://dl.bintray.com/spark-packages/maven"

addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.2.0")

addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0")

addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.2")
@@ -19,6 +17,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-unidoc" % "0.3.3")

addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0")

addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.1.0")
addSbtPlugin("org.scoverage" % "sbt-scoverage" % "1.5.1")

addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.7")
src/main/scala/magellan/catalyst/SpatialJoin.scala (10 changes: 5 additions & 5 deletions)
@@ -114,19 +114,19 @@ private[magellan] case class SpatialJoin(session: SparkSession)
     // isGenerated: java.lang.Boolean = false
     val klass = Class.forName("org.apache.spark.sql.catalyst.expressions.AttributeReference")
     val ctor = klass.getConstructors.apply(0)
-    val nullable = true.asInstanceOf[AnyRef]
+    val nullable = true
     val metadata = Metadata.empty
     val exprId = NamedExpression.newExprId
-    val qualifier = None
+    val qualifier = Seq.empty[String]
     val isGenerated = false.asInstanceOf[AnyRef]
     if (ctor.getParameterCount == 7) {
       // prior to Spark 2.3
-      ctor.newInstance(name, dt, nullable, metadata, exprId, qualifier, isGenerated)
+      ctor.newInstance(name, dt, nullable.asInstanceOf[AnyRef], metadata, exprId, qualifier, isGenerated)
         .asInstanceOf[Attribute]
     } else {
       // Spark 2.3 +
-      ctor.newInstance(name, dt, nullable, metadata, exprId, qualifier)
-        .asInstanceOf[Attribute]
+      AttributeReference(name, dt, nullable, metadata)(exprId, qualifier).asInstanceOf[Attribute]
+
     }
 
   }
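For context on the change above: the second parameter list of AttributeReference changed its qualifier argument from Option[String] (Spark 2.3) to Seq[String] (Spark 2.4), which is why the patch switches the qualifier value from None to Seq.empty[String] and, in the non-reflective branch, calls the case-class constructor directly. A minimal sketch of that direct call, assuming compilation against Spark 2.4.x; the attribute name and data type are placeholders, not values taken from this PR:

    import org.apache.spark.sql.catalyst.expressions.{AttributeReference, NamedExpression}
    import org.apache.spark.sql.types.{IntegerType, Metadata}

    // Build a fresh attribute the way the else-branch does; in Spark 2.4 the
    // qualifier is a Seq[String], so an unqualified attribute gets Seq.empty.
    val attr = AttributeReference("index", IntegerType, nullable = true, Metadata.empty)(
      exprId = NamedExpression.newExprId,
      qualifier = Seq.empty[String])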
src/main/scala/magellan/mapreduce/ShapeInputFormat.scala (4 changes: 2 additions & 2 deletions)
@@ -47,7 +47,7 @@ private[magellan] class ShapeInputFormat
     job: JobContext,
     splitInfos: scala.collection.Map[String, Array[Long]]) = {
 
-    val sw = new Stopwatch().start
+    val sw = Stopwatch.createUnstarted().start()
     val splits = ListBuffer[InputSplit]()
     val files = listStatus(job)
     for (file <- files) {
@@ -77,7 +77,7 @@
     }
     sw.stop
     if (log.isDebugEnabled) {
-      log.debug("Total # of splits generated by getSplits: " + splits.size + ", TimeTaken: " + sw.elapsedMillis)
+      log.debug("Total # of splits generated by getSplits: " + splits.size + ", TimeTaken: " + sw.elapsed(java.util.concurrent.TimeUnit.MILLISECONDS))
     }
     splits
   }
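The two Stopwatch changes above track Guava's current API: the public no-argument Stopwatch constructor and elapsedMillis() are gone in recent Guava releases (including the 27.1-jre that build.sbt now pulls in), replaced by factory methods and elapsed(TimeUnit). A standalone sketch of that usage, independent of the surrounding input-format code:

    import java.util.concurrent.TimeUnit
    import com.google.common.base.Stopwatch

    // Create, run and read a stopwatch with the factory-method API.
    val sw = Stopwatch.createStarted()   // same effect as createUnstarted().start()
    Thread.sleep(25)                     // stand-in for real work
    sw.stop()
    println(s"took ${sw.elapsed(TimeUnit.MILLISECONDS)} ms")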