Skip to content

Commit

Permalink
Fix tests failing because pyspark 2.2.0 is missing: resolve Spark 2.2.0 to pyspark 2.2.1
Browse files Browse the repository at this point in the history
  • Loading branch information
allwefantasy committed Nov 29, 2018
1 parent 051bf79 commit c909117
Showing 1 changed file with 12 additions and 4 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,13 @@ import net.sf.json.JSONArray
import org.apache.http.client.fluent.{Form, Request}
import org.apache.spark.SparkCoreVersion
import org.apache.spark.streaming.BasicSparkOperation
import streaming.common.ScalaMethodMacros._
import streaming.common.shell.ShellCommand
import streaming.core.strategy.platform.SparkRuntime
import streaming.core.{BasicMLSQLConfig, SpecFunctions}
import streaming.dsl.ScriptSQLExec
import streaming.dsl.template.TemplateMerge
import streaming.test.pythonalg.code.ScriptCode
import streaming.common.ScalaMethodMacros._
import streaming.common.shell.ShellCommand

import scala.io.Source

Expand All @@ -35,6 +35,12 @@ class PythonMLSpec2 extends BasicSparkOperation with SpecFunctions with BasicMLS
getHome + "examples/" + name
}

/** Resolves the pyspark version to substitute into conda.yaml.
  *
  * NOTE(review): per the commit intent, pyspark 2.2.0 appears to be
  * unavailable as an installable package, so that single version is
  * remapped to 2.2.1; every other Spark core version is passed through
  * unchanged. Confirm against the pyspark release listing.
  */
def getPysparkVersion = SparkCoreVersion.exactVersion match {
  case "2.2.0" => "2.2.1"
  case other   => other
}

"SQLPythonAlgTrain" should "work fine" in {
withBatchContext(setupBatchContext(batchParamsWithAPI, "classpath:///test/empty.json")) { runtime: SparkRuntime =>
//执行sql
Expand All @@ -48,7 +54,8 @@ class PythonMLSpec2 extends BasicSparkOperation with SpecFunctions with BasicMLS
var newpath = s"/tmp/${UUID.randomUUID().toString}"
ShellCommand.execCmd(s"cp -r ${projectPath} $newpath")

val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"), Map("SPARK_VERSION" -> SparkCoreVersion.exactVersion))
val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"),
Map("SPARK_VERSION" -> getPysparkVersion))
Files.write(newcondafile, new File(newpath + "/conda.yaml"), Charset.forName("utf-8"))

projectPath = newpath
Expand Down Expand Up @@ -105,7 +112,8 @@ class PythonMLSpec2 extends BasicSparkOperation with SpecFunctions with BasicMLS
var newpath = s"/tmp/${UUID.randomUUID().toString}"
ShellCommand.execCmd(s"cp -r ${projectPath} $newpath")

val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"), Map("SPARK_VERSION" -> SparkCoreVersion.exactVersion))
val newcondafile = TemplateMerge.merge(Source.fromFile(new File(newpath + "/conda.yaml")).getLines().mkString("\n"),
Map("SPARK_VERSION" -> getPysparkVersion))
Files.write(newcondafile, new File(newpath + "/conda.yaml"), Charset.forName("utf-8"))

projectPath = newpath
Expand Down

0 comments on commit c909117

Please sign in to comment.