name := "spark-druid-olap"
version := "0.0.1"
organization := "SparklineData"
scalaVersion := "2.10.4"
parallelExecution in Test := false
crossScalaVersions := Seq("2.10.4", "2.11.6")
sparkVersion := "1.4.0"
spName := "SparklineData/spark-druid-olap"
//spAppendScalaVersion := true
scalacOptions += "-feature"
// All Spark Packages need a license
licenses := Seq("Apache-2.0" -> url("http://opensource.org/licenses/Apache-2.0"))
// Add Spark components this package depends on, e.g. "mllib", ...
sparkComponents ++= Seq("sql")
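// GitHub credentials, read from the user's home directory, used when publishing
// the Spark Package (path and usage assumed from the file name below).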
credentials += Credentials(Path.userHome / ".github.cred")
// uncomment and change the value below to change the directory where your zip artifact will be created
// spDistDirectory := target.value
// add any Spark Package dependencies using spDependencies.
// e.g. spDependencies += "databricks/spark-avro:0.1"
resolvers ++= Seq(
"JitPack.IO" at "https://jitpack.io",
Resolver.sonatypeRepo("public")
)
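// The JitPack resolver builds and serves SparklineData/spark-datetime directly
// from GitHub, so its "version" below is a git commit SHA rather than a release tag.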
val httpclientVersion = "4.5"
val json4sVersion = "3.2.10"
val scalatestVersion = "2.2.4"
val sparkdateTimeVersion = "9f6589a01fdd50250a8e155d5710490e1dd353b3"
val scoptVersion = "3.3.0"
libraryDependencies ++= Seq(
"org.apache.httpcomponents" % "httpclient" % httpclientVersion,
//"org.json4s" %% "json4s-native" % json4sVersion,
"org.json4s" %% "json4s-ext" % json4sVersion,
"com.github.SparklineData" % "spark-datetime" % sparkdateTimeVersion,
"com.github.scopt" %% "scopt" % scoptVersion,
"org.scalatest" %% "scalatest" % scalatestVersion % "test",
"com.databricks" %% "spark-csv" % "1.1.0" % "test"
)
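// Assembly: exclude the Scala library from the fat jar (Spark already provides it
// on the classpath) and skip running tests as part of the assembly task.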
assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false)
test in assembly := {}
spShortDescription := "Spark Druid Package" // one-line description of the package
spDescription := """|The Spark-Druid package enables 'Logical Plans' written against a raw event dataset
                    |to be rewritten to take advantage of a Druid Index of the event data. It
                    |comprises a 'Druid DataSource' that wraps the 'raw event dataset' and a
                    |'Druid Planner' that contains a set of rewrite rules to convert
                    |'Project-Filter-Aggregation-Having-Sort-Limit' plans to Druid Index REST calls.""".stripMargin
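
// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the build): how the Druid DataSource
// described above might be used from Spark SQL. The format name, table name,
// and option keys are assumptions for illustration only; consult the project
// README for the actual API.
//
//   import org.apache.spark.sql.SQLContext
//
//   val sqlContext = new SQLContext(sc)
//   sqlContext.sql(
//     """CREATE TEMPORARY TABLE events
//       |USING org.sparklinedata.druid
//       |OPTIONS (sourceDataframe "raw_events",
//       |         druidDatasource "events",
//       |         druidHost "localhost",
//       |         druidPort "8082")""".stripMargin)
//
//   // Aggregation plans (Project-Filter-Aggregation-Having-Sort-Limit) over
//   // 'events' can then be rewritten by the Druid Planner into Druid REST calls.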