build.sc
import $ivy.`io.chris-kipp::mill-ci-release::0.1.10`
import io.kipp.mill.ci.release.CiReleaseModule
import coursier.maven.MavenRepository
import mill._, scalalib._, publish._
import Assembly._
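// One cross-built module per (Scala version, Spark version) pair: crossValue is the Scala
// version, crossValue2 the Spark version.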
trait SparkModule extends Cross.Module2[String, String] with SbtModule with CiReleaseModule {
  outer =>

  override def scalaVersion = crossValue
  val sparkVersion = crossValue2
  val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
  val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
  override def millSourcePath = super.millSourcePath / os.up
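  // Extractors that compare another version string ("major", "major.minor", or
  // "major.minor.patch") against the Spark version being built; used below to match
  // version-ranged source directory names.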
  object LowerOrEqual {
    def unapply(otherVersion: String): Boolean = otherVersion match {
      case s"${sparkMaj}.${sparkMin}.${sparkPat}" =>
        sparkMaj == sparkMajor && (sparkMin < sparkMinor || (sparkMin == sparkMinor && sparkPat <= sparkPatch))
      case s"${sparkMaj}.${sparkMin}" => sparkMaj == sparkMajor && sparkMin <= sparkMinor
      case sparkMaj => sparkMaj == sparkMajor
    }
  }

  object HigherOrEqual {
    def unapply(otherVersion: String): Boolean = otherVersion match {
      case s"${sparkMaj}.${sparkMin}.${sparkPat}" =>
        sparkMaj == sparkMajor && (sparkMin > sparkMinor || (sparkMin == sparkMinor && sparkPat >= sparkPatch))
      case s"${sparkMaj}.${sparkMin}" => sparkMaj == sparkMajor && sparkMin >= sparkMinor
      case sparkMaj => sparkMaj == sparkMajor
    }
  }
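  // Besides src/main/scala, also compile any src/main/<dir> whose name is the Spark binary
  // version or follows the "<version>_and_up" / "<version>_to_<version>" pattern and matches
  // the Spark version being built.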
  def sparkVersionSpecificSources = T {
    val versionSpecificDirs = os.list(os.pwd / "src" / "main")
    val Array(sparkMajor, sparkMinor, sparkPatch) = sparkVersion.split("\\.")
    val sparkBinaryVersion = s"$sparkMajor.$sparkMinor"
    versionSpecificDirs.filter(_.last match {
      case "scala" => true
      case `sparkBinaryVersion` => true
      case s"${LowerOrEqual()}_and_up" => true
      case s"${LowerOrEqual()}_to_${HigherOrEqual()}" => true
      case _ => false
    })
  }
  override def sources = T.sources {
    super.sources() ++ sparkVersionSpecificSources().map(PathRef(_))
  }

  override def docSources = T.sources(Seq[PathRef]())
  override def artifactName = "spark-excel"
  override def publishVersion = s"${sparkVersion}_${super.publishVersion()}"
  def pomSettings = PomSettings(
    description = "A Spark plugin for reading and writing Excel files",
    organization = "com.crealytics",
    url = "https://github.com/crealytics/spark-excel",
    licenses = Seq(License.`Apache-2.0`),
    versionControl = VersionControl.github("crealytics", "spark-excel"),
    developers = Seq(Developer("nightscape", "Martin Mauch", "https://github.com/nightscape"))
  )
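  // Assembly: concatenate *.conf files and relocate (shade) commons-io and commons-compress
  // so they cannot clash with the versions bundled with Spark.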
  def assemblyRules = Seq(
    Rule.AppendPattern(".*\\.conf"), // all *.conf files will be concatenated into a single file
    Rule.Relocate("org.apache.commons.io.**", "shadeio.commons.io.@1"),
    Rule.Relocate("org.apache.commons.compress.**", "shadeio.commons.compress.@1")
  )

  override def extraPublish = Seq(PublishInfo(assembly(), classifier = None, ivyConfig = "compile"))
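  // Spark itself is a compile-time-only dependency and is not bundled into the published artifacts.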
  val sparkDeps = Agg(
    ivy"org.apache.spark::spark-core:$sparkVersion",
    ivy"org.apache.spark::spark-sql:$sparkVersion",
    ivy"org.apache.spark::spark-hive:$sparkVersion"
  )

  override def compileIvyDeps = if (sparkVersion < "3.3.0") {
    sparkDeps ++ Agg(ivy"org.slf4j:slf4j-api:1.7.36".excludeOrg("stax"))
  } else {
    sparkDeps
  }
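  // Apache POI and its helper libraries, pinned explicitly; log4j-core is only added for Spark 3.3.0+.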
  val poiVersion = "5.2.5"

  override def ivyDeps = {
    val base = Agg(
      ivy"org.apache.poi:poi:$poiVersion",
      ivy"org.apache.poi:poi-ooxml:$poiVersion",
      ivy"org.apache.poi:poi-ooxml-lite:$poiVersion",
      ivy"org.apache.xmlbeans:xmlbeans:5.2.1",
      ivy"com.norbitltd::spoiwo:2.2.1",
      ivy"com.github.pjfanning:excel-streaming-reader:4.3.1",
      ivy"com.github.pjfanning:poi-shared-strings:2.8.0",
      ivy"commons-io:commons-io:2.16.1",
      ivy"org.apache.commons:commons-compress:1.26.2",
      ivy"org.apache.logging.log4j:log4j-api:2.23.1",
      ivy"com.zaxxer:SparseBitSet:1.3",
      ivy"org.apache.commons:commons-collections4:4.4",
      ivy"com.github.virtuald:curvesapi:1.08",
      ivy"commons-codec:commons-codec:1.17.0",
      ivy"org.apache.commons:commons-math3:3.6.1",
      ivy"org.scala-lang.modules::scala-collection-compat:2.12.0"
    )
    if (sparkVersion >= "3.3.0") {
      base ++ Agg(ivy"org.apache.logging.log4j:log4j-core:2.23.1")
    } else {
      base
    }
  }
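  // Test module: adds jitpack.io as an extra resolver and runs ScalaTest-based suites.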
  object test extends SbtModuleTests with TestModule.ScalaTest {
    override def millSourcePath = super.millSourcePath
    override def sources = T.sources {
      Seq(PathRef(millSourcePath / "src" / "test" / "scala"))
    }
    override def resources = T.sources {
      Seq(PathRef(millSourcePath / "src" / "test" / "resources"))
    }
    def scalaVersion = outer.scalaVersion()
    def repositoriesTask = T.task {
      super.repositoriesTask() ++ Seq(MavenRepository("https://jitpack.io"))
    }
    def ivyDeps = sparkDeps ++ Agg(
      ivy"org.typelevel::cats-core:2.12.0",
      ivy"org.scalatest::scalatest:3.2.18",
      ivy"org.scalatestplus::scalacheck-1-16:3.2.14.0",
      ivy"org.scalacheck::scalacheck:1.18.0",
      ivy"com.github.alexarchambault::scalacheck-shapeless_1.15:1.3.0",
      ivy"com.github.mrpowers::spark-fast-tests:1.3.0",
      ivy"org.scalamock::scalamock:5.2.0"
    )
  }
}
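// Supported Scala and Spark versions; Scala 2.13 is only cross-built for Spark 3.2+.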
val scala213 = "2.13.14"
val scala212 = "2.12.19"
val spark24 = List("2.4.1", "2.4.7", "2.4.8")
val spark30 = List("3.0.1", "3.0.3")
val spark31 = List("3.1.1", "3.1.2", "3.1.3")
val spark32 = List("3.2.4")
val spark33 = List("3.3.4")
val spark34 = List("3.4.1", "3.4.3")
val spark35 = List("3.5.1")
val sparkVersions = spark24 ++ spark30 ++ spark31 ++ spark32 ++ spark33 ++ spark34 ++ spark35
val crossMatrix =
  sparkVersions.map(spark => (scala212, spark)) ++
    sparkVersions.filter(_ >= "3.2").map(spark => (scala213, spark))
object `spark-excel` extends Cross[SparkModule](crossMatrix) {}
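// Usage sketch (assumes the standard Mill launcher script; the version pair must be in crossMatrix):
//   ./mill "spark-excel[2.13.14,3.5.1].test"
//   ./mill "spark-excel[2.13.14,3.5.1].assembly"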