/**
 * (C) Copyright IBM Corp. 2015 - 2017
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
import Dependencies._
import java.io.File
import java.nio.file.Files
import java.nio.file.StandardCopyOption.REPLACE_EXISTING
scalaVersion in ThisBuild := "2.11.8"
/*
**********************************************************************************
* Common Settings and val Definitions (have to be up here for forward reference) *
**********************************************************************************
*/
val sparkBenchJar = settingKey[String]("jar name and relative path for spark-bench")
val sparkBenchLaunchJar = settingKey[String]("jar name and relative path for spark-bench-launch")
val assemblyFile = settingKey[String]("folder where assembled jars go")
val sparklaunchTestResourcesJarsFile = settingKey[String]("folder where jars needed by the spark-launch tests are copied")
lazy val commonSettings = Seq(
  organization := "com.ibm.sparktc",
  parallelExecution in Test := false,
  test in assembly := {},
  sparkBenchJar := s"spark-bench-${version.value}.jar",
  sparkBenchLaunchJar := s"spark-bench-launch-${version.value}.jar",
  assemblyFile := s"${baseDirectory.value.getParent}/target/assembly",
  sparklaunchTestResourcesJarsFile := s"${baseDirectory.value.getPath}/src/test/resources/jars/",
  testOptions in Test += Tests.Argument("-oF")
)
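/*
 Illustrative example (not part of the original build): a subproject can override any
 of these defaults in its own .settings(...) block, e.g. to redirect assembled jars:

   lazy val someProject = project.settings(
     commonSettings,
     assemblyFile := s"${baseDirectory.value.getParent}/dist/assembly"
   )

 `someProject` and the dist/assembly path are hypothetical, used only for illustration.
*/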
/*
 ***************************
 *        PROJECTS         *
 ***************************
*/
lazy val utils = project
  .settings(
    commonSettings,
    libraryDependencies ++= sparkDeps,
    libraryDependencies ++= typesafe,
    libraryDependencies ++= testDeps
  )
/*
There's some extra code here to clean up any previously created jars before assembly.
Assembly is triggered both by a plain `sbt assembly` and by `sbt spark-launch/test`.
See the note below about why spark-launch/test calls cli/assembly.
*/
val cleanJars = TaskKey[Unit]("cleanJars", "remove jars before assembling jar for spark-launch test")
lazy val cli = project
  .settings(
    commonSettings,
    name := "spark-bench",
    cleanJars := {
      val log = streams.value.log
      log.info("Cleaning up jars before assembling")
      // Run the rm through a shell so the *.jar glob is expanded; sys.process does not glob.
      Seq("sh", "-c", s"rm -f ${assemblyFile.value}/*.jar").!
      Seq("sh", "-c", "rm -f ./spark-launch/src/test/resources/jars/*.jar").!
      log.info("Done cleaning jars.")
    },
    assembly in Compile := {
      ((assembly in Compile) dependsOn (cleanJars in Test)).value
    },
    mainClass in assembly := Some("com.ibm.sparktc.sparkbench.cli.CLIKickoff"),
    assemblyOutputPath in assembly := new File(s"${assemblyFile.value}/${sparkBenchJar.value}"),
    libraryDependencies ++= sparkDeps,
    libraryDependencies ++= otherCompileDeps,
    libraryDependencies ++= testDeps,
    libraryDependencies ++= typesafe,
    libraryDependencies ++= breezeDeps
  )
  .dependsOn(utils % "compile->compile;test->test")
  .aggregate(utils)
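/*
 Usage sketch (assumed invocation): `sbt cli/assembly` runs cleanJars first and then
 writes the fat jar to target/assembly/spark-bench-<version>.jar, per the assemblyFile
 and assemblyOutputPath settings above.
*/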
lazy val `test-workloads` = project
  .settings(
    commonSettings,
    name := "test-workloads",
    libraryDependencies ++= sparkDeps,
    libraryDependencies ++= testDeps
  )
  .dependsOn(utils % "compile->compile;test->test", cli % "compile->compile")
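// test-workloads is packaged (via Keys.`package`, not assembly) and copied into
// spark-launch's test resources by moveJar below, so the tests can exercise a
// custom-workload jar alongside the spark-bench fat jar.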
/*
spark-launch relies on the cli fat jar, which it launches through spark-submit. So in
order to test spark-launch, we have to assemble the cli jar and move it into a folder
that's accessible to spark-launch's test code.
*/
val moveJar = TaskKey[Unit]("moveJars", "move the assembled jars for spark-launch test")
val removeJar = TaskKey[Unit]("removeJars", "deletes the jars from the test resources folder")
lazy val `spark-launch` = project
  .settings(
    removeJar := {
      val log = streams.value.log
      log.info("Removing cli jars assembled for test.")
      s"rm -rf ${sparklaunchTestResourcesJarsFile.value}".!
      log.info("Done removing jars")
    },
    moveJar in Test := {
      val log = streams.value.log
      // NB: the .value calls below are task dependencies, which sbt evaluates before
      // this body runs; the assembly and package steps are already done by the time
      // these log lines print.
      log.info("Assembling spark-bench and custom-workload JARs...")
      (assembly in Compile in cli).value
      log.info("Moving assembled JARs to resources folder for test")
      s"mkdir -p ${sparklaunchTestResourcesJarsFile.value}".!
      s"cp ${assemblyFile.value}/${sparkBenchJar.value} ${sparklaunchTestResourcesJarsFile.value}".!
      val customTestJar = (Keys.`package` in Compile in `test-workloads`).value
      s"cp $customTestJar ${sparklaunchTestResourcesJarsFile.value}".!
      log.info("Done moving files.")
    },
    test in Test := {
      ((test in Test) dependsOn (moveJar in Test)).value
    },
    commonSettings,
    name := "spark-bench-launch",
    mainClass in assembly := Some("com.ibm.sparktc.sparkbench.sparklaunch.SparkLaunch"),
    assemblyOutputPath in assembly := new File(s"${assemblyFile.value}/${sparkBenchLaunchJar.value}"),
    libraryDependencies ++= sparkDeps,
    libraryDependencies ++= otherCompileDeps,
    libraryDependencies ++= testDeps,
    libraryDependencies ++= typesafe,
    libraryDependencies ++= jsonCreation
  )
  .dependsOn(utils % "compile->compile;test->test", cli % "compile->compile;test->test")
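/*
 Test flow sketch (assumed invocation): `sbt spark-launch/test` runs test -> moveJar,
 which triggers cli/assembly and test-workloads/package, then copies both jars into
 src/test/resources/jars/ so the tests can launch them through spark-submit.
*/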
/*
 *******************************
 *        CUSTOM TASKS         *
 *******************************
*/
val dist = TaskKey[Unit]("dist", "Makes the distribution file for release")
dist := {
  val log = streams.value.log
  log.info("Creating distribution...")
  // NB: the .value calls below are task dependencies, so both fat jars are already
  // assembled before this body runs and these log lines print.
  log.info("Assembling spark-bench jar...")
  (assembly in Compile in cli).value
  log.info("Assembling spark-bench-launch jar...")
  (assembly in Compile in `spark-launch`).value
  log.info("Done assembling jars")
  val tmpFolder = s"./${name.value}_${version.value}"
  log.info(s"Creating folder $tmpFolder")
  s"mkdir $tmpFolder".!
  log.info(s"Creating folder $tmpFolder/lib")
  s"mkdir $tmpFolder/lib".!
  log.info("Copying files:")
  log.info("...copying readme.md")
  s"cp readme.md $tmpFolder".!
  log.info("...copying bin/")
  // MAKE SURE YOU DON'T PUT TRAILING SLASHES ON THESE FILES!! It changes behavior between GNU cp and BSD cp
  val binFolder = s"${baseDirectory.value.getPath}/bin"
  s"cp -r $binFolder $tmpFolder".!
  log.info("...copying contents of target/assembly/")
  // Reverting to the Java API here because cp works differently between the GNU and BSD versions. >:(
  val folder = new File(s"${baseDirectory.value.getPath}/target/assembly")
  val files = folder.listFiles()
  files.foreach(f => log.info(f.getPath))
  files.foreach { f =>
    Files.copy(
      f.toPath,
      new File(s"${baseDirectory.value.getPath}/$tmpFolder/lib/${f.toPath.getFileName}").toPath,
      REPLACE_EXISTING)
  }
  log.info("...copying examples/")
  // MAKE SURE YOU DON'T PUT TRAILING SLASHES ON THESE FILES!! It changes behavior between GNU cp and BSD cp
  val examplesFolder = s"${baseDirectory.value.getPath}/examples"
  s"cp -r $examplesFolder $tmpFolder".!
  log.info("Done copying files.")
  val buildNum = sys.env.get("TRAVIS_BUILD_NUMBER")
  val artifactName = buildNum match {
    case None => s"${name.value}_${version.value}.tgz"
    case Some(bn) => s"${name.value}_${version.value}_$bn.tgz"
  }
  log.info(s"Creating tar file: $artifactName")
  s"tar -zcf ./$artifactName $tmpFolder".!
  log.info("Done creating tar file")
  log.info(s"Distribution created: $artifactName")
}
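/*
 Usage sketch (assumed invocation): `sbt dist` produces ./<name>_<version>.tgz
 (or ./<name>_<version>_<TRAVIS_BUILD_NUMBER>.tgz on Travis CI) containing bin/,
 examples/, readme.md, and lib/ with the assembled jars.
*/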
val rmDist = TaskKey[Unit]("rmDist", "removes all the dist files")
rmDist := {
  val log = streams.value.log
  val tmpFolder = s"./${name.value}_${version.value}"
  log.info(s"Removing $tmpFolder...")
  s"rm -rf $tmpFolder".!
  log.info(s"Removing $tmpFolder.tgz...")
  s"rm -f $tmpFolder.tgz".!
  log.info("Distribution files removed.")
}
val rmTemp = TaskKey[Unit]("rmTemp", "removes temporary testing files")
rmTemp := {
  val tmpFolder = "/tmp/spark-bench-scalatest"
  streams.value.log.info(s"Removing $tmpFolder...")
  s"rm -rf $tmpFolder".!
}
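// Wire the lifecycle together: dist always assembles first, and clean also removes
// the distribution artifacts, the scalatest temp folder, and the jars staged for
// the spark-launch tests.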
dist := (dist dependsOn assembly).value
clean := (clean dependsOn rmDist dependsOn rmTemp).value
clean := (clean dependsOn (removeJar in Test in `spark-launch`)).value