@Library("[email protected]") _
/*
- Starts a Docker container and runs the lint, test and build targets from package.json
- Stashes the resulting build artifacts
- Switches back to one of the bare-metal agents and unstashes the build artifacts
- Builds Debian packages and uploads them to the repos
- Reports status to GitHub
*/
pipeline {
agent {
label "ubuntu&&apmm-slave"
}
options {
ansiColor('xterm') // Add support for coloured output
buildDiscarder(logRotator(numToKeepStr: '10')) // Discard old builds
}
triggers {
cron(env.BRANCH_NAME == 'master' ? 'H H(0-8) * * 6' : '') // Build master some time every Saturday morning
}
parameters {
booleanParam(name: "FORCE_DEBUPLOAD", defaultValue: false, description: "Force Debian package upload")
}
environment {
http_proxy = "http://www-cache.rd.bbc.co.uk:8080"
https_proxy = "http://www-cache.rd.bbc.co.uk:8080"
HOME = "/var/tmp/home-for-npm" // Override the npm cache directory to avoid: EACCES: permission denied, mkdir '/.npm'
yarn_cache_folder = "/var/tmp/yarn-cache" // Give the Yarn cache an explicit, writable location
}
stages {
stage("Clean Environment") {
steps {
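// git clean -fdx removes untracked files and directories (including ignored ones), so every run starts from a pristine checkout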
sh 'git clean -fdx'
}
}
stage("Lint, test and build in Docker container") {
agent {
dockerfile {
// Mount the Jenkins agent's certificates
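// (presumably so tools inside the container can use the host's CA certificates for TLS verification)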
args "-v /etc/pki/tls/:/etc/pki/tls/"
}
}
steps {
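// "ci" and "build" are npm scripts defined in this repo's package.json; per the header comment they cover lint/test and the production build respectively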
bbcNpmRunScript("ci")
bbcNpmRunScript("build")
stash(name: "built-site", includes: "build/**")
}
}
stage ("Debian Source Build") {
steps {
script {
env.debSourceBuild_result = "FAILURE"
}
bbcGithubNotify(context: "deb/sourceBuild", status: "PENDING")
unstash(name: "built-site")
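// make_dsc.sh is a repo script expected to generate the Debian source package (.dsc) from the unstashed build output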
sh "scripts/make_dsc.sh"
bbcPrepareDsc()
stash(name: "deb_dist", includes: "deb_dist/*")
script {
env.debSourceBuild_result = "SUCCESS" // This will only run if the steps above succeeded
}
}
post {
always {
bbcGithubNotify(context: "deb/sourceBuild", status: env.debSourceBuild_result)
}
}
}
stage ("Build Packages") {
parallel {
stage ("Build Deb with pbuilder") {
steps {
script {
env.pbuilder_result = "FAILURE"
}
bbcGithubNotify(context: "deb/packageBuild", status: "PENDING")
// Build for all supported platforms and extract results into workspace
bbcParallelPbuild(
stashname: "deb_dist",
dists: bbcGetSupportedUbuntuVersions(),
arch: "amd64")
script {
env.pbuilder_result = "SUCCESS" // This will only run if the steps above succeeded
}
}
post {
success {
archiveArtifacts artifacts: "_result/**"
}
always {
bbcGithubNotify(context: "deb/packageBuild", status: env.pbuilder_result)
}
}
}
}
}
stage ("Upload Debian Package") {
// Duplicates the when clause of each upload so Blue Ocean can clearly display when the stage is skipped
when {
anyOf {
expression { return params.FORCE_DEBUPLOAD }
expression {
bbcShouldUploadArtifacts(branches: ["master"])
}
}
}
steps {
script {
env.debUpload_result = "FAILURE"
}
bbcGithubNotify(context: "deb/upload", status: "PENDING")
script {
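// Upload the pbuilder output for every supported Ubuntu distribution to the apt repo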
for (def dist in bbcGetSupportedUbuntuVersions()) {
bbcDebUpload(sourceFiles: "_result/${dist}-amd64/*",
removePrefix: "_result/${dist}-amd64",
dist: "${dist}",
apt_repo: "ap/ipstudio")
}
}
script {
env.debUpload_result = "SUCCESS" // This will only run if the steps above succeeded
}
}
post {
always {
bbcGithubNotify(context: "deb/upload", status: env.debUpload_result)
}
}
}
}
}