@@ -96,7 +96,38 @@ def build_release(email_address):
     # build the helm package
     call_helm(staging_dir, release_base, version, email_address)
 
-    # generate source code tarball
+    # Ensure release dirs are clean (and generate build.date files)
+    clean_release(release_base)
+
+    # generate staging source code tarball
+    tarball_name = release_package_name + "-staging.tar.gz"
+    tarball_path = os.path.join(staging_dir, tarball_name)
+    print("creating tarball %s" % tarball_path)
+    with tarfile.open(tarball_path, "w:gz") as tar:
+        tar.add(os.path.join(release_base, "LICENSE"), arcname=release_package_name + "/LICENSE")
+        tar.add(os.path.join(release_base, "NOTICE"), arcname=release_package_name + "/NOTICE")
+        tar.add(release_base, arcname=release_package_name, filter=exclude_files)
+
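+    # the staging tarball is only an intermediate artifact: it is unpacked by the
+    # build helpers below and deleted again once the checksums have been collected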
+    # generate yunikorn-web reproducible binaries
+    web_hashes_amd64 = build_web_and_generate_hashes(staging_dir, release_package_name, "x86_64")
+    web_hashes_arm64 = build_web_and_generate_hashes(staging_dir, release_package_name, "aarch64")
+
+    # generate yunikorn-k8shim reproducible binaries
+    shim_hashes_amd64 = build_shim_and_generate_hashes(staging_dir, release_package_name, "x86_64")
+    shim_hashes_arm64 = build_shim_and_generate_hashes(staging_dir, release_package_name, "aarch64")
+
+    # merge hashes
+    hashes_amd64 = "\n ".join([shim_hashes_amd64, web_hashes_amd64])
+    hashes_arm64 = "\n ".join([shim_hashes_arm64, web_hashes_arm64])
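+    # each merged block holds one "<sha512> <binary name>" line per artifact,
+    # k8shim binaries first, ready to be substituted into the README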
+
+    # remove staging tarball
+    os.remove(tarball_path)
+
+    # update reproducible build information in README
+    go_version = get_go_version()
+    update_reproducible_build_info(release_base, go_version, hashes_amd64, hashes_arm64)
+
+    # generate final source code tarball
     tarball_name = release_package_name + ".tar.gz"
     tarball_path = os.path.join(staging_dir, tarball_name)
     print("creating tarball %s" % tarball_path)
@@ -203,6 +234,74 @@ def download_sourcecode(base_path, repo_meta):
     update_dep_ref(name, os.path.join(base_path, alias))
     return sha
 
+# Run distclean on the source code path
+def clean_release(local_repo_path):
+    print("ensuring local source repo is clean")
+    path = os.getcwd()
+    os.chdir(local_repo_path)
+    retcode = subprocess.call(['make', 'distclean'])
+    if retcode:
+        fail("failed to clean staging repo")
+    os.chdir(path)
+
+# Unpack tarball into tmp dir
+def unpack_staging_tarball(staging_dir, dest_dir, release_name):
+    path = os.getcwd()
+    os.chdir(staging_dir)
+    retcode = subprocess.call(['rm', '-rf', dest_dir])
+    if retcode:
+        fail("failed to clean dest dir")
+    retcode = subprocess.call(['mkdir', dest_dir])
+    if retcode:
+        fail("failed to create dest dir")
+    os.chdir(dest_dir)
+    retcode = subprocess.call(['tar', 'xf', os.path.join(staging_dir, "%s-staging.tar.gz" % release_name)])
+    if retcode:
+        fail("failed to unpack tarball")
+    os.chdir(path)
+
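+# The two build helpers below follow the same pattern: unpack the staging tarball
+# into <staging_dir>/tmp, run the arch-specific make targets, hash the resulting
+# binaries, then remove the scratch directory again.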
+# Generate binaries for yunikorn-web and compute checksums
+def build_web_and_generate_hashes(staging_dir, release_name, arch):
+    print("generating reproducible build artifacts for yunikorn-web (%s)" % arch)
+    path = os.getcwd()
+    tmp_dir = os.path.join(staging_dir, "tmp")
+    release_dir = os.path.join(tmp_dir, release_name)
+    unpack_staging_tarball(staging_dir, tmp_dir, release_name)
+    web_dir = os.path.join(release_dir, "web")
+    os.chdir(web_dir)
+    retcode = subprocess.call(['make', 'HOST_ARCH=' + arch, 'build_server_prod'])
+    if retcode:
+        fail("failed to build yunikorn-web (%s)" % arch)
+    hash = get_checksum("build/prod/yunikorn-web", "yunikorn-web")
+    os.chdir(staging_dir)
+    retcode = subprocess.call(['rm', '-rf', 'tmp'])
+    if retcode:
+        fail("failed to clean temp dir")
+    os.chdir(path)
+    return hash
+
+# Generate binaries for yunikorn-k8shim and compute checksums
+def build_shim_and_generate_hashes(staging_dir, release_name, arch):
+    print("generating reproducible build artifacts for yunikorn-k8shim (%s)" % arch)
+    path = os.getcwd()
+    tmp_dir = os.path.join(staging_dir, "tmp")
+    release_dir = os.path.join(tmp_dir, release_name)
+    unpack_staging_tarball(staging_dir, tmp_dir, release_name)
+    shim_dir = os.path.join(release_dir, "k8shim")
+    os.chdir(shim_dir)
+    retcode = subprocess.call(['make', 'HOST_ARCH=' + arch, 'scheduler', 'plugin', 'admission'])
+    if retcode:
+        fail("failed to build yunikorn-k8shim (%s)" % arch)
+    adm_hash = get_checksum("build/bin/yunikorn-admission-controller", "yunikorn-admission-controller")
+    scheduler_hash = get_checksum("build/bin/yunikorn-scheduler", "yunikorn-scheduler")
+    plugin_hash = get_checksum("build/bin/yunikorn-scheduler-plugin", "yunikorn-scheduler-plugin")
+    hash = "\n ".join([adm_hash, scheduler_hash, plugin_hash])
+    os.chdir(staging_dir)
+    retcode = subprocess.call(['rm', '-rf', 'tmp'])
+    if retcode:
+        fail("failed to clean temp dir")
+    os.chdir(path)
+    return hash
 
 # K8shim depends on yunikorn-core and scheduler-interface
 def update_dep_ref_k8shim(local_repo_path):
@@ -301,6 +400,14 @@ def update_required_go_version(base_path, local_repo_path):
     print(f" - go version: {go_version}")
     replace(os.path.join(base_path, "README.md"), 'Go 1.16', 'Go ' + go_version)
 
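+# @GO_VERSION@, @AMD64_BINARIES@ and @ARM64_BINARIES@ are placeholder tokens the
+# staged README is expected to contain; they are replaced with the real values here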
+# update reproducible build information in README
+def update_reproducible_build_info(base_path, go_version, hashes_amd64, hashes_arm64):
+    print("recording go compiler used for reproducible builds")
+    replace(os.path.join(base_path, "README.md"), '@GO_VERSION@', go_version)
+    print("recording build artifact hashes (amd64)")
+    replace(os.path.join(base_path, "README.md"), '@AMD64_BINARIES@', hashes_amd64)
+    print("recording build artifact hashes (arm64)")
+    replace(os.path.join(base_path, "README.md"), '@ARM64_BINARIES@', hashes_arm64)
 
 # update required Node.js and angular versions in the README.md
 def update_required_node_and_angular_versions(base_path, local_repo_path):
@@ -364,6 +471,19 @@ def write_checksum(tarball_file, tarball_name):
     sha_file.close()
     print("sha512 checksum: %s" % sha)
 
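+# Unlike write_checksum above, which writes the checksum to a separate file, this
+# helper returns the "<sha512> <file name>" line for embedding in the README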
+# Generate a checksum for a file
+def get_checksum(file_path, file_name):
+    print("generating sha512 checksum for %s" % file_name)
+    h = hashlib.sha512()
+    # read the file in 64 KiB chunks and feed them to the hash
+    with open(file_path, 'rb') as file:
+        while True:
+            data = file.read(65536)
+            if not data:
+                break
+            h.update(data)
+    sha = h.hexdigest()
+    return "%s %s" % (sha, file_name)
 
 # Sign the source archive if an email is provided
 def call_gpg(tarball_file, email_address):
@@ -379,6 +499,14 @@ def call_gpg(tarball_file, email_address):
     if retcode:
         fail("failed to create gpg signature")
 
+# Determine the specific go compiler in use
+def get_go_version():
+    command = ['go', 'env', 'GOVERSION']
+    result = subprocess.run(command, capture_output=True, text=True)
+    if result.returncode:
+        fail("failed to get go version")
+    output = re.sub(r'^go', r'', result.stdout.strip())
+    return output
 
 # Package the helm chart and sign if an email is provided
 def call_helm(staging_dir, base_path, version, email_address):