forked from gardenlinux/gardenlinux
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathrelease_note.py
executable file
·315 lines (259 loc) · 11.5 KB
/
release_note.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
#!/usr/bin/env python3
import os
import requests
import json
import boto3
import botocore
import yaml
from yaml.loader import SafeLoader
import sys
import urllib.request
from botocore import UNSIGNED
from botocore.client import Config
import argparse
from get_kernelurls import get_kernel_urls
# Public S3 bucket holding the publishing manifests and image artifacts
GARDENLINUX_GITHUB_RELEASE_BUCKET_NAME="gardenlinux-github-releases"
# CPU architectures that Garden Linux images are published for
arches = [
    'amd64',
    'arm64'
]
# Maps the short platform id used in manifests to the human-readable
# cloud provider name shown in the release notes headings
cloud_fullname_dict = {
    'ali': 'Alibaba Cloud',
    'aws': 'Amazon Web Services',
    'gcp': 'Google Cloud Platform',
    'azure': 'Microsoft Azure'
}
def _ali_release_note(published_image_metadata):
output = ""
for pset in published_image_metadata:
for p in published_image_metadata[pset]:
for image in published_image_metadata:
output += f"- Region: {p['region_id']}, Image-Id: {p['image_id']}\n"
return output
def _aws_release_note(published_image_metadata):
output = ""
for pset in published_image_metadata:
for p in published_image_metadata[pset]:
for image in published_image_metadata:
output += f"- Region: {p['aws_region_id']}, Image-Id: {p['ami_id']}\n"
return output
def _gcp_release_note(published_image_metadata):
return f"gcp_image_name: {published_image_metadata['gcp_image_name']}\n"
def _azure_release_note(published_image_metadata):
output = ""
for pset in published_image_metadata:
if pset == 'published_marketplace_images':
output += "# all regions:\n"
for market_image in published_image_metadata[pset]:
output += f"Hyper V: {market_image['hyper_v_generation']}, "
output += f"urn: {market_image['urn']}\n"
return output
def generate_release_note_image_ids(manifests):
    """Concatenate the image-id sections of all given manifest files."""
    return "".join(generate_release_note_image_id_single(path) for path in manifests)
def generate_release_note_image_id_single(manifest_path):
    """
    Outputs a markdown formated string for github release notes,
    containing the image-ids for the respective cloud regions
    """
    with open(manifest_path) as f:
        manifest_data = yaml.load(f, Loader=SafeLoader)

    published_image_metadata = manifest_data['published_image_metadata']
    # No publishing metadata found in manifest, assume it was not published
    if published_image_metadata is None:
        return ""

    platform_short_name = manifest_data['platform']
    arch = manifest_data['architecture']
    # Prefer the human-readable provider name when we know the platform
    heading = cloud_fullname_dict.get(platform_short_name, platform_short_name)
    output = f"### {heading} ({arch})\n"
    output += "```\n"

    renderers = {
        'ali': _ali_release_note,
        'aws': _aws_release_note,
        'gcp': _gcp_release_note,
        'azure': _azure_release_note,
    }
    renderer = renderers.get(platform_short_name)
    if renderer is not None:
        output += renderer(published_image_metadata)
    else:
        print(f"unknown platform {platform_short_name}")
    output += "```\n"
    return output
def construct_full_image_name(platform, features, arch, version, commitish):
    """Build the canonical artifact name used as the S3 manifest key."""
    parts = (platform, features, arch, version, commitish)
    return "-".join(str(part) for part in parts)
def download_s3_file(bucket, remote_filename, local_filename):
    """Download a single object from a public S3 bucket to a local path.

    The request is deliberately unsigned: only public data is fetched and
    no feature requiring client authentication is used.
    """
    unsigned_config = Config(signature_version=UNSIGNED)
    s3_client = boto3.client('s3', config=unsigned_config)
    s3_client.download_file(bucket, remote_filename, local_filename)
def download_meta_single_manifest(bucket, bucket_path, image_name, dest_path):
    """Download one publishing manifest from the bucket and return its local path."""
    local_path = f"{dest_path}/{image_name}"
    download_s3_file(bucket, f"{bucket_path}/{image_name}", local_path)
    return local_path
def download_all_singles(version, commitish):
    """Download the publishing manifest for every platform/arch combination.

    Returns the list of local manifest paths. Exits the process with
    status 1 (deliberate CI failure) if any expected manifest is missing.

    Raises:
        ValueError: if commitish is not set.
    """
    if commitish is None:  # fixed: was `== None`
        raise ValueError("Commitish is not set")
    local_dest_path = "s3_downloads"
    os.makedirs(local_dest_path, exist_ok=True)
    manifests = []
    for arch in arches:
        for platform in cloud_fullname_dict:
            fname = construct_full_image_name(platform, "gardener_prod", arch, version, commitish)
            try:
                # Use local_dest_path consistently; the previous hard-coded
                # "s3_downloads/" produced a doubled slash in the local path.
                manifests.append(download_meta_single_manifest(
                    GARDENLINUX_GITHUB_RELEASE_BUCKET_NAME, "meta/singles", fname, local_dest_path))
            except Exception as e:
                print(f"Failed to get manifest. Error: {e}")
                print(f"\tfname: meta/singles/{fname}")
                # Abort generation of Release Notes - Let the CI fail
                sys.exit(1)
    return manifests
def get_image_object_url(bucket, object, expiration=0):
    """Return a presigned URL for an S3 object, signed with the UNSIGNED scheme.

    NOTE(review): the parameter name `object` shadows the builtin; kept
    unchanged for interface compatibility with existing callers.
    """
    s3_client = boto3.client('s3', config=Config(signature_version=UNSIGNED))
    return s3_client.generate_presigned_url(
        'get_object',
        Params={'Bucket': bucket, 'Key': object},
        ExpiresIn=expiration,
    )
def generate_image_download_section(manifests, version, commitish):
    """Render the markdown download-link section for all cloud images."""
    # Per platform: (artifact suffix to link, label used in the link text)
    artifact_by_platform = {
        'ali': ('.qcow2', 'rootfs.qcow2'),
        'aws': ('.raw', 'rootfs.raw'),
        'gcp': ('.tar.gz', 'rootfs-gcpimage.tar.gz'),
        'azure': ('.vhd', 'rootfs.vhd'),
    }
    output = ""
    for manifest_path in manifests:
        with open(manifest_path) as f:
            manifest_data = yaml.load(f, Loader=SafeLoader)
        arch = manifest_data['architecture'].upper()
        platform = manifest_data['platform']
        artifact = artifact_by_platform.get(platform)
        if artifact is None:
            continue
        suffix, label = artifact
        for path in manifest_data['paths']:
            if path['suffix'] != suffix:
                continue
            url = get_image_object_url(path['s3_bucket_name'], path['s3_key'])
            output += f"### {cloud_fullname_dict[platform]} ({arch})\n"
            output += f"* [{version}-{commitish}-{label}]({url})\n"
    return output
def _parse_match_section(pkg_list: list):
output = ""
for pkg in pkg_list:
# If is dict, the package has additional information relevant for release notes
if isinstance(pkg, dict):
pkg_string = next(iter(pkg))
output += f"\n{pkg_string}:\n"
for item in pkg[pkg_string]:
for k,v in item.items():
output += f" * {k}: {v}\n"
return output
def generate_package_update_section(version):
    """Build the "Package Updates" section from the package-build repository.

    Downloads the package list for *version* from the gardenlinux
    package-build GitLab repo and renders every matchSources/matchBinaries
    entry that carries release-note metadata. Exits the process when the
    remote version string does not match the requested version.
    """
    repo_definition_url =\
        f"https://gitlab.com/gardenlinux/gardenlinux-package-build/-/raw/main/packages/{version}.yaml"
    output = ""
    with urllib.request.urlopen(repo_definition_url) as f:
        data = yaml.load(f.read().decode('utf-8'), Loader=SafeLoader)
    if data['version'] != version:
        print(f"ERROR: version string in {repo_definition_url} does not match {version}")
        sys.exit(1)
    for source in data['publish']['sources']:
        # excluded section does not contain release note information
        if source['type'] == 'exclude':
            continue
        # base mirror does not contain packages specification
        if 'packages' not in source:
            continue
        # Only check packages lists if it contains a list of either matchSources or matchBinaries
        for s in source['packages']:
            if 'matchSources' in s:
                output += _parse_match_section(s['matchSources'])
            # Bug fix: the membership test previously looked in
            # source['packages'] (the whole list) instead of the current
            # entry, so matchBinaries sections were never rendered.
            if 'matchBinaries' in s:
                output += _parse_match_section(s['matchBinaries'])
    return output
def create_github_release_notes(gardenlinux_version, commitish):
    """Assemble the full markdown body for a GitHub release."""
    sections = []
    # Only patch releases (x.y with y != 0) get a package-update section
    if not gardenlinux_version.endswith('.0'):
        sections.append("## Package Updates\n")
        sections.append(generate_package_update_section(gardenlinux_version))
        sections.append("\n")
    manifests = download_all_singles(gardenlinux_version, commitish)
    sections.append(generate_release_note_image_ids(manifests))
    sections.append("\n")
    sections.append("## Kernel Package direct download links\n")
    sections.append(get_kernel_urls(gardenlinux_version))
    sections.append("\n")
    sections.append(generate_image_download_section(manifests, gardenlinux_version, commitish))
    return "".join(sections)
def write_to_release_id_file(release_id):
    """Persist the GitHub release id to .github_release_id for later CI steps.

    Exits the process with status 1 when the file cannot be written.
    """
    try:
        with open('.github_release_id', 'w') as release_id_file:
            release_id_file.write(release_id)
    except IOError as e:
        print(f"Could not create .github_release_id file: {e}")
        sys.exit(1)
    else:
        print(f"Created .github_release_id successfully.")
def create_github_release(owner, repo, tag, commitish):
    """Create a (non-draft) GitHub release for *tag* and return its numeric id.

    Requires the GITHUB_TOKEN environment variable. On any status other
    than 201 the error payload is printed and raise_for_status() is called.
    """
    token = os.environ.get('GITHUB_TOKEN')
    if not token:
        raise ValueError("GITHUB_TOKEN environment variable not set")

    # Build the release body first; this downloads manifests and may exit
    body = create_github_release_notes(tag, commitish)

    request_headers = {
        'Authorization': f'token {token}',
        'Accept': 'application/vnd.github.v3+json'
    }
    release_payload = {
        'tag_name': tag,
        'name': tag,
        'body': body,
        'draft': False,
        'prerelease': False
    }
    response = requests.post(
        f'https://api.github.com/repos/{owner}/{repo}/releases',
        headers=request_headers,
        data=json.dumps(release_payload),
    )
    if response.status_code != 201:
        print("Failed to create release")
        print(response.json())
        response.raise_for_status()
        return None
    print("Release created successfully")
    return response.json().get('id')
def main():
    """CLI entry point: create a release, upload assets, or print kernel urls."""
    parser = argparse.ArgumentParser(description="GitHub Release Script")
    subparsers = parser.add_subparsers(dest='command')

    create_parser = subparsers.add_parser('create')
    create_parser.add_argument('--owner', default="gardenlinux")
    create_parser.add_argument('--repo', default="gardenlinux")
    create_parser.add_argument('--tag', required=True)
    create_parser.add_argument('--commit', required=True)

    upload_parser = subparsers.add_parser('upload')
    upload_parser.add_argument('--release_id', required=True)
    upload_parser.add_argument('--file_path', required=True)

    kernelurl_parser = subparsers.add_parser('kernelurls')
    kernelurl_parser.add_argument('--version', required=True)

    args = parser.parse_args()

    if args.command == 'create':
        release_id = create_github_release(args.owner, args.repo, args.tag, args.commit)
        write_to_release_id_file(f"{release_id}")
        print(f"Release created with ID: {release_id}")
    elif args.command == 'upload':
        # 'upload' is not implemented yet
        pass
    elif args.command == 'kernelurls':
        # Print the kernel download links for the given version
        print("## Kernel Package direct download links\n" + get_kernel_urls(args.version))
    else:
        parser.print_help()
# Run the CLI only when executed as a script, not when imported.
# (Removed a stale commented-out usage example that duplicated the
# 'create' subcommand behavior.)
if __name__ == "__main__":
    main()