"""Task driver for Sentinel.
DoIt tasks are typically called from PDM. The DoIt tasks may call PDM
scripts recursively to implement a super-powered PDM "composite" type.
"""
from pathlib import Path
import subprocess
from shutil import copy2, move, rmtree
import sys
import os
import re
import gzip
import hashlib
from itertools import chain
from functools import partial
from doit import task_params
# https://groups.google.com/g/python-doit/c/GFtEuBp82xc/m/j7jFkvAGH1QJ
from doit.action import CmdAction
from doit.task import clean_targets
from doit.tools import run_once, create_folder, result_dep, title_with_actions
from doit.reporter import ConsoleReporter
# Overrides
# Tasks typically only skip printing titles if they're private. For the
# list_sby_status tasks, I want to skip the titles because they interfere
# with those tasks' extra stdout when parallelism is on.
class MaybeSuppressReporter(ConsoleReporter): # noqa: D101
def execute_task(self, task): # noqa: D102
if task.meta and task.meta.get("suppress_reporter", False):
pass
else:
super().execute_task(task)
DOIT_CONFIG = {
"default_tasks": [],
"action_string_formatting": "new",
"reporter": MaybeSuppressReporter
}
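# A task opts in to the suppressed reporter through its "meta" dict; a minimal
# sketch mirroring how list_sby_status uses it below (the task name and action
# are illustrative, not part of this file):
#
#   yield {
#       "name": "quiet_subtask",
#       "actions": [(some_quiet_action,)],
#       "meta": {"suppress_reporter": True},
#   }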
# Helpers
# doit actions must return certain types. None is one of these, but
# paths (as returned by copy2 and move) are not.
def copy_(src, dst): # noqa: D103
copy2(src, dst)
# Ditto.
def move_(src, dst): # noqa: D103
move(src, dst)
# Iterate over multiple trees and remove them!
def rmtrees(paths): # noqa: D103
for p in paths:
rmtree(p, ignore_errors=True)
# Lambdas are unpickleable on Windows (works on *nix?!), and some tasks can
# be easily parallelized using multiprocessing if we use pickleable types.
# Custom titles were originally supplied using lambdas; work around this
# multiprocessing limitation by dispatching to a single print_title
# function whose evaluation is deferred using partial. Note this runs before
# custom reporters (like MaybeSuppressReporter), and so can be combined.
def print_title(task, title): # noqa: D103
return title
# Generic tasks
def git_init(repo_dir): # noqa: D103
submod = repo_dir / ".git"
return {
"basename": "_git_init",
"name": repo_dir.stem,
"actions": [CmdAction("git submodule update --init --recursive -- .",
cwd=repo_dir)],
"targets": [submod],
"uptodate": [run_once],
}
def task_git(): # noqa: D103
riscof_tests = Path("./tests/riscof/")
yield {
"basename": "_git_init",
"name": None,
"doc": "initialize git submodules, \"doit list --all -p _git_init\" "
"for choices"
}
for p in [Path("./tests/upstream/riscv-tests"),
Path("./tests/formal/riscv-formal"),
riscof_tests / "sail-riscv", riscof_tests / "riscv-arch-test"]:
yield git_init(p)
def task__git_rev():
"""get git revision"""
return {'actions': ["git rev-parse HEAD"]}
def task__demo():
"""create a demo bitstream (for benchmarking)"""
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")] + \
[Path("./examples/attosoc.py")]
return {
"actions": ["pdm demo -b build-bench"],
"file_dep": pyfiles + [Path("./src/sentinel/microcode.asm")]
}
# These two tasks do not require "pdm run" because I had trouble installing
# matplotlib into the venv. Intended usage in cases like mine is
# "doit bench_luts" or "doit plot_luts".
def task_luts(): # noqa: D103
build_dir = Path("./build-bench")
yosys_log = build_dir / "top.rpt"
nextpnr_log = build_dir / "top.tim"
luts_csv = Path("./LUTs.csv")
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")] + \
[Path("./examples/attosoc.py")]
yield {
"basename": "bench_luts",
"actions": [f"{sys.executable} -m logluts --yosys-log {yosys_log} "
f"--nextpnr-log {nextpnr_log} --git . --target ice40 "
f"--add-commit --csvfile {luts_csv}"
],
"targets": [luts_csv],
"uptodate": [result_dep("_git_rev")],
"verbosity": 2,
"setup": ["_demo"],
"file_dep": pyfiles + [Path("./src/sentinel/microcode.asm")],
"doc": "build \"pdm demo\" bitstream (if out of date), record LUT usage using LogLUTs" # noqa: E501
}
yield {
"basename": "plot_luts",
"actions": [f"{sys.executable} -m logluts --yosys-log {yosys_log} "
f"--nextpnr-log {nextpnr_log} --git . --target ice40 "
f"--plot --csvfile {luts_csv}"
],
"targets": [],
"uptodate": [False],
"verbosity": 2,
"setup": ["_demo"],
"file_dep": pyfiles + [Path("./src/sentinel/microcode.asm")],
"doc": "build \"pdm demo\" bitstream (if out of date), plot LUT usage using LogLUTs" # noqa: E501
}
def task_ucode():
"""assemble microcode and copy non-bin artifacts to root"""
ucode = Path("./src/sentinel/microcode.asm")
hex_ = ucode.with_suffix(".asm_block_ram.hex")
fdef = ucode.with_suffix(".asm_block_ram.fdef")
return {
"actions": ["{sys.executable} -m m5meta {ucodefile}",
(move_, (hex_, Path(".") / hex_.name)),
(move_, (fdef, Path(".") / fdef.name))
],
"params": [{
"name": "ucodefile",
"default": str(ucode)
}],
"targets": [Path(".") / hex_.name, Path(".") / fdef.name],
"file_dep": [ucode],
}
# YoWASP
def write_yowasp_env_toolchain(fn): # noqa: D103
envs = {
"AMARANTH_USE_YOSYS": "builtin",
"YOSYS": "yowasp-yosys",
"NEXTPNR_ICE40": "yowasp-nextpnr-ice40",
"ICEPACK": "yowasp-icepack"
}
with open(fn, "w") as fp:
for k, v in envs.items():
fp.write(f"{k}={v}\n")
def task_prepare_yowasp():
"""prepare Sentinel source for YoWASP tools"""
return {
"actions": [(write_yowasp_env_toolchain, (".env.toolchain",))],
"uptodate": [run_once],
"clean": [clean_targets],
"targets": [".env.toolchain"],
}
# RISCOF
def opam_vars(): # noqa: D103
out = subprocess.run("opam env", shell=True, stdout=subprocess.PIPE).stdout
vars = os.environ.copy()
for var in out.decode().replace("\n", " ").split("; "):
if "export" in var or not var:
continue
tmp = var.split("=")
k, v = tmp
vars[k] = v
return {"env": vars}
def compress(src, dst): # noqa: D103
with open(src, "rb") as fp:
c_bytes = gzip.compress(fp.read())
with open(dst, "wb") as fp:
fp.write(c_bytes)
def decompress(src, dst): # noqa: D103
with open(src, "rb") as fp:
d_bytes = gzip.decompress(fp.read())
with open(dst, "wb") as fp:
fp.write(d_bytes)
os.chmod(dst, 0o775)
def run_with_env(cmd, cwd, env): # noqa: D103
return subprocess.run(cmd, cwd=cwd, env=env, shell=True).check_returncode()
def task__opam():
"""extract environment vars from opam"""
return {"actions": [(opam_vars,)], "verbosity": 2}
def task__decompress_sail():
"""decompress previously-built SAIL emulator"""
riscof_tests = Path("./tests/riscof/")
comp_emu = riscof_tests / "bin/riscv_sim_RV32.gz"
bin_emu = riscof_tests / "bin/riscv_sim_RV32"
return {
"actions": [(decompress, (comp_emu, bin_emu))],
# No file dep to avoid dependency on building SAIL.
"file_dep": [],
# OTOH, no need to repeatedly decompress.
"uptodate": [run_once],
"targets": [bin_emu],
}
def task__build_sail():
"""build SAIL RISC-V emulators in opam environment, compress"""
riscof_tests = Path("./tests/riscof/")
emu = riscof_tests / "sail-riscv" / "c_emulator/riscv_sim_RV32"
# FIXME: Imprecise.
src_files = [s for s in (riscof_tests / "sail-riscv" / "model").glob("*.sail")] # noqa: E501
return {
# TODO: Maybe make public?
"title": partial(print_title, title="Building SAIL RISC-V emulators"),
"actions": [(partial(run_with_env,
cmd="make ARCH=RV32 c_emulator/riscv_sim_RV32",
cwd=riscof_tests / "sail-riscv")),
# This is not a task of its own because there shouldn't
# be a dependency on building SAIL just for running
# RISCOF in a just checked-out repo. Delegate to
# decompression routine instead. This task should be
# run manually.
(compress, (emu, riscof_tests / "bin/riscv_sim_RV32.gz"))],
"verbosity": 2,
"file_dep": src_files,
"targets": [emu, riscof_tests / "bin/riscv_sim_RV32.gz"],
"getargs": {
"env": ("_opam", "env")
}
}
def task__riscof_gen():
"""run RISCOF's testlist command to prepare RISCOF files and directories"""
riscof_tests = Path("./tests/riscof/")
sentinel_plugin = riscof_tests / "sentinel"
config_ini = riscof_tests / "config.ini"
return {
"actions": [CmdAction("pdm run riscof testlist --config=config.ini "
"--suite=riscv-arch-test/riscv-test-suite/ "
"--env=riscv-arch-test/riscv-test-suite/env "
"--work-dir=riscof_work",
cwd=riscof_tests)],
"file_dep": [config_ini, sentinel_plugin / "sentinel_isa.yaml",
sentinel_plugin / "sentinel_platform.yaml"]
}
# This is required because RISCOF expects dut/ref dirs to not exist.
def task__clean_dut_ref_dirs():
"""remove dut/ref directories from last RISCOF run"""
riscof_tests = Path("./tests/riscof/")
riscof_work = riscof_tests / "riscof_work"
dut_dirs = [s for s in riscof_work.glob("**/dut/")]
ref_dirs = [s for s in riscof_work.glob("**/ref/")]
return {
"actions": [(rmtrees, (dut_dirs,)),
(rmtrees, (ref_dirs,))]
}
def save_last_testfile(testfile): # noqa: D103
return {"last_testfile": testfile}
def last_testfile(task, values, testfile): # noqa: D103
path_tf = Path(testfile)
if not path_tf.exists():
return False
with open(path_tf, "rb") as fp:
hash = hashlib.md5(fp.read()).hexdigest()
task.value_savers.append(partial(save_last_testfile, hash))
return values.get("last_testfile") == hash
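# A minimal sketch of the doit value-saving protocol used above (helper names
# are illustrative, not part of this file): an uptodate callable is invoked
# with (task, values) plus any kwargs bound via partial; returning False
# forces a re-run. Callables appended to task.value_savers run after the task
# and their returned dicts are persisted as "values" for the next run:
#
#   def _save(marker):
#       return {"marker": marker}
#
#   def _unchanged(task, values, marker):
#       task.value_savers.append(partial(_save, marker))
#       return values.get("marker") == marker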
# @task_params is required because task-creation time is the only time where
# we can pass a command-line argument into uptodate. AFAICT, this still
# creates one single task regardless of input parameters. Also note that
# the param is still available in action string formatting (though I use
# f-strings instead to create relative paths).
#
# The uptodate check checks whether we've passed the same testfile in
# consecutively _by checking the file's MD5_. If the MD5 changed since the last
# run, then our report is, in fact, out-of-date, regardless of the testfile's
# location. Yes, this is all to support using custom testfiles :).
@task_params([{"name": "testfile", "short": "t",
"default": "./tests/riscof/riscof_work/test_list.yaml",
"help": "path to alternate test list"}])
def task_run_riscof(testfile):
"""run RISCOF tests against Sentinel/Sail, and report results, removes previous run's artifacts""" # noqa: E501
riscof_tests = Path("./tests/riscof/")
riscof_work = riscof_tests / "riscof_work"
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")]
sail_plugin = riscof_tests / "sail_cSim"
sentinel_plugin = riscof_tests / "sentinel"
config_ini = riscof_tests / "config.ini"
sailp_files = [sail_plugin / s for s in ("env/link.ld", "env/model_test.h",
"__init__.py",
"riscof_sail_cSim.py")]
sentp_files = [sentinel_plugin / s for s in ("riscof_sentinel.py",
"env/link.ld",
"env/model_test.h",
"sentinel_isa.yaml",
"sentinel_platform.yaml")]
# Support both absolute and relative paths from dodo.py root for "pdm run"
# convenience.
path_tf = Path(testfile)
if not path_tf.is_absolute():
path_tf = path_tf.absolute()
vars = os.environ.copy()
vars["PATH"] += os.pathsep + str(riscof_tests.absolute() / "bin")
return {
"title": partial(print_title, title="Running RISCOF tests"),
"actions": [CmdAction("pdm run riscof run --config=config.ini "
"--suite=riscv-arch-test/riscv-test-suite/ "
"--env=riscv-arch-test/riscv-test-suite/env "
f"--testfile={path_tf} "
"--no-browser --no-clean",
cwd=riscof_tests,
env=vars)],
"targets": [riscof_work / "report.html"],
"verbosity": 2,
"setup": ["_git_init:sail-riscv",
"_git_init:riscv-arch-test",
"_decompress_sail",
"_riscof_gen",
"_clean_dut_ref_dirs"],
"file_dep": pyfiles + sailp_files + sentp_files + [
config_ini, Path("./src/sentinel/microcode.asm")
],
"uptodate": [partial(last_testfile, testfile=testfile)],
}
# RISC-V Formal
SBY_TESTS = (
"causal_ch0", "cover", "insn_addi_ch0", "insn_add_ch0", "insn_andi_ch0",
"insn_and_ch0", "insn_auipc_ch0", "insn_beq_ch0", "insn_bgeu_ch0",
"insn_bge_ch0", "insn_bltu_ch0", "insn_blt_ch0", "insn_bne_ch0",
"insn_jalr_ch0", "insn_jal_ch0", "insn_lbu_ch0", "insn_lb_ch0",
"insn_lhu_ch0", "insn_lh_ch0", "insn_lui_ch0", "insn_lw_ch0",
"insn_ori_ch0", "insn_or_ch0", "insn_sb_ch0", "insn_sh_ch0",
"insn_slli_ch0", "insn_sll_ch0", "insn_sltiu_ch0", "insn_slti_ch0",
"insn_sltu_ch0", "insn_slt_ch0", "insn_srai_ch0", "insn_sra_ch0",
"insn_srli_ch0", "insn_srl_ch0", "insn_sub_ch0", "insn_sw_ch0",
"insn_xori_ch0", "insn_xor_ch0", "pc_bwd_ch0", "pc_fwd_ch0", "reg_ch0",
"unique_ch0", "liveness_ch0",
"csrw_mscratch_ch0", "csrc_any_mscratch_ch0", "csrw_mcause_ch0",
"csrw_mip_ch0", "csrc_zero_mip_ch0", "csrw_mie_ch0", "csrc_zero_mie_ch0",
"csrw_mstatus_ch0", "csrc_const_mstatus_ch0", "csrw_mtvec_ch0",
"csrc_zero_mtvec_ch0", "csrw_mepc_ch0", "csrc_zero_mepc_ch0",
"csrw_mvendorid_ch0", "csrc_zero_mvendorid_ch0", "csrw_marchid_ch0",
"csrc_zero_marchid_ch0", "csrw_mimpid_ch0", "csrc_zero_mimpid_ch0",
"csrw_mhartid_ch0", "csrc_zero_mhartid_ch0", "csrw_mconfigptr_ch0",
"csrc_zero_mconfigptr_ch0", "csrw_misa_ch0", "csrc_zero_misa_ch0",
"csrw_mstatush_ch0", "csrc_zero_mstatush_ch0", "csrw_mcountinhibit_ch0",
"csrc_zero_mcountinhibit_ch0", "csrw_mtval_ch0", "csrc_zero_mtval_ch0",
"csrw_mcycle_ch0", "csrc_zero_mcycle_ch0", "csrw_minstret_ch0",
"csrc_zero_minstret_ch0", "csrw_mhpmcounter3_ch0",
"csrc_zero_mhpmcounter3_ch0", "csrw_mhpmevent3_ch0",
"csrc_zero_mhpmevent3_ch0", "csr_ill_eff_ch0", "csr_ill_302_ch0",
"csr_ill_303_ch0", "csr_ill_306_ch0", "csr_ill_34a_ch0", "csr_ill_34b_ch0",
"csr_ill_30a_ch0", "csr_ill_31a_ch0"
)
# This task is useful when hacking on *.py files, but the RISC-V Formal
# config files haven't actually changed (and thus genchecks.py need not be
# run).
def task__formal_gen_sentinel():
"""generate Sentinel subdir and Verilog in RISC-V Formal cores dir"""
formal_tests = Path("./tests/formal/")
cores_dir = formal_tests / "riscv-formal" / "cores"
sentinel_dir = cores_dir / "sentinel"
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")]
sentinel_v = sentinel_dir / "sentinel.v"
return {
"actions": [(create_folder, [cores_dir / "sentinel"]),
f"pdm gen -o {sentinel_v} -f"],
"file_dep": pyfiles + [Path("./src/sentinel/microcode.asm")],
}
def task__formal_gen_files():
"""copy Sentinel files and run RISC-V Formal's genchecks.py script"""
formal_tests = Path("./tests/formal/")
cores_dir = formal_tests / "riscv-formal" / "cores"
sentinel_dir = cores_dir / "sentinel"
genchecks = formal_tests / "riscv-formal" / "checks" / "genchecks.py"
disasm_py = formal_tests / "disasm.py"
checks_cfg = formal_tests / "checks.cfg"
wrapper_sv = formal_tests / "wrapper.sv"
return {
"actions": [(copy_, [disasm_py, sentinel_dir / disasm_py.name]),
(copy_, [checks_cfg, sentinel_dir / checks_cfg.name]),
(copy_, [wrapper_sv, sentinel_dir / wrapper_sv.name]),
CmdAction(f"{sys.executable} ../../checks/genchecks.py",
cwd=sentinel_dir)],
"file_dep": [disasm_py, checks_cfg, wrapper_sv, genchecks]
}
def maybe_disasm_move_vcd(sentinel_dir, root, sby_file): # noqa: D103
sby_dir: Path = sby_file.with_suffix("")
trace_names = [t for t in (sby_dir / "engine_0").glob("trace*.vcd")]
id_re = re.compile("[0-9]*$")
for tn in trace_names:
num = id_re.search(str(tn.stem))
rel_tn = tn.relative_to(sentinel_dir)
stem_id = sby_file.stem + num[0] if num else ""
out_path = root / stem_id
copy2(tn, out_path.with_suffix(".vcd"))
ret = subprocess.run([sys.executable, "disasm.py", rel_tn,
stem_id], stdout=subprocess.PIPE,
cwd=sentinel_dir)
ret.check_returncode()
print(ret.stdout.decode("utf-8"))
with open(out_path.with_suffix(".s"), "wb") as fp:
fp.write(ret.stdout)
def task_run_sby():
r"""run symbiyosys flow on Sentinel, "doit list --all run_sby" for choices""" # noqa: E501
root = Path(".")
formal_tests = Path("./tests/formal/")
cores_dir = formal_tests / "riscv-formal" / "cores"
sentinel_dir = cores_dir / "sentinel"
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")]
genchecks = formal_tests / "riscv-formal" / "checks" / "genchecks.py"
disasm_py = formal_tests / "disasm.py"
checks_cfg = formal_tests / "checks.cfg"
wrapper_sv = formal_tests / "wrapper.sv"
# Expose this until I can figure out how to serialize the setup for
# all the sby tasks when they run in parallel. If this line isn't
# present, default behavior will try to initialize riscv-formal repo
# twice (as well as the other submodules, if "-- ." on end of "_git_init"
# tasks isn't present). This results in a failed checkout that doit thinks
# succeeded (thanks to only having the ".git" file as a target), which
# then causes dependent files like genchecks.py to never appear.
#
# Idk if the other two tasks in this setup can meaningfully race (I think
# they're idempotent), but just in case, use separate task for now.
yield {
"name": "setup",
"actions": [],
"setup": ["_git_init:riscv-formal",
"_formal_gen_sentinel",
"_formal_gen_files"],
"doc": "run sby initialization once, mostly used to avoid doit "
"parallel race conditions"
}
for c in SBY_TESTS:
sby_file = (sentinel_dir / "checks" / c).with_suffix(".sby")
yield {
"name": c,
"title": partial(print_title,
title=f"Running RISC-V Formal Test {c}"),
"actions": [CmdAction(f"sby -f {sby_file.name}",
cwd=sentinel_dir / "checks"),
(maybe_disasm_move_vcd, (sentinel_dir, root,
sby_file))],
"targets": [sentinel_dir / "checks" / c / "status"],
"file_dep": pyfiles + [genchecks, disasm_py, checks_cfg,
wrapper_sv,
Path("./src/sentinel/microcode.asm")],
"verbosity": 2,
"setup": ["run_sby:setup"],
}
# Customize the status task(s) to print all output on a single line.
# Think like autoconf scripts "checking for foo... yes"!
def echo_sby_status(checks_dir, c): # noqa: D103
# TODO: Handle "not run yet" if status doesn't exist? What about
# "out-of-date"?
with open(checks_dir / c / "status", "r") as fp:
res = fp.read()
if "FAIL" in res:
print(f"{c}... FAIL")
else:
print(f"{c}... PASS")
def task_list_sby_status():
r"""list "run_sby" subtasks' status, "doit list --all list_sby_status" for choices""" # noqa: E501
formal_tests = Path("./tests/formal/")
cores_dir = formal_tests / "riscv-formal" / "cores"
sentinel_dir = cores_dir / "sentinel"
checks_dir = sentinel_dir / "checks"
for c in SBY_TESTS:
yield {
"name": c,
"actions": [(echo_sby_status, (checks_dir, c))],
"file_dep": [sentinel_dir / "checks" / c / "status"],
"verbosity": 2,
"uptodate": [False],
"meta": {
"suppress_reporter": True
}
}
# Upstream
UNSUPPORTED_UPSTREAM = ("breakpoint",)
# I figured out the correct invocations for compiling and objdump by running
# the autoconf script, compiling normally, and seeing which flags the compiler
# and objdump are invoked with. It might not be perfect (but seems to work
# fine).
def task_compile_upstream():
r"""compile riscv-tests tests to ELF, "doit list --all compile_upstream" for choices""" # noqa: E501
flags = "-march=rv32g -mabi=ilp32 -static -mcmodel=medany \
-fvisibility=hidden -nostdlib -nostartfiles"
upstream_tests = Path("./tests/upstream/")
outdir = upstream_tests / "binaries"
cfg = upstream_tests / "riscv_test.h"
link_file = upstream_tests / "link.ld"
submod = upstream_tests / "riscv-tests" / ".git"
isa_dir = upstream_tests / "riscv-tests/isa/rv32ui"
mmode_dir = upstream_tests / "riscv-tests/isa/rv32mi"
macros_dir = upstream_tests / "riscv-tests/isa/macros/scalar"
env_dir = upstream_tests / "riscv-tests/env"
yield {
"name": "mkdir",
"actions": [(create_folder, [outdir])],
"uptodate": [run_once]
}
for source_file in chain(isa_dir.glob('*.S'), mmode_dir.glob('*.S')):
if source_file.stem in UNSUPPORTED_UPSTREAM:
continue
elf_file = outdir / source_file.with_suffix(".elf").name
bin_file = outdir / source_file.with_suffix("").name
dump_file = elf_file.with_suffix(".dump")
yield {
"title": title_with_actions,
"name": source_file.stem,
# {dependencies}, {targets}, and {{targets}} don't work
# for parallel (at least on Windows). But f-strings do.
"actions": [f"riscv64-unknown-elf-gcc {source_file} {flags} "
f"-I {upstream_tests} -I {macros_dir} -I {env_dir} "
f"-T {link_file} -o {elf_file}",
"riscv64-unknown-elf-objcopy -O binary "
f"{elf_file} {bin_file}",
f"riscv64-unknown-elf-objdump --disassemble-all "
"--disassemble-zeroes --section=.text "
"--section=.text.startup --section=.text.init "
f"--section=.data {elf_file} > {dump_file}"],
"file_dep": [source_file, cfg, link_file, submod],
"targets": [elf_file, bin_file, dump_file],
"setup": ["compile_upstream:mkdir"]
}
def save_last_platform_and_bus(platform, bus): # noqa: D103
return {"last_platform": platform, "last_bus": bus}
def last_platform_and_bus(task, values, platform, bus): # noqa: D103
task.value_savers.append(partial(save_last_platform_and_bus,
platform, bus))
return values.get("last_platform") == platform and \
values.get("last_bus") == bus
# Rust firmware development
@task_params([{"name": "platform", "short": "p",
"default": "icestick",
"help": "platform to build baseline gateware"},
{"name": "interface", "short": "i",
"default": "wishbone",
"help": "peripheral interconnect bus type"}])
def task__make_rand_firmware(platform, interface):
"""create a baseline gateware for firmware development"""
pyfiles = [s for s in Path("./src/sentinel").glob("*.py")] + \
[Path("./examples/attosoc.py")]
build_dir = Path("./build-rust")
rand_hex = build_dir / "rand.hex"
rand_asc = build_dir / "rand.asc"
return {
"actions": ["pdm demo -b build-rust -r -x rand -p {platform} -i {interface}"], # noqa: E501
"targets": [rand_hex, rand_asc],
"file_dep": pyfiles + [Path("./src/sentinel/microcode.asm")],
"uptodate": [partial(last_platform_and_bus, platform=platform,
bus=interface)],
}
def task__compile_rust_firmware():
"""compile Rust firmware and show size"""
rs_files = [s for s in chain(Path("sentinel-rt/examples").glob("*.rs"),
Path("sentinel-rt/src").glob("*.rs"))] + \
[s for s in Path(".").glob("*/Cargo.toml")] + \
[Path("Cargo.toml")]
attosoc_elf = Path("target/riscv32i-unknown-none-elf/release/examples/attosoc") # noqa: E501
return {
"actions": ["cargo build --release --example=attosoc",
f"riscv64-unknown-elf-size.exe {attosoc_elf}"],
"verbosity": 2,
"file_dep": rs_files,
"targets": [attosoc_elf]
}
def task__replace_rust_firmware():
"""replace random firmware image inside baseline gateware with Rust program""" # noqa: E501
attosoc_elf = Path("target/riscv32i-unknown-none-elf/release/examples/attosoc") # noqa: E501
build_dir = Path("./build-rust")
rand_asc = build_dir / "rand.asc"
rand_hex = build_dir / "rand.hex"
firmware_hex = build_dir / "firmware.hex"
top_asc = build_dir / "top.asc"
top_bin = build_dir / "top.bin"
return {
"actions": [f"pdm demo -b build-rust -n -g {attosoc_elf} -x firmware",
f"icebram {rand_hex} {firmware_hex} < {rand_asc} > {top_asc}", # noqa: E501
f"icepack {top_asc} {top_bin}"],
"targets": [top_bin],
"file_dep": [attosoc_elf, rand_asc, rand_hex]
}
@task_params([{"name": "programmer", "short": "p",
"choices": (("ofl", "use openFPGALoader"),
("iceprog", "use iceprog")),
"default": "iceprog",
"help": "programmer application to use"}])
def task__program_rust_firmware(programmer):
"""load Rust firmware image onto FPGA"""
build_dir = Path("./build-rust")
top_bin = build_dir / "top.bin"
match programmer:
case "ofl":
prog_action = f"openfpgaloader -b ice40_generic {top_bin}"
case "iceprog":
prog_action = f"iceprog {top_bin}"
case _:
assert False
return {
"actions": [prog_action],
"uptodate": [False],
"verbosity": 2,
"file_dep": [top_bin]
}