-
Notifications
You must be signed in to change notification settings - Fork 25
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #35 from vaughnbetz/sm_asic_flow
Sm asic flow
- Loading branch information
Showing
24 changed files
with
2,704 additions
and
620 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,169 @@ | ||
|
||
import sys | ||
import os | ||
import argparse | ||
import subprocess as sp | ||
import shlex | ||
import csv | ||
import re | ||
|
||
############### PYTHON3 ############### | ||
#This allows for the command to output to console while running but with certain commands it causes issues (awk) | ||
def run_shell_cmd(cmd_str, out_flag, env=None):
    """Run a shell command, optionally streaming its output while it runs.

    Args:
        cmd_str: command line to run (tokenized with shlex; no shell=True).
        out_flag: 0 = capture only, 1 = also echo each stdout line to our
                  stdout, 2 = also echo each stdout line to our stderr.
        env: environment mapping for the child process; defaults to the
             module-global ``cur_env`` installed by main() (falling back to
             the inherited environment if that global is absent).

    Returns:
        (cmd_stdout, cmd_stderr) as decoded utf-8 strings.
    """
    if env is None:
        # main() installs cur_env as a module global; look it up lazily so
        # existing call sites keep working and standalone use doesn't raise
        # NameError.
        env = globals().get("cur_env")
    cmd_lst = shlex.split(cmd_str)
    proc = sp.Popen(cmd_lst, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE, env=env)
    cmd_stdout = ""
    # Bug fix: proc.stdout yields bytes, so the readline sentinel must be
    # b"" (the old "" sentinel never matched); the old poll()/break exit
    # could also drop buffered trailing output once the process finished.
    for line in iter(proc.stdout.readline, b""):
        cmd_stdout += line.decode("utf-8")
        if out_flag == 1:
            sys.stdout.buffer.write(line)
        elif out_flag == 2:
            sys.stderr.buffer.write(line)
    _, cmd_stderr = proc.communicate()
    cmd_stderr = cmd_stderr.decode("utf-8")
    print("cmd: %s returned with: %d" % (cmd_str, proc.returncode))
    return cmd_stdout, cmd_stderr
|
||
#safe mode for running shell commands | ||
def safe_run_shell_cmd(cmd_str, env=None):
    """Run a shell command to completion and capture its output ("safe mode").

    Args:
        cmd_str: command line to run (tokenized with shlex; no shell=True).
        env: environment mapping for the child process; defaults to the
             module-global ``cur_env`` installed by main() (falling back to
             the inherited environment if that global is absent).

    Returns:
        (cmd_stdout, cmd_stderr) as decoded utf-8 strings.  On a non-zero
        exit code the stderr text is printed but no exception is raised,
        preserving the original best-effort behavior.
    """
    if env is None:
        # Same lazy global lookup as run_shell_cmd; avoids a NameError when
        # the helpers are used without going through main().
        env = globals().get("cur_env")
    cmd_lst = shlex.split(cmd_str)
    cmd_out = sp.run(cmd_lst, stdin=sp.PIPE, stdout=sp.PIPE, stderr=sp.PIPE, env=env)
    cmd_stdout = cmd_out.stdout.decode("utf-8")
    cmd_stderr = cmd_out.stderr.decode("utf-8")
    if cmd_out.returncode != 0:
        print(cmd_stderr)
    return cmd_stdout, cmd_stderr
############### PYTHON3 ############### | ||
|
||
#return the relative path of top level repo (COFFE in this case) | ||
def find_top_repo(repo_str):
    """Return the relative path ("../../…") from the current working
    directory up to the nearest enclosing directory named *repo_str*
    (the top-level COFFE repo).

    Raises:
        ValueError: if no component of the cwd matches *repo_str*.
    """
    path_parts = os.getcwd().split("/")
    # Distance to the repo dir = number of path components below it, which
    # is its index when the components are read innermost-first.
    depth = list(reversed(path_parts)).index(repo_str)
    return "../" * depth
|
||
def make_dir(dir_rel_path):
    """Create directory *dir_rel_path* unless it already exists.

    Only a single level is created (the parent must already exist),
    matching os.mkdir semantics.
    """
    if os.path.isdir(dir_rel_path):
        return
    os.mkdir(dir_rel_path)
|
||
#creates output directories and move to arch out folder to prepare for scripts to run | ||
def create_out_dirs(hard_params, arch_params):
    """Create the nested output directory tree for one COFFE ASIC run.

    Layout created:
        <coffe_repo_path>/output_files/<coffe_design_name>/<arch_dir>

    Args:
        hard_params: dict providing "arch_dir" (leaf directory name).
        arch_params: dict providing "coffe_repo_path" and
            "coffe_design_name".

    Side effect: leaves the current working directory at
    arch_params["coffe_repo_path"], which callers rely on.

    Returns:
        Absolute path to the architecture output directory the ASIC flow
        should run from.
    """
    os.chdir(arch_params["coffe_repo_path"])
    # getcwd() after the chdir gives the resolved repo root, matching the
    # path the old chdir-ladder implementation returned.
    arch_dir = os.path.join(
        os.getcwd(),
        "output_files",
        arch_params["coffe_design_name"],
        hard_params["arch_dir"],
    )
    # One makedirs call replaces the old mkdir/chdir ladder; exist_ok keeps
    # reruns safe, same as the old isdir-guarded mkdir helper.
    os.makedirs(arch_dir, exist_ok=True)
    return arch_dir
|
||
def modify_hb_params(hard_params, arch_params):
    """Derive short output-directory names from the configured folder paths.

    For every "*folder" path in *hard_params*, store its last path component
    under the matching "*_dir" key (synth_dir / pnr_dir / pt_dir), and store
    the last component of arch_params["arch_out_folder"] as
    hard_params["arch_dir"].  Mutates hard_params in place; returns None.
    """
    # Bug fix: iterate over a snapshot -- the original iterated
    # hard_params.items() while inserting the new *_dir keys, which raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    for hb_key, hb_val in list(hard_params.items()):
        if "folder" not in hb_key:
            continue
        # Last component of the (absolute) path, i.e. the directory name.
        out_dir_name = hb_val.split("/")[-1]
        if "synth_" in hb_key:
            hard_params["synth_dir"] = out_dir_name
        elif "pr_" in hb_key:
            hard_params["pnr_dir"] = out_dir_name
        elif "primetime_" in hb_key:
            hard_params["pt_dir"] = out_dir_name
    # The old code split the already-extracted basename a second time; a
    # single split is sufficient.
    hard_params["arch_dir"] = arch_params["arch_out_folder"].split("/")[-1]
|
||
#outputs csv to the report_csv_out dir | ||
def reports_to_csv(report_path):
    """Condense COFFE ASIC-flow report files into one CSV.

    Scans *report_path* for files whose name contains "report_"; the
    underscore-separated filename encodes the run parameters (an optional
    "mode*" token shifts the later fields by one; token 4+idx is the literal
    "wire" separator and is skipped).  Area/delay/power numbers are pulled
    out of each file body and everything is written to
    <script_path or cwd>/report_csv_out/condensed_report.csv.

    Side effect: chdirs into report_path while scanning and back afterwards.

    Returns:
        dict of column-name -> list of string values, one entry per report.
    """
    # Resolve the output base before we chdir away.  main() sets the
    # script_path global; fall back to the caller's cwd so the function also
    # works standalone (the original raised NameError in that case).
    out_base = globals().get("script_path", os.getcwd())
    report_csv_out_dir = "report_csv_out"
    decimal_re = re.compile(r'\d+\.{0,1}\d*')
    report_dict = {
        'mode': [],
        'top_level_mod': [],
        'period': [],
        'wire_model': [],
        'metal_layers': [],
        'utilization': [],
        'area': [],
        'delay': [],
        'power': []
    }
    os.chdir(report_path)
    for rep in os.listdir(report_path):
        if "report_" not in rep:
            continue
        # Parse the asic params out of the report filename.
        rep_params = rep.split("_")
        if len(rep_params) < 7:
            continue
        # Mode-specific reports carry an extra "mode..." token; shift all
        # later fields by one.  Be careful: the mode param only works for the
        # dsp block (verilog with mode_0/mode_1 mux ports) -- TODO generalize.
        if "mode" in rep_params[1]:
            report_dict["mode"].append(rep_params[1])
            param_idx = 1
        else:
            report_dict["mode"].append("none")
            param_idx = 0
        report_dict["top_level_mod"].append(rep_params[1 + param_idx])
        report_dict["period"].append(rep_params[2 + param_idx])
        # index 4+param_idx holds the "wire" keyword and is skipped
        report_dict["wire_model"].append(rep_params[3 + param_idx])
        report_dict["metal_layers"].append(rep_params[5 + param_idx])
        # strip the .txt extension off the last token
        report_dict["utilization"].append(os.path.splitext(rep_params[6 + param_idx])[0])
        area = delay = power = ""
        with open(rep, "r") as fd:
            for line in fd:
                match = decimal_re.search(line)
                if match is None:
                    # Label line without a number: the original crashed here
                    # with AttributeError on .group(); skip it instead.
                    continue
                if "area" in line:
                    area = match.group(0)
                elif "delay" in line:
                    delay = match.group(0)
                elif "power" in line:
                    power = match.group(0)
        report_dict['area'].append(area)
        report_dict['delay'].append(delay)
        report_dict['power'].append(power)
    os.chdir(out_base)
    os.makedirs(report_csv_out_dir, exist_ok=True)
    # NOTE: the original built every row once before opening the file and
    # then again while writing -- the first pass was dead code and is gone.
    with open(os.path.join(report_csv_out_dir, "condensed_report.csv"), "w") as fd:
        writer = csv.writer(fd)
        writer.writerow(report_dict.keys())
        for row_idx in range(len(report_dict["delay"])):
            writer.writerow([report_dict[key][row_idx] for key in report_dict])
    return report_dict
|
||
def main():
    """Parse CLI args and condense a COFFE report directory into a CSV."""
    # These globals are consumed by the shell-command helpers and by
    # reports_to_csv respectively.
    global cur_env
    cur_env = os.environ.copy()
    global script_path
    script_path = os.getcwd()
    parser = argparse.ArgumentParser()
    # required=True: the original accepted a missing -r and then crashed in
    # os.path.join(None, ...); argparse now prints a proper usage error.
    parser.add_argument('-r', '--report_path', type=str, required=True,
                        help="path to directory containing coffe report outputs")
    args = parser.parse_args()
    # Create the condensed reports (return value is unused here).
    report_pwd = os.path.join(os.getcwd(), args.report_path)
    reports_to_csv(report_pwd)


if __name__ == "__main__":
    main()
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,101 @@ | ||
import matplotlib.pyplot as plt | ||
import csv | ||
import sys | ||
import argparse | ||
|
||
|
||
def plot_key_vs_freq(data_dict,key=None,cost=False):
    """Scatter-plot a report metric against achieved frequency and save a PNG.

    Args:
        data_dict: dict of column-name -> list of string values, as produced
            by read_csv() (uses "period", "delay", "area" and *key*).
        key: column to plot ("area", "power" or "delay"); ignored when
            cost is True.
        cost: when True, plot the per-report area*delay product instead of
            *key* and save to "cost_fig.png".

    Saves "<key>_fig.png" (or "cost_fig.png") in the current directory.
    NOTE(review): a period/delay value of exactly "0" maps to a None
    frequency, which would break the min()/max() axis math below --
    presumably such reports never occur; confirm upstream.
    """
    #use delay to get the frequency actually achieved; periods are in ns,
    #so 1/ns * 1000 gives MHz
    target_freqs = [(1/float(i))*1000 if float(i) != 0 else None for i in data_dict["period"]]
    achieved_freqs = [(1/float(i))*1000 if float(i) != 0 else None for i in data_dict["delay"]]
    unique_tfreqs = list(set(target_freqs))
    freq_idxs = [None]*len(target_freqs)
    #The freq_idxs array stores indexes for each report according to the target frequency
    for i,u_tfreq in enumerate(unique_tfreqs):
        for j,t_freq in enumerate(target_freqs):
            if(t_freq == u_tfreq):
                freq_idxs[j] = i
    #now we have map from target freq to achived freq, use this to get to lists for each target_freq
    # Per-unique-target-frequency buckets of metric values / achieved freqs.
    tfreq_unique_data = []
    afreq_unique_data = []
    tfreq_unique_data_2 = []  # delay values, only filled in cost mode
    for i in range(len(unique_tfreqs)):
        tfreq_unique_data.append([])
        afreq_unique_data.append([])
        tfreq_unique_data_2.append([])
    for idx,i in enumerate(freq_idxs):
        if(cost == False):
            tfreq_unique_data[i].append(float(data_dict[key][idx]))
        else:
            # Cost mode: bucket area and delay separately so the per-report
            # products can be averaged below.
            tfreq_unique_data[i].append(float(data_dict["area"][idx]))
            tfreq_unique_data_2[i].append(float(data_dict["delay"][idx]))
        afreq_unique_data[i].append(achieved_freqs[idx])
    # Average the metric and the achieved frequency within each bucket.
    avg_key_val = [None]*len(unique_tfreqs)
    avg_achieved_freq = [None]*len(unique_tfreqs)
    for i in range(len(unique_tfreqs)):
        if(cost == False):
            avg_key_val[i] = float(sum(tfreq_unique_data[i]))/float(len(tfreq_unique_data[i]))
        else:
            # Mean of the per-report area*delay products in this bucket.
            avg_key_val[i] = float(sum([area*delay for area,delay in zip(tfreq_unique_data[i],tfreq_unique_data_2[i])])/float(len(tfreq_unique_data[i])))
        avg_achieved_freq[i] = float(sum(afreq_unique_data[i]))/float(len(afreq_unique_data[i]))
    # Left subplot: raw data points; right subplot: per-target-freq averages.
    fig, ax = plt.subplots(1,2,figsize=(12,5))
    fig.tight_layout(pad=5.0)
    ax0_ylabel_str = ""
    if(cost == False):
        ys = [float(i) for i in data_dict[key]]
        fig_name = f"{key}_fig.png"
        if(key == "area"):
            ax0_ylabel_str = "Area (um2)"
        elif(key == "power"):
            ax0_ylabel_str = "Power (mW)"
            # Scaled x1000 to match the mW label -- presumably the report
            # values are in W; TODO confirm against the flow outputs.
            ys = [i*1000 for i in ys]
            avg_key_val = [i*1000 for i in avg_key_val]
        elif(key == "delay"):
            ax0_ylabel_str = "Delay (ns)"
    else:
        fig_name = "cost_fig.png"
        ax0_ylabel_str = "Area (um2) x Delay (ns)"
        ys = [float(i)*float(j) for i,j in zip(data_dict["area"],data_dict["delay"])]

    # Pad the axis limits by 10% of the data range on each side.
    xmin,xmax = (min(achieved_freqs),max(achieved_freqs))
    ymin,ymax = (min(ys), max(ys))
    yrange = ymax - ymin
    xrange = xmax - xmin
    #data plot for key selected
    ax[0].set_xlim(xmin-(xrange/10),xmax+(xrange/10))
    ax[0].set_ylim(ymin-(yrange/10),ymax+(yrange/10))
    ax[0].set_xlabel("Achieved Freq (MHz)")
    ax[0].set_ylabel(ax0_ylabel_str)
    ax[0].scatter(achieved_freqs,ys,marker='x',c='b')
    # average plot
    ax[1].set_xlim(xmin-(xrange/10),xmax+(xrange/10))
    ax[1].set_ylim(ymin-(yrange/10),ymax+(yrange/10))
    ax[1].scatter(avg_achieved_freq,avg_key_val,marker='x',c='r')
    ax[1].set_xlabel("Achieved Freq (MHz)")
    ax[1].set_ylabel("Avg " + ax0_ylabel_str)
    plt.savefig(fig_name)
|
||
def read_csv(csv_path):
    """Read a condensed-report CSV into a dict of column-name -> value list.

    Args:
        csv_path: path to a CSV file whose first row is the header.

    Returns:
        dict mapping each header name to the list of that column's values
        (all values kept as strings).  Columns are empty lists when the
        file contains no data rows.
    """
    data_dict = {}
    # newline='' is the documented way to open files for the csv module.
    with open(csv_path, 'r', newline='') as infile:
        reader = csv.reader(infile)
        header = next(reader)
        # Bug fix: the original called next(reader) a second time, silently
        # discarding the first data row (the companion writer emits header
        # then data immediately).  Skip only genuinely empty rows, which can
        # appear when a writer didn't pass newline=''.
        columns = list(zip(*(row for row in reader if row)))
    for idx, key in enumerate(header):
        # Guard the header-only case, where zip() yields no columns (the
        # original raised IndexError here).
        data_dict[key] = list(columns[idx]) if columns else []
    return data_dict
|
||
def main():
    """Parse CLI args, load the condensed CSV and emit all four plots."""
    parser = argparse.ArgumentParser()
    # required=True: a missing -c used to surface as an obscure open(None)
    # TypeError inside read_csv; argparse now prints a usage error instead.
    parser.add_argument('-c', '--csv_path', type=str, required=True,
                        help="path to condensed csv file")
    args = parser.parse_args()
    data_dict = read_csv(args.csv_path)
    plot_key_vs_freq(data_dict, "area")
    plot_key_vs_freq(data_dict, "delay")
    plot_key_vs_freq(data_dict, "power")
    plot_key_vs_freq(data_dict, cost=True)


if __name__ == "__main__":
    main()
Oops, something went wrong.