Merge pull request #965 from iksnagreb/feature/post-synth-resources
Enable analysis report of post-synthesis resource utilization in JSON
auphelia authored Apr 24, 2024
2 parents a99d82d + 7dbd811 commit 39fb885
Showing 3 changed files with 39 additions and 6 deletions.
34 changes: 28 additions & 6 deletions src/finn/analysis/fpgadataflow/post_synth_res.py
@@ -58,14 +58,13 @@ def post_synth_res(model, override_synth_report_filename=None):
     else:
         raise Exception("Please run synthesis first")
 
-    # TODO build these indices based on table headers instead of hardcoding
     restype_to_ind_default = {
         "LUT": 2,
         "SRL": 5,
         "FF": 6,
         "BRAM_36K": 7,
         "BRAM_18K": 8,
-        "DSP48": 9,
+        "DSP": 10,
     }
     restype_to_ind_vitis = {
         "LUT": 4,
@@ -74,13 +73,36 @@ def post_synth_res(model, override_synth_report_filename=None):
         "BRAM_36K": 9,
         "BRAM_18K": 10,
         "URAM": 11,
-        "DSP48": 12,
+        "DSP": 12,
     }
 
-    if model.get_metadata_prop("platform") == "alveo":
-        restype_to_ind = restype_to_ind_vitis
+    # format: (human_readable_name_in_report, canonical_name)
+    res_types_to_search = [
+        ("Total LUTs", "LUT"),
+        ("SRLs", "SRL"),
+        ("FFs", "FF"),
+        ("RAMB36", "BRAM_36K"),
+        ("RAMB18", "BRAM_18K"),
+        ("URAM", "URAM"),
+        ("DSP Blocks", "DSP"),
+    ]
+
+    # try to infer resource type to table index by
+    # looking at the names in headings
+    header_row = root.findall(".//*[@contents='Instance']/..")
+    if header_row != []:
+        headers = [x.attrib["contents"] for x in list(header_row[0])]
+        restype_to_ind = {}
+        for res_type_name, res_type in res_types_to_search:
+            if res_type_name in headers:
+                restype_to_ind[res_type] = headers.index(res_type_name)
     else:
-        restype_to_ind = restype_to_ind_default
+        # could not infer resource types from header
+        # fall back to default indices
+        if model.get_metadata_prop("platform") == "alveo":
+            restype_to_ind = restype_to_ind_vitis
+        else:
+            restype_to_ind = restype_to_ind_default
 
     def get_instance_stats(inst_name):
         row = root.findall(".//*[@contents='%s']/.." % inst_name)
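
The header-inference pattern above can be exercised in isolation. Below is a minimal, self-contained sketch run against a hand-written XML fragment; the tag names (table, tablerow, tablecell) and the flat nesting are assumptions chosen for illustration, not the exact layout of a real Vivado utilization report:

import xml.etree.ElementTree as ET

# hand-written stand-in for the header row of a utilization report table;
# tag names and nesting here are illustrative assumptions, not the real layout
report = ET.fromstring(
    """
    <table>
      <tablerow>
        <tablecell contents="Instance"/>
        <tablecell contents="Module"/>
        <tablecell contents="Total LUTs"/>
        <tablecell contents="SRLs"/>
        <tablecell contents="FFs"/>
      </tablerow>
    </table>
    """
)

res_types_to_search = [
    ("Total LUTs", "LUT"),
    ("SRLs", "SRL"),
    ("FFs", "FF"),
]

# same pattern as the patch: the header row is the parent of the cell
# whose 'contents' attribute equals "Instance"; map each known heading
# to its column index instead of hardcoding the indices
header_row = report.findall(".//*[@contents='Instance']/..")
restype_to_ind = {}
if header_row != []:
    headers = [x.attrib["contents"] for x in list(header_row[0])]
    for res_type_name, res_type in res_types_to_search:
        if res_type_name in headers:
            restype_to_ind[res_type] = headers.index(res_type_name)

print(restype_to_ind)  # -> {'LUT': 2, 'SRL': 3, 'FF': 4}

With this fragment the sketch recovers the column indices from the headings, which is exactly why the patch can drop the hardcoded DSP48/DSP indices above.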
10 changes: 10 additions & 0 deletions src/finn/builder/build_dataflow_steps.py
@@ -62,6 +62,7 @@
     aggregate_dict_keys,
     op_and_param_counts,
 )
+from finn.analysis.fpgadataflow.post_synth_res import post_synth_res
 from finn.analysis.fpgadataflow.res_estimation import (
     res_estimation,
     res_estimation_complete,
@@ -801,6 +802,11 @@ def step_synthesize_bitfile(model: ModelWrapper, cfg: DataflowBuildConfig):
             model.get_metadata_prop("vivado_synth_rpt"),
             report_dir + "/post_synth_resources.xml",
         )
+
+        post_synth_resources = model.analysis(post_synth_res)
+        with open(report_dir + "/post_synth_resources.json", "w") as f:
+            json.dump(post_synth_resources, f, indent=2)
+
         vivado_pynq_proj_dir = model.get_metadata_prop("vivado_pynq_proj")
         timing_rpt = (
             "%s/finn_zynq_link.runs/impl_1/top_wrapper_timing_summary_routed.rpt"
@@ -825,6 +831,10 @@ def step_synthesize_bitfile(model: ModelWrapper, cfg: DataflowBuildConfig):
             model.get_metadata_prop("vivado_synth_rpt"),
             report_dir + "/post_synth_resources.xml",
         )
+
+        post_synth_resources = model.analysis(post_synth_res)
+        with open(report_dir + "/post_synth_resources.json", "w") as f:
+            json.dump(post_synth_resources, f, indent=2)
     else:
         raise Exception("Unrecognized shell_flow_type: " + str(cfg.shell_flow_type))
     print("Bitfile written into " + bitfile_dir)
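
Once a build completes, the new JSON report sits next to the existing XML copy and can be consumed without any XML parsing. A hypothetical usage sketch follows; the path and the per-instance dict layout are assumptions, so inspect an actual report to confirm the exact keys:

import json

# hypothetical output location; substitute the output_dir of a real build
with open("build_output/report/post_synth_resources.json") as f:
    post_synth = json.load(f)

# assuming a mapping of instance names to per-resource count dicts,
# print the LUT figure reported for each instance
for inst_name, res in post_synth.items():
    print(inst_name, res.get("LUT"))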
1 change: 1 addition & 0 deletions tests/util/test_build_dataflow.py
@@ -64,6 +64,7 @@ def test_end2end_build_dataflow_directory():
     assert os.path.isfile(output_dir + "/bitfile/finn-accel.hwh")
     assert os.path.isfile(output_dir + "/report/post_synth_resources.xml")
     assert os.path.isfile(output_dir + "/report/post_route_timing.rpt")
+    assert os.path.isfile(output_dir + "/report/post_synth_resources.json")
     # verification outputs
     verif_batchsize = np.load(target_dir + "/input.npy").shape[0]
     for i in range(verif_batchsize):
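
Beyond the existence check added above, a follow-up assertion could verify that the report actually parses. A hedged sketch, reusing the test's output_dir variable and assuming the analysis yields a non-empty JSON object:

import json

with open(output_dir + "/report/post_synth_resources.json") as f:
    post_synth = json.load(f)
# the analysis result should be a non-empty mapping
assert isinstance(post_synth, dict) and len(post_synth) > 0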
