Commit 431a221d authored by Martin Jeřábek's avatar Martin Jeřábek

testfw: formatting

parent 2ffb1e16
import os
import os.path
import json
import random
import logging

from os.path import join, abspath
from pathlib import Path
from typing import Tuple, List, Dict, Any, NewType

from yattag import Doc
from json2html import *
# A single PSL coverage point as loaded from the JSON coverage report.
TPslPoint = NewType('TPslPoint', Dict[str, Any])

# Directory layout of the functional-coverage outputs, rooted at the
# test directory (parent of this script's own directory).
test_dir = Path(__file__).parent.parent.absolute()
build_dir = test_dir / "build"  # test_dir is already absolute
func_cov_dir = build_dir / "functional_coverage"
psl_dir = func_cov_dir / "coverage_data"
html_dir = func_cov_dir / "html"

# Name of the DUT toplevel entity; reassigned in __main__ before use.
dut_top = " "
def merge_psl_coverage_files(out_file: str, in_file_prefix: str) -> None:
    """
    Merge PSL coverage details from multiple files to a single file.

    Reads every '<in_file_prefix>*.json' file in psl_dir, tags each PSL
    point with the name of the test it came from, and dumps the merged
    list of points into func_cov_dir / out_file.

    Raises:
        ValueError  If out_file itself starts with in_file_prefix (the
                    merged output would be picked up as an input).
    """
    if out_file.startswith(in_file_prefix):
        raise ValueError("File name for merging should not have the same prefix as merged files")
    json_out_path = func_cov_dir / out_file
    json_out_list = []  # type: List[TPslPoint]
    for in_filename in psl_dir.glob('{}*.json'.format(in_file_prefix)):
        log.info("Merging JSON PSL coverage from: {}\n".format(in_filename))
        with in_filename.open('rt') as f:
            json_obj = json.load(f)
        # Derive the test name from the file name. NOTE: str.strip(prefix)
        # would remove any *characters* of the prefix from both ends (and
        # could eat leading/trailing characters of the test name itself),
        # so slice the prefix off explicitly instead.
        test_name = in_filename.with_suffix('').name
        if test_name.startswith(in_file_prefix):
            test_name = test_name[len(in_file_prefix):]
        # Add test name to each PSL point
        for psl_point in json_obj["details"]:
            psl_point["test"] = test_name
        json_out_list += json_obj["details"]
    with json_out_path.open('wt') as f:
        json.dump(json_out_list, f, indent=1)
def collapse_psl_coverage_files(non_collapsed): def collapse_psl_coverage_files(non_collapsed):
...@@ -67,16 +62,15 @@ def collapse_psl_coverage_files(non_collapsed): ...@@ -67,16 +62,15 @@ def collapse_psl_coverage_files(non_collapsed):
log.info("Collapsing PSL points with common hierarchy below: {}".format(dut_top)) log.info("Collapsing PSL points with common hierarchy below: {}".format(dut_top))
collapsed = [] collapsed = []
# We do stupid quadratic sort because we don't really care if it is gonna last 10 # We do stupid quadratic sort because we don't really care if it is gonna
# or 40 seconds... If we ever get to the point that this takes too long we know # last 10 or 40 seconds... If we ever get to the point that this takes too
# that we have reeealy lot of PSL points and we turned into Semiconductor monster! # long, we know that we have reeealy lot of PSL points and we turned into
# Semiconductor monster!
for psl_in in non_collapsed: for psl_in in non_collapsed:
found = False found = False
for psl_out in collapsed: for psl_out in collapsed:
# Check if name in output list is equal to searched name from
# Check if name in output list is equal to searched name from "dut_top" # "dut_top" entity down. Skip if not
# entity down. Skip if not
in_name = psl_in["name"].split(dut_top)[-1] in_name = psl_in["name"].split(dut_top)[-1]
out_name = psl_out["name"].split(dut_top)[-1] out_name = psl_out["name"].split(dut_top)[-1]
if (out_name != in_name): if (out_name != in_name):
...@@ -102,25 +96,25 @@ def collapse_psl_coverage_files(non_collapsed): ...@@ -102,25 +96,25 @@ def collapse_psl_coverage_files(non_collapsed):
psl_out["count"] += psl_in["count"] psl_out["count"] += psl_in["count"]
found = True found = True
break; break
# Input point was not collapsed into any of output points -> Add directly # Input point was not collapsed into any of output points -> Add directly
if (not found): if not found:
collapsed.append(psl_in) collapsed.append(psl_in)
return collapsed return collapsed
def get_collapsed_file_name(psl_point: TPslPoint) -> str:
    """
    Create a unique file name for collapsed PSL points.

    The name is built from the PSL point's hierarchical name below
    dut_top plus its source line number, with characters that are
    awkward in file names mapped to underscores.
    """
    # Map every troublesome character (dots, spaces, parens, '@') to '_'
    # in a single pass instead of chained str.replace() calls.
    unsafe_to_underscore = str.maketrans({ch: "_" for ch in ". ()@"})
    base_name = dut_top + psl_point["name"].split(dut_top)[-1]
    return "{}_{}".format(base_name.translate(unsafe_to_underscore),
                          psl_point["line"])
def load_json_psl_coverage(filename: str):
    """
    Load a PSL coverage JSON file into a JSON object.

    The file is looked up relative to func_cov_dir.
    """
    cov_path = func_cov_dir / filename
    log.info("Loading JSON PSL output: {}".format(cov_path))
    # Parse the JSON report straight from the open file handle.
    with cov_path.open('rt') as src:
        return json.load(src)
def split_json_coverage_by_file(json) -> Dict[Path, List[TPslPoint]]:
    """
    Parse input PSL Coverage JSON file. Group PSL endpoints by file.

    Arguments:
        json    Iterable of PSL points (dicts), each carrying a "file"
                key with the source file path.
                NOTE(review): the parameter name shadows the stdlib
                'json' module; kept unchanged for interface compatibility.

    Returns:
        Dictionary in format {filename : psl_points} where psl_points is
        a list of PSL points located in filename.
    """
    file_dict = {}  # type: Dict[Path, List[TPslPoint]]
    for psl_point in json:
        # setdefault replaces the explicit "first PSL point of this file"
        # branch; 'src_path' avoids shadowing the builtin 'file'.
        src_path = Path(psl_point["file"])
        file_dict.setdefault(src_path, []).append(psl_point)
    return file_dict
...@@ -166,7 +159,7 @@ def add_html_table_header(doc, tag, text, headers, back_color="White"): ...@@ -166,7 +159,7 @@ def add_html_table_header(doc, tag, text, headers, back_color="White"):
text(header) text(header)
def calc_coverage_results(psl_points: List[TPslPoint], psl_type) -> Tuple[int, int]:
    """
    Calculate coverage results from a list of PSL points in JSON format.

    Arguments:
        psl_points  PSL points to evaluate.
        psl_type    Directive type to count ("cover" or "assertion");
                    points with any other directive are ignored.

    Returns:
        (ok, nok) tuple: number of points whose status is "passed" or
        "covered", and number of remaining points of the given type.
    """
    ok = 0
    nok = 0
    for psl_point in psl_points:
        if psl_point["directive"] != psl_type:
            continue
        # Both "passed" (assertions) and "covered" (cover points) count
        # as a hit.
        if psl_point["status"] in ("passed", "covered"):
            ok += 1
        else:
            nok += 1
    return ok, nok
...@@ -200,7 +193,8 @@ def calc_coverage_color(coverage: float) -> str: ...@@ -200,7 +193,8 @@ def calc_coverage_color(coverage: float) -> str:
return "Red" return "Red"
def print_cov_cell_percentage(doc, tag, text, psl_points, coverage_type, merge_abs_vals) -> None: def print_cov_cell_percentage(doc, tag, text, psl_points: List[TPslPoint],
coverage_type, merge_abs_vals) -> None:
""" """
""" """
ok, nok = calc_coverage_results(psl_points, coverage_type) ok, nok = calc_coverage_results(psl_points, coverage_type)
...@@ -229,7 +223,7 @@ def print_cov_cell_percentage(doc, tag, text, psl_points, coverage_type, merge_a ...@@ -229,7 +223,7 @@ def print_cov_cell_percentage(doc, tag, text, psl_points, coverage_type, merge_a
text("NA") text("NA")
def add_psl_html_header(doc, tag, text, filename, psl_points): def add_psl_html_header(doc, tag, text, filename, psl_points: List[TPslPoint]):
""" """
Create HTML page header with info about coverage data within list of Create HTML page header with info about coverage data within list of
PSL points in JSON format. PSL points in JSON format.
...@@ -259,7 +253,8 @@ def add_psl_html_header(doc, tag, text, filename, psl_points): ...@@ -259,7 +253,8 @@ def add_psl_html_header(doc, tag, text, filename, psl_points):
coverage_type, merge_abs_vals=False) coverage_type, merge_abs_vals=False)
def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"): def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
""" """
Add HTML table entry for non-collapsed PSL functional coverage point. Add HTML table entry for non-collapsed PSL functional coverage point.
...@@ -285,7 +280,8 @@ def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="Wh ...@@ -285,7 +280,8 @@ def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="Wh
text(psl_point["status"]) text(psl_point["status"])
def add_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"): def add_colapsed_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
""" """
Add HTML table entry for collapsed PSL functional coverage point. Adds Add HTML table entry for collapsed PSL functional coverage point. Adds
llink reference to collapsed entries on separate site. llink reference to collapsed entries on separate site.
...@@ -314,7 +310,8 @@ def add_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White" ...@@ -314,7 +310,8 @@ def add_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"
text(psl_point["status"]) text(psl_point["status"])
def add_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"): def add_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
""" """
Add PSL point in JSON format to HTML table. For collapsed entries, Add PSL point in JSON format to HTML table. For collapsed entries,
overall result is shown and link to collapsed points is inserted. overall result is shown and link to collapsed points is inserted.
...@@ -329,17 +326,16 @@ def add_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"): ...@@ -329,17 +326,16 @@ def add_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"):
# Create separate page with collapsed PSL points for this PSL statement # Create separate page with collapsed PSL points for this PSL statement
# Add unique filename # Add unique filename
if ("colapsed_points" in psl_point): if ("colapsed_points" in psl_point):
file_name = os.path.join(html_dir, get_collapsed_file_name(psl_point)) file_name = html_dir / get_collapsed_file_name(psl_point)
create_psl_file_page(file_name, psl_point["colapsed_points"]); create_psl_file_page(file_name, psl_point["colapsed_points"])
def create_psl_file_page(filename: str, psl_points): def create_psl_file_page(filename: Path, psl_points: List[TPslPoint]) -> None:
""" """
Create HTML file with list of PSL coverage statements. Create HTML file with list of PSL coverage statements.
""" """
parsed_file_name = os.path.basename(filename) parsed_file_name = filename.name
html_cov_path = os.path.join(html_dir, html_cov_path = html_dir / filename.with_suffix('.html').name
"{}.html".format(parsed_file_name))
doc, tag, text = Doc().tagtext() doc, tag, text = Doc().tagtext()
...@@ -348,7 +344,7 @@ def create_psl_file_page(filename: str, psl_points): ...@@ -348,7 +344,7 @@ def create_psl_file_page(filename: str, psl_points):
# Add "Cover" and "Assertion" points # Add "Cover" and "Assertion" points
psl_types = [{"name" : "Cover Points" , "type" : "cover"}, \ psl_types = [{"name" : "Cover Points" , "type" : "cover"}, \
{"name" : "Assertions" , "type" : "assertion"}] {"name" : "Assertions", "type" : "assertion"}]
for psl_type in psl_types: for psl_type in psl_types:
with tag('p'): with tag('p'):
with tag('table', width='100%', border="1px solid black"): with tag('table', width='100%', border="1px solid black"):
...@@ -361,11 +357,11 @@ def create_psl_file_page(filename: str, psl_points): ...@@ -361,11 +357,11 @@ def create_psl_file_page(filename: str, psl_points):
if (psl_point["directive"] == psl_type["type"]): if (psl_point["directive"] == psl_type["type"]):
add_psl_table_entry(doc, tag, text, psl_point) add_psl_table_entry(doc, tag, text, psl_point)
with open(html_cov_path, 'w', encoding='utf-8') as html_file: with html_cov_path.open('wt', encoding='utf-8') as html_file:
html_file.write(doc.getvalue()) html_file.write(doc.getvalue())
def create_psl_file_refs_table(doc, tag, text, psl_by_files): def create_psl_file_refs_table(doc, tag, text, psl_by_files: Dict[Path, List[TPslPoint]]) -> None:
""" """
Create entries to HTML table for each file. Calculates Create entries to HTML table for each file. Calculates
coverage summary for each file. Adds Reference to files. coverage summary for each file. Adds Reference to files.
...@@ -373,8 +369,8 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files): ...@@ -373,8 +369,8 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files):
for file_name, psl_list in psl_by_files.items(): for file_name, psl_list in psl_by_files.items():
with tag('tr'): with tag('tr'):
with tag('td'): with tag('td'):
name = os.path.basename(file_name) name = file_name.name
with tag('a', href= os.path.join("html", name + ".html")): with tag('a', href=os.path.join("html", name + ".html")):
text(name) text(name)
coverage_types = ["cover", "assertion"] coverage_types = ["cover", "assertion"]
for coverage_type in coverage_types: for coverage_type in coverage_types:
...@@ -382,7 +378,7 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files): ...@@ -382,7 +378,7 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files):
coverage_type, merge_abs_vals=True) coverage_type, merge_abs_vals=True)
def create_psl_report(psl_by_files, psl_orig): def create_psl_report(psl_by_files: Dict[Path, List[TPslPoint]], psl_orig) -> None:
""" """
Generates PSL report. Each list within psl_by_files has separate Generates PSL report. Each list within psl_by_files has separate
HTML page. Summary page is created from psl_orig HTML page. Summary page is created from psl_orig
...@@ -391,7 +387,7 @@ def create_psl_report(psl_by_files, psl_orig): ...@@ -391,7 +387,7 @@ def create_psl_report(psl_by_files, psl_orig):
for file_name, psl_list in psl_by_files.items(): for file_name, psl_list in psl_by_files.items():
create_psl_file_page(file_name, psl_list) create_psl_file_page(file_name, psl_list)
html_rep_path = os.path.join(func_cov_dir, "functional_coverage_report.html") html_rep_path = func_cov_dir / "functional_coverage_report.html"
doc, tag, text = Doc().tagtext() doc, tag, text = Doc().tagtext()
...@@ -404,12 +400,11 @@ def create_psl_report(psl_by_files, psl_orig): ...@@ -404,12 +400,11 @@ def create_psl_report(psl_by_files, psl_orig):
add_html_table_header(doc, tag, text, header, back_color="Peru") add_html_table_header(doc, tag, text, header, back_color="Peru")
create_psl_file_refs_table(doc, tag, text, psl_by_files) create_psl_file_refs_table(doc, tag, text, psl_by_files)
with open(html_rep_path, 'w', encoding='utf-8') as html_file: with html_rep_path.open('wt', encoding='utf-8') as html_file:
html_file.write(doc.getvalue()) html_file.write(doc.getvalue())
if __name__ == "__main__": if __name__ == "__main__":
#dut_top = "can_top_level" #dut_top = "can_top_level"
dut_top = "." dut_top = "."
...@@ -424,9 +419,9 @@ if __name__ == "__main__": ...@@ -424,9 +419,9 @@ if __name__ == "__main__":
json_by_file_colapsed = {} json_by_file_colapsed = {}
json_together_colapsed = [] json_together_colapsed = []
for filename, psls_for_file in json_by_file.items(): for filename, psls_for_file in json_by_file.items():
colapsed = collapse_psl_coverage_files(psls_for_file); colapsed = collapse_psl_coverage_files(psls_for_file)
json_by_file_colapsed[filename] = colapsed json_by_file_colapsed[filename] = colapsed
json_together_colapsed.extend(colapsed) json_together_colapsed += colapsed
# Create PSL report # Create PSL report
create_psl_report(json_by_file_colapsed, json_together_colapsed) create_psl_report(json_by_file_colapsed, json_together_colapsed)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment