Commit 431a221d authored by Martin Jeřábek

testfw: formatting

parent 2ffb1e16
import os
import sys
from json2html import *
import random
import os.path
import logging
from os.path import join, abspath
from pathlib import Path
import json
from pathlib import Path
from yattag import Doc
from typing import Tuple
from typing import Tuple, List, Dict, Any, NewType
from json2html import *
TPslPoint = NewType('TPslPoint', Dict[str, Any])
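The new `TPslPoint` alias uses `typing.NewType`, which has no runtime cost; a minimal sketch of its behavior (the point contents are illustrative):

```python
from typing import Any, Dict, NewType

TPslPoint = NewType('TPslPoint', Dict[str, Any])

point = TPslPoint({"name": "top.dut.cov_point", "count": 3})
# At runtime NewType returns its argument unchanged; the distinct type
# exists only for static checkers such as mypy.
assert point["count"] == 3
```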
test_dir = Path(Path(abspath(__file__)).parent).parent
build_dir = os.path.join(str(test_dir.absolute()), "build")
func_cov_dir = os.path.join(str(build_dir), "functional_coverage")
psl_dir = os.path.join(str(func_cov_dir), "coverage_data")
html_dir = os.path.join(str(func_cov_dir), "html")
test_dir = Path(__file__).parent.parent.absolute()
build_dir = test_dir.absolute() / "build"
func_cov_dir = build_dir / "functional_coverage"
psl_dir = func_cov_dir / "coverage_data"
html_dir = func_cov_dir / "html"
dut_top = " "
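The path setup above trades `os.path.join` string handling for `pathlib` operators; a minimal sketch of the equivalence (paths are illustrative):

```python
import os.path
from pathlib import Path

build_a = os.path.join("/repo/test", "build")   # old style: plain str
build_b = Path("/repo/test") / "build"          # new style: Path object
assert build_a == str(build_b)
# Path objects also provide .glob(), .open() and .with_suffix(),
# which the rest of this commit relies on.
```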
@@ -25,29 +24,25 @@ def merge_psl_coverage_files(out_file: str, in_file_prefix: str) -> None:
"""
Merge PSL coverage details from multiple files into a single file
"""
if (out_file.startswith(in_file_prefix)):
if out_file.startswith(in_file_prefix):
raise ValueError("File name for merging should not have the same prefix as merged files")
json_out_path = os.path.join(func_cov_dir, out_file)
json_out_list = []
for filename in os.listdir(psl_dir):
if (not (filename.startswith(in_file_prefix) and \
filename.endswith(".json"))):
continue
in_filename = os.path.join(psl_dir, filename)
print("Merging JSON PSL coverage from: {}\n".format(in_filename))
with open(in_filename, 'r') as json_in_file:
json_obj = json.load(json_in_file)
json_out_path = func_cov_dir / out_file
json_out_list = [] # type: List[TPslPoint]
for in_filename in psl_dir.glob('{}*.json'.format(in_file_prefix)):
log.info("Merging JSON PSL coverage from: {}\n".format(in_filename))
with in_filename.open('rt') as f:
json_obj = json.load(f)
# Add test name to each PSL point
for psl_point in json_obj["details"]:
psl_point["test"] = filename.strip(in_file_prefix).replace(".json","")
psl_point["test"] = in_filename.with_suffix('').name \
.strip(in_file_prefix)
json_out_list.extend(json_obj["details"])
json_out_list += json_obj["details"]
with open(json_out_path, 'w') as json_out_file:
json.dump(json_out_list, json_out_file, indent=1)
with json_out_path.open('wt') as f:
json.dump(json_out_list, f, indent=1)
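One subtlety worth flagging: `str.strip(in_file_prefix)` treats its argument as a set of characters, not a literal prefix, so test names that share characters with the prefix can be over-trimmed. A prefix-safe helper (hypothetical, not part of this commit) would be:

```python
def strip_prefix(name: str, prefix: str) -> str:
    # Remove the exact prefix if present; leave the name untouched otherwise.
    return name[len(prefix):] if name.startswith(prefix) else name

assert "psl_cov_sanity".strip("psl_cov_") == "anity"        # set-based, eats into the name
assert strip_prefix("psl_cov_sanity", "psl_cov_") == "sanity"
```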
def collapse_psl_coverage_files(non_collapsed):
@@ -67,16 +62,15 @@ def collapse_psl_coverage_files(non_collapsed):
log.info("Collapsing PSL points with common hierarchy below: {}".format(dut_top))
collapsed = []
# We do stupid quadratic sort because we don't really care if it is gonna last 10
# or 40 seconds... If we ever get to the point that this takes too long we know
# that we have reeealy lot of PSL points and we turned into Semiconductor monster!
# We do a stupid quadratic scan because we don't really care whether it
# takes 10 or 40 seconds... If we ever get to the point where this takes
# too long, we know we have a really large number of PSL points and have
# turned into a semiconductor monster!
for psl_in in non_collapsed:
found = False
for psl_out in collapsed:
# Check if name in output list is equal to searched name from "dut_top"
# entity down. Skip if not
# Check if name in output list is equal to searched name from
# "dut_top" entity down. Skip if not
in_name = psl_in["name"].split(dut_top)[-1]
out_name = psl_out["name"].split(dut_top)[-1]
if (out_name != in_name):
@@ -102,25 +96,25 @@ def collapse_psl_coverage_files(non_collapsed):
psl_out["count"] += psl_in["count"]
found = True
break;
break
# Input point was not collapsed into any of output points -> Add directly
if (not found):
if not found:
collapsed.append(psl_in)
return collapsed
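The visible part of the loop merges `count` for points whose names match below `dut_top`. A dict keyed on that sub-hierarchy name would make the merge linear; a sketch under the same JSON-shape assumptions, ignoring the status and `colapsed_points` bookkeeping the full function appears to do in the folded lines:

```python
from typing import Any, Dict, List, Tuple

def collapse_linear(points: List[Dict[str, Any]],
                    dut_top: str) -> List[Dict[str, Any]]:
    # Group points by (name below dut_top, line) and sum their counts.
    by_key = {}  # type: Dict[Tuple[str, int], Dict[str, Any]]
    for p in points:
        key = (p["name"].split(dut_top)[-1], p["line"])
        if key in by_key:
            by_key[key]["count"] += p["count"]
        else:
            by_key[key] = dict(p)  # copy so the input list stays untouched
    return list(by_key.values())
```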
def get_collapsed_file_name(psl_point) -> str:
def get_collapsed_file_name(psl_point: TPslPoint) -> str:
"""
Create unique file name for collapsed PSL points
"""
file_name = dut_top + psl_point["name"].split(dut_top)[-1]
file_name = file_name.replace(".","_")
file_name = file_name.replace(" ","_")
file_name = file_name.replace(")","_")
file_name = file_name.replace("(","_")
file_name = file_name.replace("@","_")
file_name = file_name.replace(".", "_")
file_name = file_name.replace(" ", "_")
file_name = file_name.replace(")", "_")
file_name = file_name.replace("(", "_")
file_name = file_name.replace("@", "_")
file_name = file_name + "_" + str(psl_point["line"])
return file_name
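The chained `str.replace` calls can also be written as a single regex substitution; an illustrative alternative, not part of the commit:

```python
import re

def sanitize(name: str) -> str:
    # '.', ' ', '(', ')' and '@' all become underscores in one pass.
    return re.sub(r"[ .()@]", "_", name)

assert sanitize("top.inst(0)@x y") == "top_inst_0__x_y"
```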
@@ -129,29 +123,28 @@ def load_json_psl_coverage(filename: str):
"""
Load PSL Coverage JSON file to JSON object.
"""
psl_cov_path = os.path.join(func_cov_dir, filename)
psl_cov_path = func_cov_dir / filename
# Read JSON string from file
log.info("Loading JSON PSL output: {}".format(psl_cov_path))
with open(psl_cov_path, 'r') as json_file:
with psl_cov_path.open('rt') as json_file:
return json.load(json_file)
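With `pathlib`, the open/load pair can also collapse into one expression; an equivalent sketch:

```python
import json
from pathlib import Path

def load_json(path: Path):
    # Path.read_text() opens, reads and closes the file in one call.
    return json.loads(path.read_text())
```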
def split_json_coverage_by_file(json):
def split_json_coverage_by_file(json) -> Dict[Path, List[TPslPoint]]:
"""
Parse the input PSL coverage JSON file. Group PSL points by file.
Return a dictionary in the format:
{filename : psl_points} where psl_points is a list of PSL points in
filename.
"""
file_dict = {}
file_dict = {} # type: Dict[Path, List[TPslPoint]]
for psl_point in json:
file = Path(psl_point["file"])
# Create new list if first PSL of a file is parsed
if (not(psl_point["file"] in file_dict)):
file_dict[psl_point["file"]] = []
file_dict[psl_point["file"]].append(psl_point)
if file not in file_dict:
file_dict[file] = []
file_dict[file].append(psl_point)
return file_dict
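The loop above is the standard dict-of-lists grouping; with `collections.defaultdict` the membership check disappears (sketch):

```python
from collections import defaultdict
from pathlib import Path

def split_by_file(points):
    # defaultdict creates the empty list on first access to a new key.
    groups = defaultdict(list)
    for p in points:
        groups[Path(p["file"])].append(p)
    return dict(groups)
```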
@@ -166,7 +159,7 @@ def add_html_table_header(doc, tag, text, headers, back_color="White"):
text(header)
def calc_coverage_results(psl_points, psl_type) -> Tuple[int,int]:
def calc_coverage_results(psl_points: List[TPslPoint], psl_type) -> Tuple[int, int]:
"""
Calculate coverage results from list of PSL points in JSON format.
"""
@@ -174,12 +167,12 @@ def calc_coverage_results(psl_points, psl_type) -> Tuple[int,int]:
nok = 0
for psl_point in psl_points:
if (psl_point["directive"] != psl_type):
continue;
continue
if (psl_point["status"] == "passed" or
psl_point["status"] == "covered"):
ok += 1
else:
nok +=1
nok += 1
return ok, nok
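The `(ok, nok)` pair presumably feeds a percentage computation in the cell-printing code folded out of this diff; a guarded version of that computation might look like:

```python
def coverage_percent(ok: int, nok: int) -> float:
    # Guard the empty case so files with no points of a given type
    # don't divide by zero.
    total = ok + nok
    return 100.0 * ok / total if total else 0.0

assert coverage_percent(3, 1) == 75.0
```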
@@ -200,7 +193,8 @@ def calc_coverage_color(coverage: float) -> str:
return "Red"
def print_cov_cell_percentage(doc, tag, text, psl_points, coverage_type, merge_abs_vals) -> None:
def print_cov_cell_percentage(doc, tag, text, psl_points: List[TPslPoint],
coverage_type, merge_abs_vals) -> None:
"""
"""
ok, nok = calc_coverage_results(psl_points, coverage_type)
@@ -229,7 +223,7 @@ def print_cov_cell_percentage(doc, tag, text, psl_points, coverage_type, merge_a
text("NA")
def add_psl_html_header(doc, tag, text, filename, psl_points):
def add_psl_html_header(doc, tag, text, filename, psl_points: List[TPslPoint]):
"""
Create HTML page header with info about coverage data within list of
PSL points in JSON format.
@@ -259,7 +253,8 @@ def add_psl_html_header(doc, tag, text, filename, psl_points):
coverage_type, merge_abs_vals=False)
def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"):
def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
"""
Add HTML table entry for non-collapsed PSL functional coverage point.
@@ -285,7 +280,8 @@ def add_non_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="Wh
text(psl_point["status"])
def add_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"):
def add_colapsed_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
"""
Add HTML table entry for collapsed PSL functional coverage point. Adds
a link reference to the collapsed entries on a separate page.
@@ -314,7 +310,8 @@ def add_colapsed_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"
text(psl_point["status"])
def add_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"):
def add_psl_table_entry(doc, tag, text, psl_point: TPslPoint,
def_bg_color="White"):
"""
Add a PSL point in JSON format to the HTML table. For collapsed entries,
the overall result is shown and a link to the collapsed points is inserted.
@@ -329,17 +326,16 @@ def add_psl_table_entry(doc, tag, text, psl_point, def_bg_color="White"):
# Create separate page with collapsed PSL points for this PSL statement
# Add unique filename
if ("colapsed_points" in psl_point):
file_name = os.path.join(html_dir, get_collapsed_file_name(psl_point))
create_psl_file_page(file_name, psl_point["colapsed_points"]);
file_name = html_dir / get_collapsed_file_name(psl_point)
create_psl_file_page(file_name, psl_point["colapsed_points"])
def create_psl_file_page(filename: str, psl_points):
def create_psl_file_page(filename: Path, psl_points: List[TPslPoint]) -> None:
"""
Create HTML file with list of PSL coverage statements.
"""
parsed_file_name = os.path.basename(filename)
html_cov_path = os.path.join(html_dir,
"{}.html".format(parsed_file_name))
parsed_file_name = filename.name
html_cov_path = html_dir / filename.with_suffix('.html').name
doc, tag, text = Doc().tagtext()
@@ -348,7 +344,7 @@ def create_psl_file_page(filename: str, psl_points):
# Add "Cover" and "Assertion" points
psl_types = [{"name" : "Cover Points" , "type" : "cover"}, \
{"name" : "Assertions" , "type" : "assertion"}]
{"name" : "Assertions", "type" : "assertion"}]
for psl_type in psl_types:
with tag('p'):
with tag('table', width='100%', border="1px solid black"):
@@ -361,11 +357,11 @@ def create_psl_file_page(filename: str, psl_points):
if (psl_point["directive"] == psl_type["type"]):
add_psl_table_entry(doc, tag, text, psl_point)
with open(html_cov_path, 'w', encoding='utf-8') as html_file:
with html_cov_path.open('wt', encoding='utf-8') as html_file:
html_file.write(doc.getvalue())
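For reference, the `yattag` pattern used throughout this file, as a minimal standalone example (table contents are illustrative):

```python
from yattag import Doc

doc, tag, text = Doc().tagtext()
with tag('table', border="1px solid black"):
    with tag('tr'):
        with tag('td'):
            text('cover')      # text() escapes and appends character data
        with tag('td'):
            text('passed')
html = doc.getvalue()          # serialized '<table ...>...</table>' string
```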
def create_psl_file_refs_table(doc, tag, text, psl_by_files):
def create_psl_file_refs_table(doc, tag, text, psl_by_files: Dict[Path, List[TPslPoint]]) -> None:
"""
Create HTML table entries for each file. Calculates a coverage
summary for each file and adds a reference to each file's page.
@@ -373,8 +369,8 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files):
for file_name, psl_list in psl_by_files.items():
with tag('tr'):
with tag('td'):
name = os.path.basename(file_name)
with tag('a', href= os.path.join("html", name + ".html")):
name = file_name.name
with tag('a', href=os.path.join("html", name + ".html")):
text(name)
coverage_types = ["cover", "assertion"]
for coverage_type in coverage_types:
@@ -382,7 +378,7 @@ def create_psl_file_refs_table(doc, tag, text, psl_by_files):
coverage_type, merge_abs_vals=True)
def create_psl_report(psl_by_files, psl_orig):
def create_psl_report(psl_by_files: Dict[Path, List[TPslPoint]], psl_orig) -> None:
"""
Generates the PSL report. Each list within psl_by_files gets a separate
HTML page. A summary page is created from psl_orig.
@@ -391,7 +387,7 @@ def create_psl_report(psl_by_files, psl_orig):
for file_name, psl_list in psl_by_files.items():
create_psl_file_page(file_name, psl_list)
html_rep_path = os.path.join(func_cov_dir, "functional_coverage_report.html")
html_rep_path = func_cov_dir / "functional_coverage_report.html"
doc, tag, text = Doc().tagtext()
@@ -404,12 +400,11 @@ def create_psl_report(psl_by_files, psl_orig):
add_html_table_header(doc, tag, text, header, back_color="Peru")
create_psl_file_refs_table(doc, tag, text, psl_by_files)
with open(html_rep_path, 'w', encoding='utf-8') as html_file:
with html_rep_path.open('wt', encoding='utf-8') as html_file:
html_file.write(doc.getvalue())
if __name__ == "__main__":
#dut_top = "can_top_level"
dut_top = "."
@@ -424,9 +419,9 @@ if __name__ == "__main__":
json_by_file_colapsed = {}
json_together_colapsed = []
for filename, psls_for_file in json_by_file.items():
colapsed = collapse_psl_coverage_files(psls_for_file);
colapsed = collapse_psl_coverage_files(psls_for_file)
json_by_file_colapsed[filename] = colapsed
json_together_colapsed.extend(colapsed)
json_together_colapsed += colapsed
# Create PSL report
create_psl_report(json_by_file_colapsed, json_together_colapsed)
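Pieced together from the visible hunks, the script's end-to-end flow is roughly the following; the merge/load file names are assumptions, since those lines are folded out of the diff:

```python
if __name__ == "__main__":
    dut_top = "."
    # File names below are assumed for illustration; the folded lines may differ.
    merge_psl_coverage_files("merged_psl.json", "psl_cov")
    json_orig = load_json_psl_coverage("merged_psl.json")
    json_by_file = split_json_coverage_by_file(json_orig)
    json_by_file_colapsed = {}
    json_together_colapsed = []
    for filename, psls_for_file in json_by_file.items():
        colapsed = collapse_psl_coverage_files(psls_for_file)
        json_by_file_colapsed[filename] = colapsed
        json_together_colapsed += colapsed
    create_psl_report(json_by_file_colapsed, json_together_colapsed)
```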