frontend tests now use new OO style, removed AttrDict usage
stefandesouza committed Oct 22, 2023
1 parent 6384ea2 commit db02359
Showing 3 changed files with 81 additions and 82 deletions.
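The core of the change is mechanical: instruction forms are no longer AttrDict/dict-like objects indexed with string keys, but plain objects whose fields are read as attributes. A minimal, self-contained sketch of the two access styles follows; the stand-in InstructionForm class and the example values are illustrative only, the real object comes from osaca.parser.

    # Illustrative stand-in for the parser's instruction-form object.
    class InstructionForm:
        def __init__(self, line_number, latency_cp, port_pressure, flags):
            self.line_number = line_number
            self.latency_cp = latency_cp
            self.port_pressure = port_pressure
            self.flags = flags

    instr = InstructionForm(line_number=12, latency_cp=4.0, port_pressure=[0.5, 0.5], flags=[])

    # Old style, removed in this commit (AttrDict behaved like a dict):
    #     instr["latency_cp"], instr["port_pressure"]
    # New OO style used throughout frontend.py, kernel_dg.py and the tests:
    latency = instr.latency_cp
    pressure = instr.port_pressure

Aggregations change the same way, e.g. the critical-path total in the frontend becomes sum(instr.latency_cp for instr in cp_kernel) instead of summing instr["latency_cp"].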
103 changes: 51 additions & 52 deletions osaca/frontend.py
@@ -8,7 +8,6 @@
from datetime import datetime as dt

from osaca.semantics import INSTR_FLAGS, ArchSemantics, KernelDG, MachineModel
from osaca.parser import AttrDict


def _get_version(*file_paths):
@@ -54,7 +53,7 @@ def _is_comment(self, instruction_form):
:type instruction_form: `dict`
:returns: `True` if comment line, `False` otherwise
"""
return instruction_form["comment"] is not None and instruction_form["instruction"] is None
return instruction_form.comment is not None and instruction_form.instruction is None

def throughput_analysis(self, kernel, show_lineno=False, show_cmnts=True):
"""
@@ -82,14 +81,14 @@ def throughput_analysis(self, kernel, show_lineno=False, show_cmnts=True):
s += separator + "\n"
for instruction_form in kernel:
line = "{:4d} {} {} {}".format(
instruction_form["line_number"],
instruction_form.line_number,
self._get_port_pressure(
instruction_form["port_pressure"], port_len, separator=sep_list
instruction_form.port_pressure, port_len, separator=sep_list
),
self._get_flag_symbols(instruction_form["flags"])
if instruction_form["instruction"] is not None
self._get_flag_symbols(instruction_form.flags)
if instruction_form.instruction is not None
else " ",
instruction_form["line"].strip().replace("\t", " "),
instruction_form.line.strip().replace("\t", " "),
)
line = line if show_lineno else col_sep + col_sep.join(line.split(col_sep)[1:])
if show_cmnts is False and self._is_comment(instruction_form):
@@ -113,20 +112,20 @@ def latency_analysis(self, cp_kernel, separator="|"):
for instruction_form in cp_kernel:
s += (
"{:4d} {} {:4.1f} {}{}{} {}".format(
instruction_form["line_number"],
instruction_form.line_number,
separator,
instruction_form["latency_cp"],
instruction_form.latency_cp,
separator,
"X" if INSTR_FLAGS.LT_UNKWN in instruction_form["flags"] else " ",
"X" if INSTR_FLAGS.LT_UNKWN in instruction_form.flags else " ",
separator,
instruction_form["line"],
instruction_form.line,
)
) + "\n"
s += (
"\n{:4} {} {:4.1f}".format(
" " * max([len(str(instr_form["line_number"])) for instr_form in cp_kernel]),
" " * max([len(str(instr_form.line_number)) for instr_form in cp_kernel]),
" " * len(separator),
sum([instr_form["latency_cp"] for instr_form in cp_kernel]),
sum([instr_form.latency_cp for instr_form in cp_kernel]),
)
) + "\n"
return s
@@ -151,9 +150,9 @@ def loopcarried_dependencies(self, dep_dict, separator="|"):
separator,
dep_dict[dep]["latency"],
separator,
dep_dict[dep]["root"]["line"].strip(),
dep_dict[dep]["root"].line.strip(),
separator,
[node["line_number"] for node, lat in dep_dict[dep]["dependencies"]],
[node.line_number for node, lat in dep_dict[dep]["dependencies"]],
)
return s

@@ -238,10 +237,10 @@ def full_analysis_dict(
if lcd_warning:
warnings.append("LCDWarning")

if INSTR_FLAGS.TP_UNKWN in [flag for instr in kernel for flag in instr["flags"]]:
warnings.append("UnknownInstrWarning")
#if INSTR_FLAGS.TP_UNKWN in [flag for instr in kernel for flag in instr.flags]:

[CI check failure: GitHub Actions / Flake8] osaca/frontend.py, line 240: Block comment should start with '# ' (E265)
# warnings.append("UnknownInstrWarning")

tp_sum = ArchSemantics.get_throughput_sum(kernel) or kernel[0]["port_pressure"]
tp_sum = ArchSemantics.get_throughput_sum(kernel) or kernel[0].port_pressure
cp_kernel = kernel_dg.get_critical_path()

dep_dict = kernel_dg.get_loopcarried_dependencies()
@@ -254,39 +253,39 @@
"Warnings": warnings,
"Kernel": [
{
"Line": re.sub(r"\s+", " ", x["line"].strip()),
"LineNumber": x["line_number"],
"Flags": list(x["flags"]),
"Instruction": x["instruction"],
"Operands": AttrDict.get_dict(x["operands"]),
"SemanticOperands": AttrDict.get_dict(x["semantic_operands"]),
"Label": x["label"],
"Directive": x["directive"],
"Latency": float(x["latency"]),
"LatencyCP": float(x["latency_cp"]),
"LatencyLCD": float(x["latency_lcd"]),
"Throughput": float(x["throughput"]),
"LatencyWithoutLoad": float(x["latency_wo_load"]),
"Line": re.sub(r"\s+", " ", x.line.strip()),
"LineNumber": x.line_number,
"Flags": list(x.flags),
"Instruction": x.instruction,
"Operands": x.operands,
"SemanticOperands": x.semantic_operands,
"Label": x.label,
"Directive": x.directive,
"Latency": float(x.latency),
"LatencyCP": float(x.latency_cp),
"LatencyLCD": float(x.latency_lcd),
"Throughput": float(x.throughput),
"LatencyWithoutLoad": float(x.latency_wo_load),
"PortPressure": {
self._machine_model.get_ports()[i]: v
for i, v in enumerate(x["port_pressure"])
for i, v in enumerate(x.port_pressure)
},
"PortUops": [
{
"Ports": list(y[1]),
"Cycles": y[0],
}
for y in x["port_uops"]
for y in x.port_uops
],
"Comment": x["comment"],
"Comment": x.comment,
}
for x in kernel
],
"Summary": {
"PortPressure": {
self._machine_model.get_ports()[i]: v for i, v in enumerate(tp_sum)
},
"CriticalPath": sum([x["latency_cp"] for x in cp_kernel]),
"CriticalPath": sum([x.latency_cp for x in cp_kernel]),
"LCD": lcd_sum,
},
"Target": {
@@ -325,22 +324,22 @@ def combined_view(
# Separator for ports
separator = "-" * sum([x + 3 for x in port_len]) + "-"
# ... for line numbers
separator += "--" + len(str(kernel[-1]["line_number"])) * "-"
separator += "--" + len(str(kernel[-1].line_number)) * "-"
col_sep = "|"
# for LCD/CP column
separator += "-" * (2 * 6 + len(col_sep)) + "-" * len(col_sep) + "--"
sep_list = self._get_separator_list(col_sep)
headline = "Port pressure in cycles"
headline_str = "{{:^{}}}".format(len(separator))
# Prepare CP/LCD variable
cp_lines = [x["line_number"] for x in cp_kernel]
cp_lines = [x.line_number for x in cp_kernel]
lcd_sum = 0.0
lcd_lines = {}
if dep_dict:
longest_lcd = max(dep_dict, key=lambda ln: dep_dict[ln]["latency"])
lcd_sum = dep_dict[longest_lcd]["latency"]
lcd_lines = {
instr["line_number"]: lat for instr, lat in dep_dict[longest_lcd]["dependencies"]
instr.line_number: lat for instr, lat in dep_dict[longest_lcd]["dependencies"]
}

port_line = (
@@ -354,40 +353,40 @@
for instruction_form in kernel:
if show_cmnts is False and self._is_comment(instruction_form):
continue
line_number = instruction_form["line_number"]
used_ports = [list(uops[1]) for uops in instruction_form["port_uops"]]
line_number = instruction_form.line_number
used_ports = [list(uops[1]) for uops in instruction_form.port_uops]
used_ports = list(set([p for uops_ports in used_ports for p in uops_ports]))
s += "{:4d} {}{} {} {}\n".format(
line_number,
self._get_port_pressure(
instruction_form["port_pressure"], port_len, used_ports, sep_list
instruction_form.port_pressure, port_len, used_ports, sep_list
),
self._get_lcd_cp_ports(
instruction_form["line_number"],
instruction_form.line_number,
cp_kernel if line_number in cp_lines else None,
lcd_lines.get(line_number),
),
self._get_flag_symbols(instruction_form["flags"])
if instruction_form["instruction"] is not None
self._get_flag_symbols(instruction_form.flags)
if instruction_form.instruction is not None
else " ",
instruction_form["line"].strip().replace("\t", " "),
instruction_form.line.strip().replace("\t", " "),
)
s += "\n"
# check for unknown instructions and throw warning if called without --ignore-unknown
if not ignore_unknown and INSTR_FLAGS.TP_UNKWN in [
flag for instr in kernel for flag in instr["flags"]
flag for instr in kernel for flag in instr.flags
]:
num_missing = len(
[instr["flags"] for instr in kernel if INSTR_FLAGS.TP_UNKWN in instr["flags"]]
[instr.flags for instr in kernel if INSTR_FLAGS.TP_UNKWN in instr.flags]
)
s += self._missing_instruction_error(num_missing)
else:
# lcd_sum already calculated before
tp_sum = ArchSemantics.get_throughput_sum(kernel)
# if ALL instructions are unknown, take a line of 0s
if not tp_sum:
tp_sum = kernel[0]["port_pressure"]
cp_sum = sum([x["latency_cp"] for x in cp_kernel])
tp_sum = kernel[0].port_pressure
cp_sum = sum([x.latency_cp for x in cp_kernel])
s += (
lineno_filler
+ self._get_port_pressure(tp_sum, port_len, separator=" ")
@@ -500,14 +499,14 @@ def _get_port_pressure(self, ports, port_len, used_ports=[], separator="|"):

def _get_node_by_lineno(self, lineno, kernel):
"""Returns instruction form from kernel by its line number."""
nodes = [instr for instr in kernel if instr["line_number"] == lineno]
nodes = [instr for instr in kernel if instr.line_number == lineno]
return nodes[0] if len(nodes) > 0 else None

def _get_lcd_cp_ports(self, line_number, cp_dg, dep_lat, separator="|"):
"""Returns the CP and LCD line for one instruction."""
lat_cp = lat_lcd = ""
if cp_dg:
lat_cp = float(self._get_node_by_lineno(line_number, cp_dg)["latency_cp"])
lat_cp = float(self._get_node_by_lineno(line_number, cp_dg).latency_cp)
if dep_lat is not None:
lat_lcd = float(dep_lat)
return "{} {:>4} {} {:>4} {}".format(separator, lat_cp, separator, lat_lcd, separator)
@@ -516,7 +515,7 @@ def _get_max_port_len(self, kernel):
"""Returns the maximal length needed to print all throughputs of the kernel."""
port_len = [4 for x in self._machine_model.get_ports()]
for instruction_form in kernel:
for i, port in enumerate(instruction_form["port_pressure"]):
for i, port in enumerate(instruction_form.port_pressure):
if len("{:.2f}".format(port)) > port_len[i]:
port_len[i] = len("{:.2f}".format(port))
return port_len
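The Flake8 failure flagged above (E265) comes from the newly commented-out warning check: block comments must start with '# ', a hash followed by a space. A possible cleanup, sketched here and not part of this commit, is to format the disabled block accordingly (or to restore the check using the new attribute access):

    # E265-compliant form of the disabled block (note the space after '#'):
    # if INSTR_FLAGS.TP_UNKWN in [flag for instr in kernel for flag in instr.flags]:
    #     warnings.append("UnknownInstrWarning")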
12 changes: 6 additions & 6 deletions osaca/semantics/kernel_dg.py
@@ -230,18 +230,18 @@ def get_critical_path(self):
longest_path = nx.algorithms.dag.dag_longest_path(self.dg, weight="latency")
# TODO verify that we can remove the next two lines due to earlier initialization
for line_number in longest_path:
self._get_node_by_lineno(int(line_number))["latency_cp"] = 0
self._get_node_by_lineno(int(line_number)).latency_cp = 0
# set cp latency to instruction
path_latency = 0.0
for s, d in nx.utils.pairwise(longest_path):
node = self._get_node_by_lineno(int(s))
node["latency_cp"] = self.dg.edges[(s, d)]["latency"]
path_latency += node["latency_cp"]
node.latency_cp = self.dg.edges[(s, d)]["latency"]
path_latency += node.latency_cp
# add latency for last instruction
node = self._get_node_by_lineno(int(longest_path[-1]))
node["latency_cp"] = node["latency"]
if max_latency_instr["latency"] > path_latency:
max_latency_instr["latency_cp"] = float(max_latency_instr["latency"])
node.latency_cp = node.latency
if max_latency_instr.latency > path_latency:
max_latency_instr.latency_cp = float(max_latency_instr.latency)
return [max_latency_instr]
else:
return [x for x in self.kernel if x.line_number in longest_path]
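Since get_critical_path() now writes latency_cp as an attribute on each node, callers sum it the same way the frontend does. A hedged usage sketch, assuming kernel, parser, machine model and semantics objects prepared exactly as in tests/test_frontend.py below (they are not constructed here):

    from osaca.semantics import KernelDG

    def critical_path_sum(kernel, parser, machine_model, semantics):
        """Sum the critical-path latency of a kernel using the new attribute access."""
        dg = KernelDG(kernel, parser, machine_model, semantics)
        cp_kernel = dg.get_critical_path()
        return sum(instr.latency_cp for instr in cp_kernel)  # was instr["latency_cp"] before this commit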
48 changes: 24 additions & 24 deletions tests/test_frontend.py
@@ -80,7 +80,7 @@ def test_frontend_AArch64(self):
fe = Frontend(path_to_yaml=os.path.join(self.MODULE_DATA_DIR, "tx2.yml"))
fe.full_analysis(self.kernel_AArch64, dg, verbose=True)
# TODO compare output with checked string

[CI check failure: GitHub Actions / Flake8] tests/test_frontend.py, line 83: Blank line contains whitespace (W293)
def test_dict_output_x86(self):
dg = KernelDG(self.kernel_x86, self.parser_x86, self.machine_model_csx, self.semantics_csx)
fe = Frontend(path_to_yaml=os.path.join(self.MODULE_DATA_DIR, "csx.yml"))
@@ -89,29 +89,29 @@ def test_dict_output_x86(self):
self.assertEqual("csx", analysis_dict["Header"]["Architecture"])
self.assertEqual(len(analysis_dict["Warnings"]), 0)
for i, line in enumerate(self.kernel_x86):
self.assertEqual(line["throughput"], analysis_dict["Kernel"][i]["Throughput"])
self.assertEqual(line["latency"], analysis_dict["Kernel"][i]["Latency"])
self.assertEqual(line.throughput, analysis_dict["Kernel"][i]["Throughput"])
self.assertEqual(line.latency, analysis_dict["Kernel"][i]["Latency"])
self.assertEqual(
line["latency_wo_load"], analysis_dict["Kernel"][i]["LatencyWithoutLoad"]
line.latency_wo_load, analysis_dict["Kernel"][i]["LatencyWithoutLoad"]
)
self.assertEqual(line["latency_cp"], analysis_dict["Kernel"][i]["LatencyCP"])
self.assertEqual(line["instruction"], analysis_dict["Kernel"][i]["Instruction"])
self.assertEqual(len(line["operands"]), len(analysis_dict["Kernel"][i]["Operands"]))
self.assertEqual(line.latency_cp, analysis_dict["Kernel"][i]["LatencyCP"])
self.assertEqual(line.instruction, analysis_dict["Kernel"][i]["Instruction"])
self.assertEqual(len(line.operands), len(analysis_dict["Kernel"][i]["Operands"]))
self.assertEqual(
len(line["semantic_operands"]["source"]),
len(line.semantic_operands["source"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["source"]),
)
self.assertEqual(
len(line["semantic_operands"]["destination"]),
len(line.semantic_operands["destination"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["destination"]),
)
self.assertEqual(
len(line["semantic_operands"]["src_dst"]),
len(line.semantic_operands["src_dst"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["src_dst"]),
)
self.assertEqual(line["flags"], analysis_dict["Kernel"][i]["Flags"])
self.assertEqual(line["line_number"], analysis_dict["Kernel"][i]["LineNumber"])

self.assertEqual(line.flags, analysis_dict["Kernel"][i]["Flags"])
self.assertEqual(line.line_number, analysis_dict["Kernel"][i]["LineNumber"])

[CI check failure: GitHub Actions / Flake8] tests/test_frontend.py, line 114: Blank line contains whitespace (W293)
def test_dict_output_AArch64(self):
reduced_kernel = reduce_to_section(self.kernel_AArch64, self.semantics_tx2._isa)
dg = KernelDG(
@@ -126,28 +126,28 @@ def test_dict_output_AArch64(self):
self.assertEqual("tx2", analysis_dict["Header"]["Architecture"])
self.assertEqual(len(analysis_dict["Warnings"]), 0)
for i, line in enumerate(reduced_kernel):
self.assertEqual(line["throughput"], analysis_dict["Kernel"][i]["Throughput"])
self.assertEqual(line["latency"], analysis_dict["Kernel"][i]["Latency"])
self.assertEqual(line.throughput, analysis_dict["Kernel"][i]["Throughput"])
self.assertEqual(line.latency, analysis_dict["Kernel"][i]["Latency"])
self.assertEqual(
line["latency_wo_load"], analysis_dict["Kernel"][i]["LatencyWithoutLoad"]
line.latency_wo_load, analysis_dict["Kernel"][i]["LatencyWithoutLoad"]
)
self.assertEqual(line["latency_cp"], analysis_dict["Kernel"][i]["LatencyCP"])
self.assertEqual(line["instruction"], analysis_dict["Kernel"][i]["Instruction"])
self.assertEqual(len(line["operands"]), len(analysis_dict["Kernel"][i]["Operands"]))
self.assertEqual(line.latency_cp, analysis_dict["Kernel"][i]["LatencyCP"])
self.assertEqual(line.instruction, analysis_dict["Kernel"][i]["Instruction"])
self.assertEqual(len(line.operands), len(analysis_dict["Kernel"][i]["Operands"]))
self.assertEqual(
len(line["semantic_operands"]["source"]),
len(line.semantic_operands["source"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["source"]),
)
self.assertEqual(
len(line["semantic_operands"]["destination"]),
len(line.semantic_operands["destination"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["destination"]),
)
self.assertEqual(
len(line["semantic_operands"]["src_dst"]),
len(line.semantic_operands["src_dst"]),
len(analysis_dict["Kernel"][i]["SemanticOperands"]["src_dst"]),
)
self.assertEqual(line["flags"], analysis_dict["Kernel"][i]["Flags"])
self.assertEqual(line["line_number"], analysis_dict["Kernel"][i]["LineNumber"])
self.assertEqual(line.flags, analysis_dict["Kernel"][i]["Flags"])
self.assertEqual(line.line_number, analysis_dict["Kernel"][i]["LineNumber"])

##################
# Helper functions
