[#73912] implement more robust SAIF parser
This commit is contained in:
parent
87ea573b2e
commit
812fe0ef0c
|
|
@ -0,0 +1,292 @@
|
|||
#!/usr/bin/env python3
|
||||
# pylint: disable=C0103,C0114,C0115,C0116,C0209,C0302,R0902,R0903,R0904,R0912,R0913,R0914,R0915,R0916,W0212,W0511,W0603,W1201
|
||||
######################################################################
|
||||
|
||||
import argparse
|
||||
import re
|
||||
|
||||
# Process exit codes for the command-line entry point.
SUCCESS_CODE = 0
FAILURE_CODE = 1

# Scope-type tags carried on the parser's token stack.
INSTANCE_TYPE = "INSTANCE"
NET_LIST_TYPE = "NET"
SIGNAL_TYPE = "SIGNAL"

# Error message raised when a keyword is not followed by its value.
EOF_ERROR = "Unexpected EOF"
|
||||
|
||||
def saif_assert(expression, message):
    """Raise an Exception carrying *message* when *expression* is falsy.

    Used instead of the assert statement so checks survive python -O.
    """
    if expression:
        return
    raise Exception(message)
|
||||
|
||||
def saif_error(message):
    """Unconditionally abort parsing/comparison by raising *message* as an Exception."""
    raise Exception(message)
|
||||
|
||||
class SAIFSignalBit:
    """Switching-activity statistics for one single-bit net from a SAIF file."""

    name: str
    high_time: int
    low_time: int
    transitions: int

    def __init__(self, name):
        """Create a record for *name* with every counter zeroed."""
        self.name = name
        self.high_time = self.low_time = self.transitions = 0
|
||||
|
||||
|
||||
class SAIFInstance:
    """One instance scope in the SAIF hierarchy tree."""

    def __init__(self, scope_name):
        """Create an empty, parentless instance named *scope_name*."""
        self.scope_name = scope_name
        self.nets = {}               # signal name -> per-net activity record
        self.child_instances = {}    # scope name -> SAIFInstance
        self.parent_instance = None  # linked up by the parser when nested
|
||||
|
||||
|
||||
class SAIFToken:
    """Parser-stack entry: the raw token text plus the scope type/value it carries."""

    def __init__(self, token):
        """Wrap *token* with an empty scope type and value."""
        self.token = token
        self.type = self.value = ''
|
||||
|
||||
|
||||
class SAIFParser:
    """Parser for SAIF (Switching Activity Interchange Format) files.

    Tokenizes the whole file on parentheses and whitespace, then walks the
    tokens with an explicit stack, building a tree of SAIFInstance objects
    rooted at ``top_instances``.  Malformed input raises via saif_assert.
    """

    def __init__(self):
        self.token_stack = []

        # Sentinel entry so token_stack[-1] and [-2] are always valid
        # during parsing (keeps the main loop free of emptiness checks).
        self.token_stack.append(SAIFToken('saif_root'))

        # Instance scope currently being populated (None at top level).
        self.current_instance = None

        self.has_saifile_header = False
        self.direction = ''
        self.saif_version = ''
        self.top_instances = {}
        self.duration = 0
        self.divider = ''
        self.timescale = ''

    def parse(self, saif_filename):
        """Parse *saif_filename*, populating this parser's fields.

        Raises Exception on truncated input, incorrect scope nesting, or a
        missing/incompatible SAIFILE/DIRECTION/SAIFVERSION header.
        """
        with open(saif_filename, 'r', encoding="utf8") as saif_file:
            file_contents = saif_file.read()

        # Pad parentheses so a plain split() yields one token per lexeme.
        tokens = file_contents.replace('(', ' ( ').replace(')', ' ) ').split()
        num_of_tokens = len(tokens)

        index = 0
        while index < num_of_tokens:
            token = tokens[index]
            index += 1

            if token == '(':
                # A new scope inherits the enclosing scope's type/value until
                # a keyword inside it overrides them.
                self.token_stack.append(SAIFToken(token))
                self.token_stack[-1].type = self.token_stack[-2].type
                self.token_stack[-1].value = self.token_stack[-2].value
                continue

            if token == ')':
                if self.token_stack[-1].type == INSTANCE_TYPE:
                    # Leaving an INSTANCE scope: return to the parent.
                    self.current_instance = self.current_instance.parent_instance
                self.token_stack.pop()
                continue

            # Keywords are compared exactly.  The previous implementation
            # used re.match() prefix matching, which also swallowed any
            # identifier merely starting with a keyword (e.g. a signal
            # named "T1_reg" or "NETWORK").
            if token == 'SAIFILE':
                self.has_saifile_header = True
                continue

            if token == 'DIRECTION':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.direction = tokens[index].replace('\"', '')
                index += 1
                continue

            if token == 'SAIFVERSION':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.saif_version = tokens[index].replace('\"', '')
                index += 1
                continue

            if token in ('DESIGN', 'DATE', 'VENDOR', 'PROGRAM_NAME', 'VERSION'):
                # noop, only skip value
                saif_assert(index < num_of_tokens, EOF_ERROR)
                index += 1
                continue

            if token == 'DIVIDER':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.divider = tokens[index]
                index += 1
                continue

            if token == 'TIMESCALE':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.timescale = tokens[index]
                index += 1
                continue

            if token == 'DURATION':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                # int() keeps the field consistent with its initial value 0
                # (it was previously left as the raw token string).
                self.duration = int(tokens[index])
                index += 1
                continue

            if token == 'INSTANCE':
                saif_assert(index < num_of_tokens, EOF_ERROR)
                instance_name = tokens[index]
                index += 1

                self.token_stack[-1].type = INSTANCE_TYPE
                self.token_stack[-1].value = instance_name

                instance = SAIFInstance(instance_name)

                if self.current_instance is None:
                    self.top_instances[instance_name] = instance
                else:
                    self.current_instance.child_instances[instance_name] = instance

                instance.parent_instance = self.current_instance
                self.current_instance = instance
                continue

            if token == 'NET':
                self.token_stack[-1].type = NET_LIST_TYPE
                continue

            if token == 'T1':
                net_name = self.token_stack[-1].value
                saif_assert(index < num_of_tokens, EOF_ERROR)
                # int() matches the SAIFSignalBit annotations; values were
                # previously stored as strings.
                self.current_instance.nets[net_name].high_time = int(tokens[index])
                index += 1
                continue

            if token == 'T0':
                net_name = self.token_stack[-1].value
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.current_instance.nets[net_name].low_time = int(tokens[index])
                index += 1
                continue

            if token == 'TC':
                net_name = self.token_stack[-1].value
                saif_assert(index < num_of_tokens, EOF_ERROR)
                self.current_instance.nets[net_name].transitions = int(tokens[index])
                index += 1
                continue

            if token in ('TZ', 'TX', 'TB', 'TG', 'IG', 'IK'):
                # noop, only skip value.  EOF guard added: this branch was
                # the only value-consuming one without it.
                saif_assert(index < num_of_tokens, EOF_ERROR)
                index += 1
                continue

            # Any other token directly inside a NET scope is a signal name.
            if self.token_stack[-2].type == NET_LIST_TYPE:
                self.token_stack[-1].type = SIGNAL_TYPE
                self.token_stack[-1].value = token
                self.current_instance.nets[token] = SAIFSignalBit(token)

        saif_assert(self.has_saifile_header, "SAIF file doesn't contain a SAIFILE keyword")
        saif_assert(self.direction == "backward", f"SAIF file doesn't have a valid/compatible direction: {self.direction}")
        saif_assert(self.saif_version == "2.0", f"SAIF file doesn't have a valid/compatible version: {self.saif_version}")

        # only 'saif_root' token should be left
        saif_assert(len(self.token_stack) == 1, "Incorrect nesting of scopes")
|
||||
|
||||
|
||||
def compare_saif_instances(first: SAIFInstance, second: SAIFInstance):
    """Recursively check that *second* carries the same nets, per-net activity
    statistics and child instances as *first*.

    Raises (via saif_error) on the first mismatch found.
    """
    if len(first.nets) != len(second.nets):
        saif_error(f"Number of nets doesn't match in {first.scope_name}: "
                   f"{len(first.nets)} != {len(second.nets)}")

    for signal_name, lhs in first.nets.items():
        if signal_name not in second.nets:
            saif_error(f"Signal {signal_name} doesn't exist in the second object\n")

        rhs = second.nets[signal_name]

        # All three statistics must agree for the bit to match.
        stats_equal = ((lhs.high_time, lhs.low_time, lhs.transitions)
                       == (rhs.high_time, rhs.low_time, rhs.transitions))
        if not stats_equal:
            saif_error("Incompatible signal bit parameters in "
                       f"{signal_name}\n")

    if len(first.child_instances) != len(second.child_instances):
        saif_error(f"Number of child instances doesn't match in {first.scope_name}: "
                   f"{len(first.child_instances)} != {len(second.child_instances)}")

    for instance_name, child in first.child_instances.items():
        if instance_name not in second.child_instances:
            saif_error(f"Instance {instance_name} doesn't exist in the second object\n")

        compare_saif_instances(child, second.child_instances[instance_name])
|
||||
|
||||
|
||||
def compare_saif_contents(first_file: str, second_file: str):
    """Test if second SAIF file has the same values as the first"""
    ref = SAIFParser()
    ref.parse(first_file)

    other = SAIFParser()
    other.parse(second_file)

    # Global trace attributes must agree before walking the hierarchy.
    if ref.duration != other.duration:
        saif_error(f"Duration of trace doesn't match: {ref.duration} != {other.duration}")

    if ref.divider != other.divider:
        saif_error(f"Dividers don't match: {ref.divider} != {other.divider}")

    if ref.timescale != other.timescale:
        saif_error(f"Timescale doesn't match: {ref.timescale} != {other.timescale}")

    if len(ref.top_instances) != len(other.top_instances):
        saif_error("Number of top instances doesn't match: "
                   f"{len(ref.top_instances)} != {len(other.top_instances)}")

    # Compare each top-level instance tree recursively.
    for name, instance in ref.top_instances.items():
        if name not in other.top_instances:
            saif_error(f"Top instance {name} missing in other SAIF")

        compare_saif_instances(instance, other.top_instances[name])

    return SUCCESS_CODE
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(
    allow_abbrev=False,
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description="""Compare the switching activity recorded in two SAIF files""")

# required=True: previously a missing flag left the value as None and the
# script crashed with a TypeError inside open() instead of a usage message.
parser.add_argument('--first',
                    action='store',
                    required=True,
                    help='First SAIF file')

parser.add_argument('--second',
                    action='store',
                    required=True,
                    help='Second SAIF file')

parser.set_defaults(stop=True)
args = parser.parse_args()

try:
    compare_saif_contents(args.first, args.second)
except Exception as error:
    print(error)
    # Propagate the failure to the caller: the script previously printed
    # the error but still exited with status 0 (FAILURE_CODE was unused).
    raise SystemExit(FAILURE_CODE) from error
|
||||
|
|
@ -52,6 +52,7 @@ Quitting = False
|
|||
Vltmt_Threads = 3
|
||||
forker = None
|
||||
Start = None
|
||||
nodist_directory = "../nodist"
|
||||
|
||||
# So an 'import vltest_bootstrap' inside test files will do nothing
|
||||
sys.modules['vltest_bootstrap'] = {}
|
||||
|
|
@ -67,145 +68,6 @@ class staticproperty(property):
|
|||
return self.fget()
|
||||
|
||||
|
||||
#######################################################################
|
||||
#######################################################################
|
||||
# SAIF parser utilities
|
||||
|
||||
|
||||
class SAIFSignalBit:
    """Per-bit switching-activity accumulator driven sample-by-sample.

    last_val is the most recently observed value (0 or 1); high_time /
    low_time accumulate the time spent at each level; transitions counts
    value changes.
    """
    last_val: int
    high_time: int
    low_time: int
    transitions: int

    def __init__(self):
        """Start with value 0 and all counters zeroed."""
        self.last_val = 0
        self.high_time = 0
        self.low_time = 0
        self.transitions = 0

    def aggregate(self, dt: int, new_val: int):
        """Account for *dt* time units spent at last_val, then adopt *new_val*.

        A transition is counted whenever the value changes.
        """
        if new_val != self.last_val:
            self.transitions += 1

        if self.last_val == 1:
            self.high_time += dt
        else:
            # Bug fix: time spent at 0 was previously never accumulated,
            # leaving low_time at 0 regardless of the trace.
            self.low_time += dt

        self.last_val = new_val
|
||||
|
||||
|
||||
class SAIFSignal:
    """A named, possibly multi-bit signal with one SAIFSignalBit per bit."""

    def __init__(self, signal_name, signal_width=0):
        """Create *signal_name* with *signal_width* freshly zeroed bit records."""
        self.name = signal_name
        self.width = signal_width
        self.last_time = 0
        # One activity accumulator per bit; the parser may append more later.
        self.bits = [SAIFSignalBit() for _ in range(self.width)]
|
||||
|
||||
|
||||
class SAIFInstance:
    """One instance scope within the parsed SAIF hierarchy."""

    def __init__(self, scope_name):
        """Create an empty instance called *scope_name*; parent is set by the parser."""
        self.scope_name = scope_name
        self.parent_instance = None
        # Mappings populated during parsing: net name -> signal record,
        # child scope name -> SAIFInstance.
        self.nets, self.child_instances = {}, {}
|
||||
|
||||
|
||||
class SAIFParser:
    """Line-oriented, regex-based SAIF parser.

    Scans the file a line at a time, matching DIVIDER/TIMESCALE/DURATION
    headers, INSTANCE scopes and per-signal T0/T1/TC activity entries, and
    builds a tree of SAIFInstance objects rooted at top_instances.
    """

    def __init__(self):
        self.top_instances = {}        # top-level scope name -> SAIFInstance
        self.current_instance = None   # scope currently being populated
        self.traversing_nets = False   # inside a NET list (affects ')' handling)
        self.duration = 0
        self.divider = ''
        self.timescale = ''

    def parse(self, saif_filename):
        """Parse *saif_filename* line by line, populating this parser's fields."""
        with open(saif_filename, 'r', encoding="utf8") as saif_file:
            for line in saif_file:
                line = line.strip()
                if not line:
                    continue

                # Header attributes: each regex fires at most once per file
                # in well-formed input.
                match = re.search(r'\(DIVIDER\s+(.)', line)
                if match:
                    self.divider = match.groups()[0]

                match = re.search(r'\(TIMESCALE\s+(\d+\s*\w+)', line)
                if match:
                    self.timescale = match.groups()[0]

                match = re.search(r'\s*\(DURATION\s+(\d+)', line)
                if match:
                    self.duration = int(match.groups()[0])

                # New instance scope: link into the tree and descend.
                match = re.search(r'INSTANCE\s+([\w\.\$]+)', line)
                if match:
                    instance_name = match.groups()[0]

                    instance = SAIFInstance(instance_name)

                    if self.current_instance is None:
                        self.top_instances[instance_name] = instance
                    else:
                        self.current_instance.child_instances[instance_name] = instance

                    instance.parent_instance = self.current_instance
                    self.current_instance = instance

                # NOTE(review): unanchored search — any line containing the
                # substring "NET" (including inside an identifier) flips this
                # flag; confirm against real SAIF output.
                match = re.search(r'NET', line)
                if match:
                    self.traversing_nets = True

                # Signal activity line: name, optional escaped [bit] index,
                # and the parenthesized (T...) value groups.
                match = re.search(r'((?:[\w\[\]])+)(?:\\\[(\d+)\\\])*\s+(\(T.+\))+', line)
                if match:
                    signal_name, bit_index, bit_values = match.groups()

                    # Scalar signals carry no index; treat them as bit 0.
                    if bit_index is None:
                        bit_index = 0

                    if signal_name not in self.current_instance.nets:
                        saif_signal = SAIFSignal(signal_name)
                        self.current_instance.nets[signal_name] = saif_signal

                    current_signal = self.current_instance.nets[signal_name]

                    # Grow the bit list so bit_index is addressable.
                    for _ in range(0, int(bit_index) - current_signal.width + 1):
                        current_signal.bits.append(SAIFSignalBit())

                    # NOTE(review): width is set unconditionally to
                    # bit_index + 1, so a lower-indexed bit arriving after a
                    # higher one would shrink the recorded width — confirm
                    # bits always arrive in ascending order.
                    current_signal.width = int(bit_index) + 1

                    match = re.search(r'T0 (\d+)', bit_values)
                    if match:
                        low_time = match.groups()[0]

                        current_signal.bits[int(bit_index)].low_time = int(low_time)

                    match = re.search(r'T1 (\d+)', bit_values)
                    if match:
                        high_time = match.groups()[0]

                        current_signal.bits[int(bit_index)].high_time = int(high_time)

                    match = re.search(r'TC (\d+)', bit_values)
                    if match:
                        toggle_count = match.groups()[0]

                        current_signal.bits[int(bit_index)].transitions = int(toggle_count)

                # A lone ')' closes either the NET list or the current
                # instance scope, depending on traversing_nets.
                match = re.match(r'\s+\)\s+', line)
                if match:
                    if self.traversing_nets:
                        self.traversing_nets = False
                    else:
                        self.current_instance = self.current_instance.parent_instance
|
||||
|
||||
|
||||
#######################################################################
|
||||
#######################################################################
|
||||
# VtOs -- OS extensions
|
||||
|
|
@ -2507,71 +2369,15 @@ class VlTest:
|
|||
self.fst2vcd(fn1, tmp)
|
||||
self.vcd_identical(tmp, fn2)
|
||||
|
||||
def compare_saif_instances(self, first: SAIFInstance, second: SAIFInstance):
    """Recursively compare two parsed SAIF instance trees.

    Reports via self.error on the first net-count, width, per-bit statistic
    or child-instance mismatch.
    """
    if len(first.nets) != len(second.nets):
        self.error(f"Number of nets doesn't match in {first.scope_name}: "
                   f"{len(first.nets)} != {len(second.nets)}")

    for signal_name, saif_signal in first.nets.items():
        if signal_name not in second.nets:
            self.error(f"Signal {signal_name} doesn't exist in the second object\n")

        other_signal = second.nets[signal_name]
        # Widths must agree before the bit-by-bit comparison below.
        if other_signal.width != saif_signal.width:
            self.error("Incompatible signal width in "
                       f"{signal_name} {saif_signal.width} != {other_signal.width}\n")

        for bit_index in range(saif_signal.width):
            signal_bit = saif_signal.bits[bit_index]
            other_signal_bit = other_signal.bits[bit_index]

            # All three statistics must match for the bit to be considered equal.
            if (signal_bit.high_time != other_signal_bit.high_time
                    or signal_bit.low_time != other_signal_bit.low_time
                    or signal_bit.transitions != other_signal_bit.transitions):
                self.error("Incompatible signal bit parameters in "
                           f"{signal_name}[{bit_index}]\n")

    if len(first.child_instances) != len(second.child_instances):
        self.error(f"Number of child instances doesn't match in {first.scope_name}: "
                   f"{len(first.child_instances)} != {len(second.child_instances)}")

    for instance_name, instance in first.child_instances.items():
        if instance_name not in second.child_instances:
            self.error(f"Instance {instance_name} doesn't exist in the second object\n")

        self.compare_saif_instances(instance, second.child_instances[instance_name])
|
||||
|
||||
def compare_saif_contents(self, first: SAIFParser, second: SAIFParser):
    """Test if second SAIF file has the same values as the first"""

    # Global trace attributes are compared before the instance hierarchy.
    if first.duration != second.duration:
        self.error(f"Duration of trace doesn't match: {first.duration} != {second.duration}")

    if first.divider != second.divider:
        self.error(f"Dividers don't match: {first.divider} != {second.divider}")

    if first.timescale != second.timescale:
        self.error(f"Timescale doesn't match: {first.timescale} != {second.timescale}")

    if len(first.top_instances) != len(second.top_instances):
        self.error("Number of top instances doesn't match: "
                   f"{len(first.top_instances)} != {len(second.top_instances)}")

    # Recursively compare each top-level instance tree.
    for top_instance_name, top_instance in first.top_instances.items():
        if top_instance_name not in second.top_instances:
            self.error(f"Top instance {top_instance_name} missing in other SAIF")

        self.compare_saif_instances(top_instance, second.top_instances[top_instance_name])
|
||||
|
||||
def saif_identical(self, fn1: str, fn2: str) -> None:
    """Test if two SAIF files have logically-identical contents"""
    # First check: parse both files in-process and compare structurally.
    test_result_saif = SAIFParser()
    test_result_saif.parse(fn1)

    golden_saif = SAIFParser()
    golden_saif.parse(fn2)

    self.compare_saif_contents(golden_saif, test_result_saif)
    # Second check: run the external verilator_saif_diff tool on the same
    # files.  'nodist_directory' comes from module scope; NOTE(review):
    # 'test' is also resolved from module scope here — confirm it is bound
    # at call time (self.run_capture may have been intended).
    cmd = nodist_directory + '/verilator_saif_diff --first "' + fn1 + '" --second "' + fn2 + '"'
    print("\t " + cmd + "\n")
    out = test.run_capture(cmd, check=True)
    # The differ prints nothing on success; any output means a mismatch.
    if out != '':
        print(out)
        self.error("SAIF files don't match!")
|
||||
|
||||
def _vcd_read(self, filename: str) -> str:
|
||||
data = {}
|
||||
|
|
|
|||
Loading…
Reference in New Issue