2019-04-26 21:21:50 +02:00
|
|
|
# See LICENSE for licensing information.
|
|
|
|
|
#
|
2019-06-14 17:43:41 +02:00
|
|
|
# Copyright (c) 2016-2019 Regents of the University of California and The Board
|
|
|
|
|
# of Regents for the Oklahoma Agricultural and Mechanical College
|
|
|
|
|
# (acting for and on behalf of Oklahoma State University)
|
|
|
|
|
# All rights reserved.
|
2019-04-26 21:21:50 +02:00
|
|
|
#
|
2018-02-05 19:22:38 +01:00
|
|
|
import unittest,warnings
|
2019-05-31 19:51:42 +02:00
|
|
|
import pdb,traceback
|
2018-07-11 01:39:32 +02:00
|
|
|
import sys,os,glob,copy
|
2018-11-10 20:54:28 +01:00
|
|
|
import shutil
|
2019-05-31 19:51:42 +02:00
|
|
|
sys.path.append(os.getenv("OPENRAM_HOME"))
|
2018-01-30 01:59:29 +01:00
|
|
|
from globals import OPTS
|
2018-02-05 19:22:38 +01:00
|
|
|
import debug
|
2018-01-30 01:59:29 +01:00
|
|
|
|
|
|
|
|
class openram_test(unittest.TestCase):
|
|
|
|
|
""" Base unit test that we have some shared classes in. """
|
2018-11-10 20:54:28 +01:00
|
|
|
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-01-30 01:59:29 +01:00
|
|
|
def local_drc_check(self, w):
|
2018-07-11 01:39:32 +02:00
|
|
|
|
|
|
|
|
self.reset()
|
|
|
|
|
|
2019-02-22 23:38:00 +01:00
|
|
|
tempgds = "{0}{1}.gds".format(OPTS.openram_temp,w.name)
|
2018-01-30 01:59:29 +01:00
|
|
|
w.gds_write(tempgds)
|
|
|
|
|
import verify
|
|
|
|
|
|
2018-07-11 01:39:32 +02:00
|
|
|
result=verify.run_drc(w.name, tempgds)
|
|
|
|
|
if result != 0:
|
2018-07-13 18:38:43 +02:00
|
|
|
self.fail("DRC failed: {}".format(w.name))
|
2018-07-11 01:39:32 +02:00
|
|
|
|
2019-02-07 20:08:34 +01:00
|
|
|
if OPTS.purge_temp:
|
|
|
|
|
self.cleanup()
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-02-05 23:52:51 +01:00
|
|
|
def local_check(self, a, final_verification=False):
|
2018-02-05 19:22:38 +01:00
|
|
|
|
2018-07-11 01:39:32 +02:00
|
|
|
self.reset()
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2019-02-22 23:38:00 +01:00
|
|
|
tempspice = "{0}{1}.sp".format(OPTS.openram_temp,a.name)
|
|
|
|
|
tempgds = "{0}{1}.gds".format(OPTS.openram_temp,a.name)
|
2018-01-30 01:59:29 +01:00
|
|
|
|
2020-04-06 23:08:38 +02:00
|
|
|
a.lvs_write(tempspice)
|
2018-09-12 10:53:41 +02:00
|
|
|
# cannot write gds in netlist_only mode
|
|
|
|
|
if not OPTS.netlist_only:
|
|
|
|
|
a.gds_write(tempgds)
|
2018-01-30 01:59:29 +01:00
|
|
|
|
2018-09-12 10:53:41 +02:00
|
|
|
import verify
|
2019-02-22 23:38:00 +01:00
|
|
|
result=verify.run_drc(a.name, tempgds, extract=True, final_verification=final_verification)
|
2018-09-12 10:53:41 +02:00
|
|
|
if result != 0:
|
2018-11-10 22:44:36 +01:00
|
|
|
#zip_file = "/tmp/{0}_{1}".format(a.name,os.getpid())
|
|
|
|
|
#debug.info(0,"Archiving failed files to {}.zip".format(zip_file))
|
|
|
|
|
#shutil.make_archive(zip_file, 'zip', OPTS.openram_temp)
|
2018-09-12 10:53:41 +02:00
|
|
|
self.fail("DRC failed: {}".format(a.name))
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2019-02-22 23:38:00 +01:00
|
|
|
result=verify.run_lvs(a.name, tempgds, tempspice, final_verification=final_verification)
|
2018-09-12 10:53:41 +02:00
|
|
|
if result != 0:
|
2018-11-10 22:44:36 +01:00
|
|
|
#zip_file = "/tmp/{0}_{1}".format(a.name,os.getpid())
|
|
|
|
|
#debug.info(0,"Archiving failed files to {}.zip".format(zip_file))
|
|
|
|
|
#shutil.make_archive(zip_file, 'zip', OPTS.openram_temp)
|
2018-09-12 10:53:41 +02:00
|
|
|
self.fail("LVS mismatch: {}".format(a.name))
|
2018-01-30 01:59:29 +01:00
|
|
|
|
2019-07-11 00:56:51 +02:00
|
|
|
# For debug...
|
2019-07-12 19:17:33 +02:00
|
|
|
#import pdb; pdb.set_trace()
|
2018-02-01 20:53:02 +01:00
|
|
|
if OPTS.purge_temp:
|
|
|
|
|
self.cleanup()
|
2019-06-30 09:50:18 +02:00
|
|
|
|
2019-06-25 20:24:25 +02:00
|
|
|
def run_pex(self, a, output=None):
|
|
|
|
|
if output == None:
|
|
|
|
|
output = OPTS.openram_temp + a.name + ".pex.netlist"
|
|
|
|
|
tempspice = "{0}{1}.sp".format(OPTS.openram_temp,a.name)
|
|
|
|
|
tempgds = "{0}{1}.gds".format(OPTS.openram_temp,a.name)
|
|
|
|
|
|
|
|
|
|
import verify
|
|
|
|
|
result=verify.run_pex(a.name, tempgds, tempspice, output=output, final_verification=False)
|
|
|
|
|
if result != 0:
|
|
|
|
|
self.fail("PEX ERROR: {}".format(a.name))
|
|
|
|
|
return output
|
2018-01-31 19:35:51 +01:00
|
|
|
|
2018-12-06 08:23:40 +01:00
|
|
|
def find_feasible_test_period(self, delay_obj, sram, load, slew):
|
|
|
|
|
"""Creates a delay simulation to determine a feasible period for the functional tests to run.
|
|
|
|
|
Only determines the feasible period for a single port and assumes that for all ports for performance.
|
|
|
|
|
"""
|
|
|
|
|
debug.info(1, "Finding feasible period for current test.")
|
|
|
|
|
delay_obj.set_load_slew(load, slew)
|
|
|
|
|
test_port = delay_obj.read_ports[0] #Only test one port, assumes other ports have similar period.
|
2019-05-16 02:17:26 +02:00
|
|
|
delay_obj.analysis_init(probe_address="1"*sram.addr_size, probe_data=(sram.word_size-1))
|
2019-06-25 20:24:25 +02:00
|
|
|
delay_obj.find_feasible_period_one_port(test_port)
|
|
|
|
|
return delay_obj.period
|
|
|
|
|
|
2018-01-31 19:35:51 +01:00
|
|
|
def cleanup(self):
|
|
|
|
|
""" Reset the duplicate checker and cleanup files. """
|
2018-01-30 01:59:29 +01:00
|
|
|
files = glob.glob(OPTS.openram_temp + '*')
|
|
|
|
|
for f in files:
|
2018-01-30 16:58:31 +01:00
|
|
|
# Only remove the files
|
|
|
|
|
if os.path.isfile(f):
|
2019-06-25 20:24:25 +02:00
|
|
|
os.remove(f)
|
2018-01-30 01:59:29 +01:00
|
|
|
|
2018-01-31 19:35:51 +01:00
|
|
|
def reset(self):
|
2019-06-25 20:24:25 +02:00
|
|
|
"""
|
2018-07-11 01:39:32 +02:00
|
|
|
Reset everything after each test.
|
|
|
|
|
"""
|
|
|
|
|
# Reset the static duplicate name checker for unit tests.
|
2018-07-10 01:07:30 +02:00
|
|
|
import hierarchy_design
|
|
|
|
|
hierarchy_design.hierarchy_design.name_map=[]
|
2018-07-27 01:05:24 +02:00
|
|
|
|
|
|
|
|
def check_golden_data(self, data, golden_data, error_tolerance=1e-2):
|
|
|
|
|
"""
|
|
|
|
|
This function goes through two dictionaries, key by key and compares
|
|
|
|
|
each item. It uses relative comparisons for the items and returns false
|
|
|
|
|
if there is a mismatch.
|
|
|
|
|
"""
|
|
|
|
|
|
|
|
|
|
# Check each result
|
|
|
|
|
data_matches = True
|
|
|
|
|
for k in data.keys():
|
|
|
|
|
if type(data[k])==list:
|
|
|
|
|
for i in range(len(data[k])):
|
|
|
|
|
if not self.isclose(k,data[k][i],golden_data[k][i],error_tolerance):
|
|
|
|
|
data_matches = False
|
|
|
|
|
else:
|
2018-11-08 21:16:59 +01:00
|
|
|
if not self.isclose(k,data[k],golden_data[k],error_tolerance):
|
|
|
|
|
data_matches = False
|
2018-07-27 01:05:24 +02:00
|
|
|
if not data_matches:
|
|
|
|
|
import pprint
|
|
|
|
|
data_string=pprint.pformat(data)
|
2018-07-27 23:25:52 +02:00
|
|
|
debug.error("Results exceeded {:.1f}% tolerance compared to golden results:\n".format(error_tolerance*100)+data_string)
|
2018-07-27 01:05:24 +02:00
|
|
|
return data_matches
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-11 01:39:32 +02:00
|
|
|
|
2016-11-08 18:57:35 +01:00
|
|
|
|
2018-07-27 00:45:17 +02:00
|
|
|
def isclose(self,key,value,actual_value,error_tolerance=1e-2):
|
2018-01-31 20:48:41 +01:00
|
|
|
""" This is used to compare relative values. """
|
|
|
|
|
import debug
|
2018-07-27 18:34:44 +02:00
|
|
|
relative_diff = self.relative_diff(value,actual_value)
|
2018-01-31 20:48:41 +01:00
|
|
|
check = relative_diff <= error_tolerance
|
2018-07-27 18:34:44 +02:00
|
|
|
if check:
|
2018-07-27 00:45:17 +02:00
|
|
|
debug.info(2,"CLOSE\t{0: <10}\t{1:.3f}\t{2:.3f}\tdiff={3:.1f}%".format(key,value,actual_value,relative_diff*100))
|
|
|
|
|
return True
|
2018-07-27 18:34:44 +02:00
|
|
|
else:
|
|
|
|
|
debug.error("NOT CLOSE\t{0: <10}\t{1:.3f}\t{2:.3f}\tdiff={3:.1f}%".format(key,value,actual_value,relative_diff*100))
|
|
|
|
|
return False
|
|
|
|
|
|
|
|
|
|
def relative_diff(self, value1, value2):
|
2019-06-25 20:24:25 +02:00
|
|
|
""" Compute the relative difference of two values and normalize to the largest.
|
2018-07-27 18:34:44 +02:00
|
|
|
If largest value is 0, just return the difference."""
|
|
|
|
|
|
|
|
|
|
# Edge case to avoid divide by zero
|
|
|
|
|
if value1==0 and value2==0:
|
|
|
|
|
return 0.0
|
|
|
|
|
|
|
|
|
|
# Don't need relative, exact compare
|
|
|
|
|
if value1==value2:
|
|
|
|
|
return 0.0
|
|
|
|
|
|
|
|
|
|
# Get normalization value
|
|
|
|
|
norm_value = abs(max(value1, value2))
|
|
|
|
|
# Edge case where greater is a zero
|
|
|
|
|
if norm_value == 0:
|
|
|
|
|
min_value = abs(min(value1, value2))
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
return abs(value1 - value2) / norm_value
|
|
|
|
|
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2018-07-27 00:45:17 +02:00
|
|
|
def relative_compare(self, value,actual_value,error_tolerance):
|
2018-01-31 20:48:41 +01:00
|
|
|
""" This is used to compare relative values. """
|
2018-07-27 00:45:17 +02:00
|
|
|
if (value==actual_value): # if we don't need a relative comparison!
|
2018-01-31 20:48:41 +01:00
|
|
|
return True
|
2018-07-27 00:45:17 +02:00
|
|
|
return (abs(value - actual_value) / max(value,actual_value) <= error_tolerance)
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
def isapproxdiff(self, filename1, filename2, error_tolerance=0.001):
|
2018-01-31 20:48:41 +01:00
|
|
|
"""Compare two files.
|
|
|
|
|
|
|
|
|
|
Arguments:
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
filename1 -- First file name
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
filename2 -- Second file name
|
2018-01-31 20:48:41 +01:00
|
|
|
|
|
|
|
|
Return value:
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-01-31 20:48:41 +01:00
|
|
|
True if the files are the same, False otherwise.
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-01-31 20:48:41 +01:00
|
|
|
"""
|
|
|
|
|
import re
|
|
|
|
|
import debug
|
|
|
|
|
|
2018-06-30 00:11:14 +02:00
|
|
|
numeric_const_pattern = r"""
|
|
|
|
|
[-+]? # optional sign
|
|
|
|
|
(?:
|
|
|
|
|
(?: \d* \. \d+ ) # .1 .12 .123 etc 9.1 etc 98.1 etc
|
|
|
|
|
|
|
|
|
|
|
(?: \d+ \.? ) # 1. 12. 123. etc 1 12 123 etc
|
|
|
|
|
)
|
|
|
|
|
# followed by optional exponent part if desired
|
|
|
|
|
(?: [Ee] [+-]? \d+ ) ?
|
|
|
|
|
"""
|
|
|
|
|
rx = re.compile(numeric_const_pattern, re.VERBOSE)
|
2018-07-27 18:34:44 +02:00
|
|
|
fp1 = open(filename1, 'rb')
|
|
|
|
|
fp2 = open(filename2, 'rb')
|
|
|
|
|
mismatches=0
|
|
|
|
|
line_num=0
|
|
|
|
|
while True:
|
|
|
|
|
line_num+=1
|
|
|
|
|
line1 = fp1.readline().decode('utf-8')
|
|
|
|
|
line2 = fp2.readline().decode('utf-8')
|
|
|
|
|
#print("line1:",line1)
|
|
|
|
|
#print("line2:",line2)
|
|
|
|
|
|
|
|
|
|
# 1. Find all of the floats using a regex
|
|
|
|
|
line1_floats=rx.findall(line1)
|
|
|
|
|
line2_floats=rx.findall(line2)
|
|
|
|
|
debug.info(3,"line1_floats: "+str(line1_floats))
|
|
|
|
|
debug.info(3,"line2_floats: "+str(line2_floats))
|
|
|
|
|
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
# 2. Remove the floats from the string
|
|
|
|
|
for f in line1_floats:
|
|
|
|
|
line1=line1.replace(f,"",1)
|
|
|
|
|
for f in line2_floats:
|
|
|
|
|
line2=line2.replace(f,"",1)
|
|
|
|
|
#print("line1:",line1)
|
|
|
|
|
#print("line2:",line2)
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
# 3. Convert to floats rather than strings
|
|
|
|
|
line1_floats = [float(x) for x in line1_floats]
|
|
|
|
|
line2_floats = [float(x) for x in line1_floats]
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
# 4. Check if remaining string matches
|
|
|
|
|
if line1 != line2:
|
2019-06-25 20:24:25 +02:00
|
|
|
#Uncomment if you want to see all the individual chars of the two lines
|
2018-09-11 04:33:59 +02:00
|
|
|
#print(str([i for i in line1]))
|
|
|
|
|
#print(str([i for i in line2]))
|
2018-07-27 18:34:44 +02:00
|
|
|
if mismatches==0:
|
|
|
|
|
debug.error("Mismatching files:\nfile1={0}\nfile2={1}".format(filename1,filename2))
|
|
|
|
|
mismatches += 1
|
|
|
|
|
debug.error("MISMATCH Line ({0}):\n{1}\n!=\n{2}".format(line_num,line1.rstrip('\n'),line2.rstrip('\n')))
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
# 5. Now compare that the floats match
|
|
|
|
|
elif len(line1_floats)!=len(line2_floats):
|
|
|
|
|
if mismatches==0:
|
|
|
|
|
debug.error("Mismatching files:\nfile1={0}\nfile2={1}".format(filename1,filename2))
|
|
|
|
|
mismatches += 1
|
|
|
|
|
debug.error("MISMATCH Line ({0}) Length {1} != {2}".format(line_num,len(line1_floats),len(line2_floats)))
|
|
|
|
|
else:
|
|
|
|
|
for (float1,float2) in zip(line1_floats,line2_floats):
|
|
|
|
|
relative_diff = self.relative_diff(float1,float2)
|
|
|
|
|
check = relative_diff <= error_tolerance
|
|
|
|
|
if not check:
|
|
|
|
|
if mismatches==0:
|
|
|
|
|
debug.error("Mismatching files:\nfile1={0}\nfile2={1}".format(filename1,filename2))
|
|
|
|
|
mismatches += 1
|
|
|
|
|
debug.error("MISMATCH Line ({0}) Float {1} != {2} diff: {3:.1f}%".format(line_num,float1,float2,relative_diff*100))
|
|
|
|
|
|
|
|
|
|
# Only show the first 10 mismatch lines
|
|
|
|
|
if not line1 and not line2 or mismatches>10:
|
|
|
|
|
fp1.close()
|
|
|
|
|
fp2.close()
|
|
|
|
|
return mismatches==0
|
2018-01-31 20:48:41 +01:00
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
# Never reached
|
|
|
|
|
return False
|
2018-01-31 20:48:41 +01:00
|
|
|
|
|
|
|
|
|
2018-07-27 18:34:44 +02:00
|
|
|
def isdiff(self,filename1,filename2):
|
2018-01-31 20:48:41 +01:00
|
|
|
""" This is used to compare two files and display the diff if they are different.. """
|
|
|
|
|
import debug
|
|
|
|
|
import filecmp
|
|
|
|
|
import difflib
|
2018-07-27 18:34:44 +02:00
|
|
|
check = filecmp.cmp(filename1,filename2)
|
2018-01-31 20:48:41 +01:00
|
|
|
if not check:
|
2018-07-27 18:34:44 +02:00
|
|
|
debug.error("MISMATCH file1={0} file2={1}".format(filename1,filename2))
|
2018-09-17 19:03:55 +02:00
|
|
|
f1 = open(filename1,mode="r",encoding='utf-8')
|
|
|
|
|
s1 = f1.readlines()
|
2018-07-27 18:34:44 +02:00
|
|
|
f1.close()
|
2018-09-17 19:03:55 +02:00
|
|
|
f2 = open(filename2,mode="r",encoding='utf-8')
|
2018-01-31 20:48:41 +01:00
|
|
|
s2 = f2.readlines()
|
2018-07-27 18:34:44 +02:00
|
|
|
f2.close()
|
|
|
|
|
mismatches=0
|
2018-09-17 19:03:55 +02:00
|
|
|
for line in list(difflib.unified_diff(s1, s2)):
|
2018-07-27 18:34:44 +02:00
|
|
|
mismatches += 1
|
2018-09-17 19:03:55 +02:00
|
|
|
if mismatches==0:
|
|
|
|
|
print("DIFF LINES:")
|
|
|
|
|
|
|
|
|
|
if mismatches<11:
|
|
|
|
|
print(line.rstrip('\n'))
|
|
|
|
|
else:
|
2018-07-27 18:34:44 +02:00
|
|
|
return False
|
|
|
|
|
return False
|
2018-01-31 20:48:41 +01:00
|
|
|
else:
|
2018-07-27 18:34:44 +02:00
|
|
|
debug.info(2,"MATCH {0} {1}".format(filename1,filename2))
|
|
|
|
|
return True
|
2016-11-12 16:56:50 +01:00
|
|
|
|
2019-06-25 20:24:25 +02:00
|
|
|
|
2016-11-15 18:55:18 +01:00
|
|
|
def header(filename, technology):
    """Print a banner identifying the test file, technology, and temp dir."""
    # Skip the header for gitlab regression
    import getpass
    if getpass.getuser() == "gitlab-runner":
        return

    from globals import OPTS
    # Rows to display, each centered within the banner width.
    rows = [
        "Running Test for:".center(60),
        technology.center(60),
        filename.center(60),
        OPTS.openram_temp.center(60),
    ]
    print("\n")
    print(" ______________________________________________________________________________ ")
    print("|==============================================================================|")
    for row in rows:
        print("|=========" + row + "=========|")
    print("|==============================================================================|")
|
2019-05-31 19:51:42 +02:00
|
|
|
|
|
|
|
|
def debugTestRunner(post_mortem=None):
    """unittest runner doing post mortem debugging on failing tests"""
    # Default to pdb's post-mortem handler, but only when the temp files
    # are being kept around for inspection.
    if post_mortem is None and not OPTS.purge_temp:
        post_mortem = pdb.post_mortem

    class DebugTestResult(unittest.TextTestResult):

        def _debug(self, err):
            # Shared hook: dump the traceback, then hand the innermost
            # traceback object to the post-mortem debugger if one is set.
            traceback.print_exception(*err)
            if post_mortem:
                post_mortem(err[2])

        def addError(self, test, err):
            # called before tearDown()
            self._debug(err)
            super(DebugTestResult, self).addError(test, err)

        def addFailure(self, test, err):
            self._debug(err)
            super(DebugTestResult, self).addFailure(test, err)

    return unittest.TextTestRunner(resultclass=DebugTestResult)
|