mirror of https://github.com/VLSIDA/OpenRAM.git
Fixed units in LIB files when cacti is selected as the model. Changed model data gathering to use only the extended config.
This commit is contained in:
parent 11ff8713c5
commit 116f102ebf
@@ -91,14 +91,14 @@ class cacti(simulation):
                 if "power" in mname:
                     port_data[port][mname].append(power.dynamic)
                 elif "delay" in mname and port in self.read_ports:
-                    port_data[port][mname].append(total_delay.delay / 1e3)
+                    port_data[port][mname].append(total_delay.delay / 1e-9)
                 elif "slew" in mname and port in self.read_ports:
-                    port_data[port][mname].append(total_delay.slew / 1e3)
+                    port_data[port][mname].append(total_delay.slew / 1e-9)

         # Margin for error in period. Calculated by averaging required margin for a small and large
         # memory. FIXME: margin is quite large, should be looked into.
         period_margin = 1.85
-        sram_data = {"min_period": (max_delay / 1e3) * 2 * period_margin,
+        sram_data = {"min_period": (max_delay / 1e-9) * 2 * period_margin,
                      "leakage_power": power.leakage}

         debug.info(2, "SRAM Data:\n{}".format(sram_data))
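The hunk above is the unit fix named in the commit message: CACTI-derived delay and slew are now divided by 1e-9 instead of 1e3 before being written into the LIB data. A minimal sketch of the conversion, assuming the CACTI wrapper reports timing in seconds and the LIB file expects nanoseconds (the helper name below is hypothetical, not OpenRAM code):

    # Sketch only, not part of the commit: seconds -> nanoseconds.
    # Dividing by 1e-9 is equivalent to multiplying by 1e9.
    def seconds_to_ns(value_s):
        return value_s / 1e-9

    print(seconds_to_ns(2.5e-10))  # 250 ps -> prints 0.25 (ns)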
@@ -30,18 +30,19 @@ def get_config_mods(openram_dir):
     files_names = [name for name in os.listdir(openram_dir) if os.path.isfile(openram_dir+'/'+name)]
     log = [name for name in files_names if '.log' in name][0]
     dataset_name = log[:-4]
+    sys.path.append(openram_dir)
     print("Extracting dataset:{}".format(dataset_name))

     # Check that the config files exist (including special extended config)
     dir_path = openram_dir+"/"
     #sys.path.append(dir_path)
-    imp_mod = None
+    #imp_mod = None
     imp_mod_extended = None
-    if not os.path.exists(openram_dir+'/'+dataset_name+".py"):
-        print("Python module for {} not found.".format(dataset_name))
-        imp_mod = None
-    else:
-        imp_mod = import_module(dataset_name, openram_dir+"/"+dataset_name+".py")
+    # if not os.path.exists(openram_dir+'/'+dataset_name+".py"):
+    #     print("Python module for {} not found.".format(dataset_name))
+    #     imp_mod = None
+    # else:
+    #     imp_mod = import_module(dataset_name, openram_dir+"/"+dataset_name+".py")

     if not os.path.exists(openram_dir+'/'+dataset_name+extended_name+".py"):
         print("Extended Python module for {} not found.".format(dataset_name))
@@ -51,7 +52,7 @@ def get_config_mods(openram_dir):

     datasheet_fname = openram_dir+"/"+dataset_name+data_file_ext

-    return dataset_name, imp_mod, imp_mod_extended, datasheet_fname
+    return dataset_name, imp_mod_extended, datasheet_fname

 def get_corners(datafile_contents, dataset_name, tech):
     """Search through given datasheet to find all corners available"""
@@ -89,7 +90,7 @@ singlevalue_names = ['write_rise_power_0',
                      'read_rise_power_0',
                      'read_fall_power_0']

-def write_to_csv(dataset_name, csv_file, datasheet_fname, imp_mod, imp_mod_extended, mode):
+def write_to_csv(dataset_name, csv_file, datasheet_fname, imp_mod, mode):


     writer = csv.writer(csv_file,lineterminator='\n')
@@ -119,7 +120,7 @@ def write_to_csv(dataset_name, csv_file, datasheet_fname, imp_mod, imp_mod_extended, mode):
     contents = f.read()
     f.close()

-    available_corners = get_corners(contents, dataset_name, imp_mod_extended.tech_name)
+    available_corners = get_corners(contents, dataset_name, imp_mod.tech_name)

     # Loop through corners, adding data for each corner
     for (temp, voltage, process) in available_corners:
@@ -185,6 +186,10 @@ def write_to_csv(dataset_name, csv_file, datasheet_fname, imp_mod, imp_mod_extended, mode):
     for dval in datasheet_multivalues+datasheet_singlevalues:
         if dval == None:
             print("Error occurred while searching through datasheet: {}".format(datasheet_fname))
+            print("datasheet_multivalues",datasheet_multivalues)
+            print("datasheet_singlevalues",datasheet_singlevalues)
+            print("multivalue_regexs",multivalue_regexs[0])
+            sys.exit()
             return None

     try:
@@ -195,7 +200,7 @@ def write_to_csv(dataset_name, csv_file, datasheet_fname, imp_mod, imp_mod_extended, mode):
         # All the extracted values are delays but val[2] is the max delay
         feature_vals = [imp_mod.num_words,
                         imp_mod.word_size,
-                        imp_mod_extended.words_per_row,
+                        imp_mod.words_per_row,
                         las,
                         area_vals[1],
                         process,
@@ -226,8 +231,8 @@ def extract_data(openram_dir, out_dir, is_first):
        a CSV files with data used in model."""

     # Get dataset name used by all the files e.g. sram_1b_16
-    dataset_name, inp_mod, imp_mod_extended, datasheet_fname = get_config_mods(openram_dir)
-    if inp_mod == None or imp_mod_extended == None:
+    dataset_name, inp_mod, datasheet_fname = get_config_mods(openram_dir)
+    if inp_mod == None:
         print("Config file(s) for this run not found. Skipping...")
         return

@@ -236,7 +241,7 @@ def extract_data(openram_dir, out_dir, is_first):
     else:
         mode = 'a+'
     with open("{}/sim_data.csv".format(out_dir), mode, newline='\n') as data_file:
-        write_to_csv(dataset_name, data_file, datasheet_fname, inp_mod, imp_mod_extended, mode)
+        write_to_csv(dataset_name, data_file, datasheet_fname, inp_mod, mode)

     return out_dir
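With this change, get_config_mods returns only the extended config module, and write_to_csv reads every SRAM parameter it needs (num_words, word_size, words_per_row, tech_name) from that single module. A rough sketch of the resulting call chain, assuming the two functions from the modified script are in scope; the run directory, output file, and 'w' mode are illustrative values:

    # Sketch only: post-commit usage of the data-extraction helpers.
    dataset_name, inp_mod, datasheet_fname = get_config_mods("/tmp/openram_run")
    if inp_mod is None:
        print("Config file(s) for this run not found. Skipping...")
    else:
        with open("sim_data.csv", "w", newline="\n") as data_file:
            write_to_csv(dataset_name, data_file, datasheet_fname, inp_mod, "w")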