Removed carriage returns; adjusted signal name generation to support a variable delay chain size.

This commit is contained in:
Hunter Nichols 2019-01-18 00:23:50 -08:00
parent 4ced6be6bd
commit 5885e3b635
2 changed files with 147 additions and 141 deletions

View File

@ -33,8 +33,10 @@ class model_check(delay):
self.wl_delay_meas_names = ["delay_wl_en_bar", "delay_wl_en", "delay_dvr_en_bar", "delay_wl"] self.wl_delay_meas_names = ["delay_wl_en_bar", "delay_wl_en", "delay_dvr_en_bar", "delay_wl"]
self.wl_slew_meas_names = ["slew_wl_gated_clk_bar","slew_wl_en_bar", "slew_wl_en", "slew_drv_en_bar", "slew_wl"] self.wl_slew_meas_names = ["slew_wl_gated_clk_bar","slew_wl_en_bar", "slew_wl_en", "slew_drv_en_bar", "slew_wl"]
self.rbl_delay_meas_names = ["delay_gated_clk_nand", "delay_delay_chain_in", "delay_delay_chain_stage_1", "delay_delay_chain_stage_2"] dc_delay_names = ["delay_delay_chain_stage_{}".format(stage) for stage in range(1,self.get_num_delay_stages()+1)]
self.rbl_slew_meas_names = ["slew_rbl_gated_clk_bar","slew_gated_clk_nand", "slew_delay_chain_in", "slew_delay_chain_stage_1", "slew_delay_chain_stage_2"] self.rbl_delay_meas_names = ["delay_gated_clk_nand", "delay_delay_chain_in"]+dc_delay_names
dc_slew_names = ["slew_delay_chain_stage_{}".format(stage) for stage in range(1,self.get_num_delay_stages()+1)]
self.rbl_slew_meas_names = ["slew_rbl_gated_clk_bar","slew_gated_clk_nand", "slew_delay_chain_in"]+dc_slew_names
self.sae_delay_meas_names = ["delay_pre_sen", "delay_sen_bar", "delay_sen"] self.sae_delay_meas_names = ["delay_pre_sen", "delay_sen_bar", "delay_sen"]
self.sae_slew_meas_names = ["slew_replica_bl0", "slew_pre_sen", "slew_sen_bar", "slew_sen"] self.sae_slew_meas_names = ["slew_replica_bl0", "slew_pre_sen", "slew_sen_bar", "slew_sen"]
@ -42,7 +44,8 @@ class model_check(delay):
delay.create_signal_names(self) delay.create_signal_names(self)
#Signal names are all hardcoded, need to update to make it work for probe address and different configurations. #Signal names are all hardcoded, need to update to make it work for probe address and different configurations.
self.wl_signal_names = ["Xsram.Xcontrol0.gated_clk_bar", "Xsram.Xcontrol0.Xbuf_wl_en.zb_int", "Xsram.wl_en0", "Xsram.Xbank0.Xwordline_driver0.wl_bar_15", "Xsram.Xbank0.wl_15"] self.wl_signal_names = ["Xsram.Xcontrol0.gated_clk_bar", "Xsram.Xcontrol0.Xbuf_wl_en.zb_int", "Xsram.wl_en0", "Xsram.Xbank0.Xwordline_driver0.wl_bar_15", "Xsram.Xbank0.wl_15"]
self.rbl_en_signal_names = ["Xsram.Xcontrol0.gated_clk_bar", "Xsram.Xcontrol0.Xand2_rbl_in.zb_int", "Xsram.Xcontrol0.rbl_in", "Xsram.Xcontrol0.Xreplica_bitline.Xdelay_chain.dout_1", "Xsram.Xcontrol0.Xreplica_bitline.delayed_en"] delay_chain_signal_names = ["Xsram.Xcontrol0.Xreplica_bitline.Xdelay_chain.dout_{}".format(stage) for stage in range(1,self.get_num_delay_stages())] + ["Xsram.Xcontrol0.Xreplica_bitline.delayed_en"]
self.rbl_en_signal_names = ["Xsram.Xcontrol0.gated_clk_bar", "Xsram.Xcontrol0.Xand2_rbl_in.zb_int", "Xsram.Xcontrol0.rbl_in"] + delay_chain_signal_names
self.sae_signal_names = ["Xsram.Xcontrol0.Xreplica_bitline.bl0_0", "Xsram.Xcontrol0.pre_s_en", "Xsram.Xcontrol0.Xbuf_s_en.zb_int", "Xsram.s_en0"] self.sae_signal_names = ["Xsram.Xcontrol0.Xreplica_bitline.bl0_0", "Xsram.Xcontrol0.pre_s_en", "Xsram.Xcontrol0.Xbuf_s_en.zb_int", "Xsram.s_en0"]
def get_all_signal_names(self): def get_all_signal_names(self):
@ -178,7 +181,10 @@ class model_check(delay):
def get_model_delays(self, port):
    """Get the analytical model delays for the given port.

    Currently assumes a single RW port, so the port argument is unused.
    """
    control_logic = self.sram.control_logic_rw
    return control_logic.get_wl_sen_delays()
def get_num_delay_stages(self):
    """Return the number of stages in the replica bitline's delay chain.

    One entry in delay_fanout_list corresponds to one delay-chain stage.
    """
    fanout_list = self.sram.control_logic_rw.replica_bitline.delay_fanout_list
    return len(fanout_list)
def scale_delays(self, delay_list): def scale_delays(self, delay_list):
"""Takes in a list of measured delays and convert it to simple units to easily compare to model values.""" """Takes in a list of measured delays and convert it to simple units to easily compare to model values."""
converted_values = [] converted_values = []

View File

@ -1,137 +1,137 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
""" """
Run a regression test on various srams Run a regression test on various srams
""" """
import unittest import unittest
from testutils import header,openram_test from testutils import header,openram_test
import sys,os import sys,os
sys.path.append(os.path.join(sys.path[0],"..")) sys.path.append(os.path.join(sys.path[0],".."))
import globals import globals
from globals import OPTS from globals import OPTS
import debug import debug
import csv import csv
from sram import sram from sram import sram
from sram_config import sram_config from sram_config import sram_config
MODEL_DIR = "model_data/" MODEL_DIR = "model_data/"
class data_collection(openram_test):
    """Sweep SRAM configurations, run model_check on each, and write the
    measured delays/slews and model values to per-measurement CSV files."""

    def runTest(self):
        self.init_data_gen()
        # Run on one size first to initialize CSV writing (csv column names
        # come from the returned data dict). Strange, but it is okay for now.
        sram_data = self.get_sram_data(1, 16, 1)
        self.initialize_csv_file(sram_data)
        self.add_sram_data_to_csv(sram_data, 1, 16, 1)

        # Run openRAM for several size configurations
        #word_size_list, num_word_list, words_per_row_list = self.get_sram_configs()
        word_size_list, num_word_list, words_per_row_list = [4], [16], [1]  # for quick testing.
        for word_size in word_size_list:
            for num_word in num_word_list:
                for words_per_row in words_per_row_list:
                    # Unfortunately, init needs to be called every time
                    self.init_data_gen()
                    sram_data = self.get_sram_data(word_size, num_word, words_per_row)
                    self.add_sram_data_to_csv(sram_data, word_size, num_word, words_per_row)
        self.close_files()
        debug.info(1, "Data Generated")
        globals.end_openram()

    def init_data_gen(self):
        """Initialization for the data test to run."""
        globals.init_openram("config_20_{0}".format(OPTS.tech_name))
        OPTS.spice_name = "hspice"  # Much faster than ngspice.
        OPTS.trim_netlist = False
        OPTS.netlist_only = True
        OPTS.analytical_delay = False
        # This is a hack to reload the characterizer __init__ with the spice version
        from importlib import reload
        import characterizer
        reload(characterizer)

    def close_files(self):
        """Closes all files stored in the file dict."""
        for csv_file in self.csv_files.values():
            csv_file.close()

    def get_sram_configs(self):
        """Generate lists of word sizes, numbers of words, and column mux
        sizes (words per row) to be tested."""
        min_word_size = 1
        max_word_size = 16
        min_num_words_log2 = 4
        max_num_words_log2 = 8
        word_sizes = list(range(min_word_size, max_word_size + 1))
        num_words = [2 ** i for i in range(min_num_words_log2, max_num_words_log2 + 1)]
        words_per_row = [1]
        return word_sizes, num_words, words_per_row

    def add_sram_data_to_csv(self, sram_data, word_size, num_words, words_per_row):
        """Writes data to its respective CSV file. There is a CSV for each
        measurement target (wordline, sense amp enable, and models)."""
        sram_specs = [word_size, num_words, words_per_row]
        for data_name, data_values in sram_data.items():
            self.csv_writers[data_name].writerow(sram_specs + data_values)
        debug.info(2, "Data Added to CSV file.")

    def initialize_csv_file(self, sram_data):
        """Opens a CSV file and writer for every data set being written
        (wl/sae measurements and model values)."""
        header_dict = self.delay_obj.get_all_signal_names()
        self.csv_files = {}
        self.csv_writers = {}
        for data_name, header_list in header_dict.items():
            self.csv_files[data_name] = open('{}data_{}.csv'.format(MODEL_DIR, data_name), 'w')
            fields = ('word_size', 'num_words', 'words_per_row', *header_list)
            self.csv_writers[data_name] = csv.writer(self.csv_files[data_name], lineterminator='\n')
            self.csv_writers[data_name].writerow(fields)

    def get_sram_data(self, word_size, num_words, words_per_row):
        """Generates the SRAM based on input configuration and returns the data."""
        from characterizer import model_check
        c = sram_config(word_size=word_size,
                        num_words=num_words,
                        num_banks=1)
        # minimum 16 rows. Most sizes below 16*16 will try to automatically use less rows unless enforced.
        #if word_size*num_words < 256:
        c.words_per_row = words_per_row  # Force no column mux until incorporated into analytical delay.
        debug.info(1, "Getting data for {} bit, {} words SRAM with 1 bank".format(word_size, num_words))
        s = sram(c, name="sram_{}ws_{}words".format(word_size, num_words))
        tempspice = OPTS.openram_temp + "temp.sp"
        s.sp_write(tempspice)
        corner = (OPTS.process_corners[0], OPTS.supply_voltages[0], OPTS.temperatures[0])
        self.delay_obj = model_check(s.s, tempspice, corner)
        import tech
        # Only 1 probe address/column at a time
        probe_address = "1" * s.s.addr_size
        probe_data = s.s.word_size - 1
        loads = [tech.spice["msflop_in_cap"] * 4]
        slews = [tech.spice["rise_time"] * 2]
        sram_data = self.delay_obj.analyze(probe_address, probe_data, slews, loads)
        return sram_data

    def remove_lists_from_dict(self, dict):
        """Check all the values in the dict and replace list items with their
        first value; empty-list entries are removed."""
        # This is useful because the tests performed here only generate 1 value
        # but a list with 1 item makes writing it to a csv later harder.
        # Iterate over a snapshot of the keys: deleting entries while iterating
        # the live keys view raises RuntimeError in Python 3.
        for key in list(dict.keys()):
            if type(dict[key]) is list:
                if len(dict[key]) > 0:
                    dict[key] = dict[key][0]
                else:
                    del dict[key]
# instantiate a copy of the class to actually run the test
# Script entry point: parse OpenRAM options, strip argv so unittest does not
# see them, print the banner, and hand control to unittest.
if __name__ == "__main__":
    (OPTS, args) = globals.parse_args()
    del sys.argv[1:]
    header(__file__, OPTS.tech_name)
    unittest.main()