From 87380a4801d4c73341b64dadd0d100d178bb2199 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Sun, 13 Jan 2019 14:34:46 -0800 Subject: [PATCH 1/8] complete log file generation --- compiler/debug.py | 31 +++++++++++++++++++++++++++++ compiler/globals.py | 48 ++++++++++++++++++++++----------------------- compiler/openram.py | 9 +++++---- compiler/sram.py | 20 +++++++++---------- 4 files changed, 70 insertions(+), 38 deletions(-) diff --git a/compiler/debug.py b/compiler/debug.py index 1bf46db0..ea5eb45c 100644 --- a/compiler/debug.py +++ b/compiler/debug.py @@ -14,20 +14,51 @@ def check(check,str): index) = inspect.getouterframes(inspect.currentframe())[1] if not check: sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + assert 0 def error(str,return_value=0): (frame, filename, line_number, function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1] sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + assert return_value==0 def warning(str): (frame, filename, line_number, function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1] sys.stderr.write("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + +def print_raw(str): + print(str) + log(str) + + +def log(str): + try: + if log.create_file: + compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"w+") + log.create_file = 0 + else: + compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"a") + + if len(log.setup_output) != 0: + for line in log.setup_output: + compile_log.write(line) + log.setup_output = [] + compile_log.write(str + '\n') + except: + log.setup_out.append(str + "\n") + +#use a static list of strings to store messages until the global paths are set up +log.setup_output = [] +log.create_file = 1 + def info(lev, str): from globals import OPTS if (OPTS.debug_level >= lev): diff --git a/compiler/globals.py b/compiler/globals.py index a6360e24..f5c36551 100644 --- a/compiler/globals.py +++ b/compiler/globals.py @@ -71,26 +71,26 @@ def print_banner(): if OPTS.is_unit_test: return - print("|==============================================================================|") + debug.print_raw("|==============================================================================|") name = "OpenRAM Compiler" - print("|=========" + name.center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") - print("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|") - print("|=========" + "Computer Science and Engineering Department".center(60) + "=========|") - print("|=========" + "University of California Santa Cruz".center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") - print("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|") - print("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|") - print("|=========" + "Oklahoma State University".center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + name.center(60) + "=========|") + 
debug.print_raw("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|") + debug.print_raw("|=========" + "Computer Science and Engineering Department".center(60) + "=========|") + debug.print_raw("|=========" + "University of California Santa Cruz".center(60) + "=========|") + debug.print_raw("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|") + debug.print_raw("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|") + debug.print_raw("|=========" + "Oklahoma State University".center(60) + "=========|") + debug.print_raw("|=========" + " ".center(60) + "=========|") user_info = "Usage help: openram-user-group@ucsc.edu" - print("|=========" + user_info.center(60) + "=========|") + debug.print_raw("|=========" + user_info.center(60) + "=========|") dev_info = "Development help: openram-dev-group@ucsc.edu" - print("|=========" + dev_info.center(60) + "=========|") + debug.print_raw("|=========" + dev_info.center(60) + "=========|") temp_info = "Temp dir: {}".format(OPTS.openram_temp) - print("|=========" + temp_info.center(60) + "=========|") - print("|=========" + "See LICENSE for license info".center(60) + "=========|") - print("|==============================================================================|") + debug.print_raw("|=========" + temp_info.center(60) + "=========|") + debug.print_raw("|=========" + "See LICENSE for license info".center(60) + "=========|") + debug.print_raw("|==============================================================================|") def check_versions(): @@ -397,7 +397,7 @@ def print_time(name, now_time, last_time=None, indentation=2): time = str(round((now_time-last_time).total_seconds(),1)) + " seconds" else: time = now_time.strftime('%m/%d/%Y %H:%M:%S') - print("{0} {1}: {2}".format("*"*indentation,name,time)) + debug.print_raw("{0} {1}: {2}".format("*"*indentation,name,time)) def report_status(): @@ -413,20 +413,20 @@ def report_status(): if not OPTS.tech_name: debug.error("Tech name must be specified in config file.") - print("Technology: {0}".format(OPTS.tech_name)) - print("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks)) - print("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size, + debug.print_raw("Technology: {0}".format(OPTS.tech_name)) + debug.print_raw("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks)) + debug.print_raw("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size, OPTS.num_words, OPTS.num_banks)) - print("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports, + debug.print_raw("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports, OPTS.num_r_ports, OPTS.num_w_ports)) if OPTS.netlist_only: - print("Netlist only mode (no physical design is being done).") + debug.print_raw("Netlist only mode (no physical design is being done).") if not OPTS.inline_lvsdrc: - print("DRC/LVS/PEX is only run on the top-level design.") + debug.print_raw("DRC/LVS/PEX is only run on the top-level design.") if not OPTS.check_lvsdrc: - print("DRC/LVS/PEX is completely disabled.") + debug.print_raw("DRC/LVS/PEX is completely disabled.") diff --git a/compiler/openram.py b/compiler/openram.py index 78241f6a..0fe3f7cd 100755 --- a/compiler/openram.py +++ b/compiler/openram.py @@ -44,15 +44,16 @@ from sram_config import sram_config # 
Configure the SRAM organization c = sram_config(word_size=OPTS.word_size, num_words=OPTS.num_words) -print("Words per row: {}".format(c.words_per_row)) +debug.print_raw("Words per row: {}".format(c.words_per_row)) #from parser import * -output_extensions = ["sp","v","lib","py","html"] +output_extensions = ["sp","v","lib","py","html","log"] if not OPTS.netlist_only: output_extensions.extend(["gds","lef"]) output_files = ["{0}{1}.{2}".format(OPTS.output_path,OPTS.output_name,x) for x in output_extensions] -print("Output files are: ") -print(*output_files,sep="\n") +debug.print_raw("Output files are: ") +for path in output_files: + debug.print_raw(path) from sram import sram diff --git a/compiler/sram.py b/compiler/sram.py index 4971de08..a929434e 100644 --- a/compiler/sram.py +++ b/compiler/sram.py @@ -65,21 +65,21 @@ class sram(): # Write the layout start_time = datetime.datetime.now() gdsname = OPTS.output_path + self.s.name + ".gds" - print("GDS: Writing to {0}".format(gdsname)) + debug.print_raw("GDS: Writing to {0}".format(gdsname)) self.gds_write(gdsname) print_time("GDS", datetime.datetime.now(), start_time) # Create a LEF physical model start_time = datetime.datetime.now() lefname = OPTS.output_path + self.s.name + ".lef" - print("LEF: Writing to {0}".format(lefname)) + debug.print_raw("LEF: Writing to {0}".format(lefname)) self.lef_write(lefname) print_time("LEF", datetime.datetime.now(), start_time) # Save the spice file start_time = datetime.datetime.now() spname = OPTS.output_path + self.s.name + ".sp" - print("SP: Writing to {0}".format(spname)) + debug.print_raw("SP: Writing to {0}".format(spname)) self.sp_write(spname) print_time("Spice writing", datetime.datetime.now(), start_time) @@ -98,14 +98,14 @@ class sram(): # Characterize the design start_time = datetime.datetime.now() from characterizer import lib - print("LIB: Characterizing... ") + debug.print_raw("LIB: Characterizing... 
") if OPTS.analytical_delay: - print("Using analytical delay models (no characterization)") + debug.print_raw("Using analytical delay models (no characterization)") else: if OPTS.spice_name!="": - print("Performing simulation-based characterization with {}".format(OPTS.spice_name)) + debug.print_raw("Performing simulation-based characterization with {}".format(OPTS.spice_name)) if OPTS.trim_netlist: - print("Trimming netlist to speed up characterization.") + debug.print_raw("Trimming netlist to speed up characterization.") lib(out_dir=OPTS.output_path, sram=self.s, sp_file=sp_file) print_time("Characterization", datetime.datetime.now(), start_time) @@ -114,20 +114,20 @@ class sram(): start_time = datetime.datetime.now() from shutil import copyfile copyfile(OPTS.config_file + '.py', OPTS.output_path + OPTS.output_name + '.py') - print("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py')) + debug.print_raw("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py')) print_time("Config", datetime.datetime.now(), start_time) # Write the datasheet start_time = datetime.datetime.now() from datasheet_gen import datasheet_gen dname = OPTS.output_path + self.s.name + ".html" - print("Datasheet: Writing to {0}".format(dname)) + debug.print_raw("Datasheet: Writing to {0}".format(dname)) datasheet_gen.datasheet_write(self.s,dname) print_time("Datasheet", datetime.datetime.now(), start_time) # Write a verilog model start_time = datetime.datetime.now() vname = OPTS.output_path + self.s.name + ".v" - print("Verilog: Writing to {0}".format(vname)) + debug.print_raw("Verilog: Writing to {0}".format(vname)) self.verilog_write(vname) print_time("Verilog", datetime.datetime.now(), start_time) From b66c53a99a867cd78ac42f51979a2a318fed1cea Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Sun, 13 Jan 2019 15:02:13 -0800 Subject: [PATCH 2/8] added log file to datasheet --- compiler/datasheet/datasheet_gen.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index db03216b..9fcf921f 100644 --- a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -491,11 +491,12 @@ def parse_characterizer_csv(sram,f,pages): new_sheet.dlv_table.add_row(['.lef','LEF files','{0}.{1}'.format(OPTS.output_name,'lef')]) - new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) + new_sheet.dlv_table.add_row(['.log','OpenRAM compile log','{0}.{1}'.format(OPTS.output_name,'log')]) new_sheet.dlv_table.add_row(['.v','Verilog simulation models','{0}.{1}'.format(OPTS.output_name,'v')]) new_sheet.dlv_table.add_row(['.html','This datasheet','{0}.{1}'.format(OPTS.output_name,'html')]) new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','{0}.{1}'.format(OPTS.output_name,'py')]) + new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE]) new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS]) From 903cafb3362a940906dc47c399a4189b8ae650e2 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Tue, 15 Jan 2019 19:47:48 -0800 Subject: [PATCH 3/8] html parsing finished --- compiler/datasheet/datasheet.py | 54 ++-- compiler/datasheet/datasheet_gen.py | 266 ++++++++++-------- .../datasheet/library_page/lib_table_gen.py | 43 +++ 
compiler/datasheet/library_page/library.py | 16 ++ .../datasheet/library_page/library_gen.py | 60 ++++ 5 files changed, 287 insertions(+), 152 deletions(-) create mode 100644 compiler/datasheet/library_page/lib_table_gen.py create mode 100644 compiler/datasheet/library_page/library.py create mode 100644 compiler/datasheet/library_page/library_gen.py diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py index ce84c22c..566dfc80 100644 --- a/compiler/datasheet/datasheet.py +++ b/compiler/datasheet/datasheet.py @@ -4,59 +4,60 @@ import csv import base64 from globals import OPTS + class datasheet(): """ Defines the layout,but not the data, of the html datasheet """ - def __init__(self,identifier): + + def __init__(self, identifier): self.name = identifier self.html = "" - def generate_html(self): """ Generates html tables using flask-table """ with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/datasheet.css', 'r') as datasheet_css: - #css styling is kept in a seperate file + # css styling is kept in a seperate file self.html += datasheet_css.read() - -# with open(OPTS.openram_temp + "/datasheet.info") as info: + +# with open(OPTS.openram_temp + "/datasheet.info") as info: self.html += '' - + self.html += '-->' + vlsi_logo = 0 - with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png' , "rb") as image_file: + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: vlsi_logo = base64.b64encode(image_file.read()) openram_logo = 0 - with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png' , "rb") as image_file: + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png', "rb") as image_file: openram_logo = base64.b64encode(image_file.read()) + self.html += 'VLSIDA'.format(str(vlsi_logo)[ + 2:-1]) - self.html += 'VLSIDA'.format(str(vlsi_logo)[2:-1]) - + self.html += '

' + \
+            self.name + '.html' + ''
+        self.html += 'Compiled at: ' + self.time + ''
+        self.html += '' + \
+            'DRC errors: ' + str(self.DRC) + ''
+        self.html += '' + \
+            'LVS errors: ' + str(self.LVS) + ''
+        self.html += '' + \
+            'Git commit id: ' + str(self.git_id) + ''
-
-
-
-
-        self.html +=''+ self.name + '.html' + ''
-        self.html +='Compiled at: '+ self.time + ''
-        self.html +=''+ 'DRC errors: ' + str(self.DRC) + ''
-        self.html +=''+ 'LVS errors: ' + str(self.LVS) + ''
-        self.html += ''+ 'Git commit id: ' + str(self.git_id) + ''
-        self.html +='Ports and Configuration'
+        self.html += 'Ports and Configuration'
         # self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&quot;','"').replace('&gt;',">")
         self.html += self.io_table.to_html()
-
-        self.html +='Operating Conditions'
+
+        self.html += 'Operating Conditions'
         # self.html += operating_conditions(self.operating,table_id='data').__html__()
         self.html += self.operating_table.to_html()
@@ -68,9 +69,6 @@
         # self.html += characterization_corners(self.corners,table_id='data').__html__()
         self.html += self.corners_table.to_html()
-        self.html +='Deliverables'
+        self.html += 'Deliverables
' # self.html += deliverables(self.dlv,table_id='data').__html__().replace('<','<').replace('"','"').replace('>',">") self.html += self.dlv_table.to_html() - - - diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index 9fcf921f..93ab783c 100644 --- a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -1,21 +1,21 @@ #!/usr/bin/env python3 """ -This is a script to load data from the characterization and layout processes into +This is a script to load data from the characterization and layout processes into a web friendly html datasheet. """ -#TODO: -#include log file -#Diagram generation -#Improve css +# TODO: +# include log file +# Diagram generation +# Improve css -import debug from globals import OPTS -import os, math -import optparse +import os +import math import csv -from datasheet import * -from table_gen import * +import datasheet +import table_gen + def process_name(corner): """ @@ -30,20 +30,20 @@ def process_name(corner): else: return "custom" -def parse_characterizer_csv(sram,f,pages): + +def parse_characterizer_csv(sram, f, pages): """ Parses output data of the Liberty file generator in order to construct the timing and current table """ with open(f) as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') - line_count = 0 for row in csv_reader: found = 0 col = 0 - #defines layout of csv file + # defines layout of csv file NAME = row[col] col += 1 @@ -85,29 +85,28 @@ def parse_characterizer_csv(sram,f,pages): WORD_SIZE = row[col] col += 1 - + ORIGIN_ID = row[col] col += 1 DATETIME = row[col] - col+= 1 + col += 1 DRC = row[col] col += 1 LVS = row[col] col += 1 - - for sheet in pages: + for sheet in pages: if sheet.name == NAME: found = 1 - #if the .lib information is for an existing datasheet compare timing data + # if the .lib information is for an existing datasheet compare timing data for item in sheet.operating_table.rows: - #check if the new corner data is worse than the previous worse corner data + # check if the new corner data is worse than the previous worse corner data if item[0] == 'Operating Temperature': if float(TEMP) > float(item[3]): @@ -128,14 +127,13 @@ def parse_characterizer_csv(sram,f,pages): if item[0] == 'Operating Frequncy (F)': try: if float(math.floor(1000/float(MIN_PERIOD)) < float(item[3])): - item[3] = str(math.floor(1000/float(MIN_PERIOD))) + item[3] = str(math.floor( + 1000/float(MIN_PERIOD))) except Exception: pass - while(True): - if(row[col].startswith('DIN')): start = col for item in sheet.timing_table.rows: @@ -253,7 +251,6 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - elif(row[col].startswith('WEb')): start = col for item in sheet.timing_table.rows: @@ -293,7 +290,6 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - elif(row[col].startswith('ADDR')): start = col for item in sheet.timing_table.rows: @@ -333,198 +329,220 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - - else: break - - new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')]) - new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) + datasheet.new_sheet.corners_table.add_row([PROC, process_name( + PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) + datasheet.new_sheet.dlv_table.add_row( + ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))]) if found == 0: - - #if this is the first corner for this sram, run first time 
configuration and set up tables - new_sheet = datasheet(NAME) + + # if this is the first corner for this sram, run first time configuration and set up tables + new_sheet = datasheet.datasheet(NAME) pages.append(new_sheet) new_sheet.git_id = ORIGIN_ID new_sheet.time = DATETIME new_sheet.DRC = DRC new_sheet.LVS = LVS - new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME] + new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, + NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME] - new_sheet.corners_table = table_gen("corners") - new_sheet.corners_table.add_row(['Corner Name','Process','Power Supply','Temperature','Library Name Suffix']) - new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')]) - new_sheet.operating_table = table_gen("operating_table") - new_sheet.operating_table.add_row(['Parameter','Min','Typ','Max','Units']) - new_sheet.operating_table.add_row(['Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts']) - new_sheet.operating_table.add_row(['Operating Temperature',TEMP,TEMP,TEMP,'Celsius']) + new_sheet.corners_table = table_gen.table_gen("corners") + new_sheet.corners_table.add_row( + ['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix']) + new_sheet.corners_table.add_row([PROC, process_name( + PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) + new_sheet.operating_table = table_gen.table_gen("operating_table") + new_sheet.operating_table.add_row( + ['Parameter', 'Min', 'Typ', 'Max', 'Units']) + new_sheet.operating_table.add_row( + ['Power supply (VDD) range', VOLT, VOLT, VOLT, 'Volts']) + new_sheet.operating_table.add_row( + ['Operating Temperature', TEMP, TEMP, TEMP, 'Celsius']) try: - new_sheet.operating_table.add_row(['Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz']) + new_sheet.operating_table.add_row(['Operating Frequency (F)', '', '', str( + math.floor(1000/float(MIN_PERIOD))), 'MHz']) except Exception: - new_sheet.operating_table.add_row(['Operating Frequency (F)','','',"not available in netlist only",'MHz']) #failed to provide non-zero MIN_PERIOD - new_sheet.timing_table = table_gen("timing") - new_sheet.timing_table.add_row(['Parameter','Min','Max','Units']) + # failed to provide non-zero MIN_PERIOD + new_sheet.operating_table.add_row( + ['Operating Frequency (F)', '', '', "not available in netlist only", 'MHz']) + new_sheet.timing_table = table_gen.table_gen("timing") + new_sheet.timing_table.add_row( + ['Parameter', 'Min', 'Max', 'Units']) while(True): if(row[col].startswith('DIN')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold 
falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('DOUT')): start = col - - new_sheet.timing_table.add_row(['{0} cell rise'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} cell rise'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} cell fall'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} rise transition'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} cell fall'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} fall transition'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} rise transition'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} fall transition'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('CSb')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('WEb')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('ADDR')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], 
row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 else: break + new_sheet.dlv_table = table_gen.table_gen("dlv") + new_sheet.dlv_table.add_row(['Type', 'Description', 'Link']) - - new_sheet.dlv_table = table_gen("dlv") - new_sheet.dlv_table.add_row(['Type','Description','Link']) - - new_sheet.io_table = table_gen("io") + new_sheet.io_table = table_gen.table_gen("io") new_sheet.io_table.add_row(['Type', 'Value']) if not OPTS.netlist_only: - #physical layout files should not be generated in netlist only mode - new_sheet.dlv_table.add_row(['.gds','GDSII layout views','{0}.{1}'.format(OPTS.output_name,'gds')]) - new_sheet.dlv_table.add_row(['.lef','LEF files','{0}.{1}'.format(OPTS.output_name,'lef')]) - - - new_sheet.dlv_table.add_row(['.log','OpenRAM compile log','{0}.{1}'.format(OPTS.output_name,'log')]) - new_sheet.dlv_table.add_row(['.v','Verilog simulation models','{0}.{1}'.format(OPTS.output_name,'v')]) - new_sheet.dlv_table.add_row(['.html','This datasheet','{0}.{1}'.format(OPTS.output_name,'html')]) - new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) - new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','{0}.{1}'.format(OPTS.output_name,'py')]) - new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) - - new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE]) - new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS]) - new_sheet.io_table.add_row(['NUM_BANKS',NUM_BANKS]) - new_sheet.io_table.add_row(['NUM_RW_PORTS',NUM_RW_PORTS]) - new_sheet.io_table.add_row(['NUM_R_PORTS',NUM_R_PORTS]) - new_sheet.io_table.add_row(['NUM_W_PORTS',NUM_W_PORTS]) - new_sheet.io_table.add_row(['Area',sram.width * sram.height]) - - + # physical layout files should not be generated in netlist only mode + new_sheet.dlv_table.add_row( + ['.gds', 'GDSII layout views', '{0}.{1}'.format(OPTS.output_name, 'gds')]) + new_sheet.dlv_table.add_row( + ['.lef', 'LEF files', '{0}.{1}'.format(OPTS.output_name, 'lef')]) + new_sheet.dlv_table.add_row( + ['.log', 'OpenRAM compile log', '{0}.{1}'.format(OPTS.output_name, 'log')]) + new_sheet.dlv_table.add_row( + ['.v', 'Verilog simulation models', '{0}.{1}'.format(OPTS.output_name, 'v')]) + new_sheet.dlv_table.add_row( + ['.html', 'This datasheet', '{0}.{1}'.format(OPTS.output_name, 'html')]) + new_sheet.dlv_table.add_row( + ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))]) + new_sheet.dlv_table.add_row( + ['.py', 'OpenRAM configuration file', '{0}.{1}'.format(OPTS.output_name, 'py')]) + new_sheet.dlv_table.add_row( + ['.sp', 'SPICE netlists', '{0}.{1}'.format(OPTS.output_name, 'sp')]) + new_sheet.io_table.add_row(['WORD_SIZE', WORD_SIZE]) + new_sheet.io_table.add_row(['NUM_WORDS', NUM_WORDS]) + new_sheet.io_table.add_row(['NUM_BANKS', NUM_BANKS]) + new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS]) + new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS]) + new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS]) + new_sheet.io_table.add_row(['Area', sram.width * sram.height]) class datasheet_gen(): - def datasheet_write(sram,name): - + def datasheet_write(sram, name): in_dir = OPTS.openram_temp if not 
(os.path.isdir(in_dir)): os.mkdir(in_dir) - datasheets = [] parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets) - for sheets in datasheets: with open(name, 'w+') as f: sheets.generate_html() diff --git a/compiler/datasheet/library_page/lib_table_gen.py b/compiler/datasheet/library_page/lib_table_gen.py new file mode 100644 index 00000000..c35d09c0 --- /dev/null +++ b/compiler/datasheet/library_page/lib_table_gen.py @@ -0,0 +1,43 @@ +class table_gen: + def __init__(self, name): + self.name = name + self.rows = [] + self.table_id = 'data' + + def add_row(self, row): + self.rows.append(row) + + def gen_table_head(self): + html = '' + + html += '' + html += '' + for col in self.rows[0]: + html += '' + str(col) + '' + html += '' + html += '' + return html + + def gen_table_body(self): + html = '' + + html += '' + html += '' + for row in self.rows[1:]: + html += '' + for col in row: + html += '' + str(col) + '' + html += '' + html += '' + html += '' + return html + + def to_html(self): + + html = '' + html += '' + html += self.gen_table_head() + html += self.gen_table_body() + html += '
' + + return html diff --git a/compiler/datasheet/library_page/library.py b/compiler/datasheet/library_page/library.py new file mode 100644 index 00000000..d23d4a75 --- /dev/null +++ b/compiler/datasheet/library_page/library.py @@ -0,0 +1,16 @@ +import os +import base64 + + +class library(): + + def __init__(self): + self.html = '' + + def generate_html(self): + vlsi_logo = 0 + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: + vlsi_logo = base64.b64encode(image_file.read()) + + self.html += 'VLSIDA'.format(str(vlsi_logo)[ + 2:-1]) diff --git a/compiler/datasheet/library_page/library_gen.py b/compiler/datasheet/library_page/library_gen.py new file mode 100644 index 00000000..ca7f631d --- /dev/null +++ b/compiler/datasheet/library_page/library_gen.py @@ -0,0 +1,60 @@ +import library +import csv + + +class library_item(): + def __init__(self): + self.comment = '' + self.word_size = '' + self.num_words = '' + self.num_banks = '' + self.num_rw_ports = '' + self.num_r_ports = '' + self.num_w_ports = '' + self.Area = '' + self.git_id = '' + self.technology = '' + self.min_op = '' + + +class library_gen(): + def library_write(name): + with open(name, 'w+') as f: + library_page.generate_html() + f.write(library_page.html) + + def search_file(file, name): + length = len(name) + part = file.read(length) + i = 0 + while True: + if part == name: + break + char = file.read(1) + if not char: + return + part = part[1:] + char + i += 1 + return i + + def parse_html(file): + item = library_item() + start_tag = '' + + with open(file, 'r') as f: + start_byte = library_gen.search_file(f, start_tag) + len(start_tag) + end_byte = library_gen.search_file(f, end_tag) + start_byte + + f.seek(start_byte) + item.comment = f.read(end_byte - start_byte) + print(item.comment) + return item + + def parse_comment(comment, item): + + pass + + +library_page = library.library() +library_gen.parse_html('../../temp/sram_2_16_scn4m_subm.html') From 813a551691fd6e98f09e3f38184ca7b0c528b2e4 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Tue, 15 Jan 2019 20:48:20 -0800 Subject: [PATCH 4/8] comment parsing 1/2 complete; page gen setup complete --- compiler/datasheet/datasheet.py | 12 ++- compiler/datasheet/library_page/library.py | 2 +- .../datasheet/library_page/library_gen.py | 87 ++++++++++++++++--- 3 files changed, 83 insertions(+), 18 deletions(-) diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py index 566dfc80..22a50cb9 100644 --- a/compiler/datasheet/datasheet.py +++ b/compiler/datasheet/datasheet.py @@ -1,6 +1,5 @@ from table_gen import * import os -import csv import base64 from globals import OPTS @@ -22,13 +21,12 @@ class datasheet(): # css styling is kept in a seperate file self.html += datasheet_css.read() - -# with open(OPTS.openram_temp + "/datasheet.info") as info: + with open(OPTS.openram_temp + "/datasheet.info") as info: self.html += '' diff --git a/compiler/datasheet/library_page/library.py b/compiler/datasheet/library_page/library.py index d23d4a75..3477164c 100644 --- a/compiler/datasheet/library_page/library.py +++ b/compiler/datasheet/library_page/library.py @@ -7,7 +7,7 @@ class library(): def __init__(self): self.html = '' - def generate_html(self): + def generate_html(self,book): vlsi_logo = 0 with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: vlsi_logo = base64.b64encode(image_file.read()) diff --git 
a/compiler/datasheet/library_page/library_gen.py b/compiler/datasheet/library_page/library_gen.py index ca7f631d..ca5942f7 100644 --- a/compiler/datasheet/library_page/library_gen.py +++ b/compiler/datasheet/library_page/library_gen.py @@ -1,9 +1,10 @@ import library -import csv +from pathlib import Path class library_item(): def __init__(self): + self.name = '' self.comment = '' self.word_size = '' self.num_words = '' @@ -13,14 +14,15 @@ class library_item(): self.num_w_ports = '' self.Area = '' self.git_id = '' - self.technology = '' - self.min_op = '' + self.tech_name = '' + self.min_period = '' + self.datetime = '' class library_gen(): - def library_write(name): + def library_write(name, book): with open(name, 'w+') as f: - library_page.generate_html() + library_page.generate_html(book) f.write(library_page.html) def search_file(file, name): @@ -37,6 +39,66 @@ class library_gen(): i += 1 return i + def parse_comment(item): + row = item.comment.split(',') + print(row) + found = 0 + col = 0 + + item.name = row[col] + col += 1 + + item.num_words = row[col] + col += 1 + + item.num_banks = row[col] + col += 1 + + item.num_rw_ports = row[col] + col += 1 + + item.num_w_port = row[col] + col += 1 + + item.num_r_ports = row[col] + col += 1 + + item.tech_name = row[col] + col += 1 + print(item.tech_name) +# TEMP = row[col] + col += 1 + +# VOLT = row[col] + col += 1 + +# PROC = row[col] + col += 1 + + item.min_period = row[col] + col += 1 + print(item.min_period) +# OUT_DIR = row[col] + col += 1 + +# LIB_NAME = row[col] + col += 1 + + item.word_size = row[col] + col += 1 + + item.git_id = row[col] + col += 1 + + item.datetime = row[col] + col += 1 + +# DRC = row[col] + col += 1 + +# LVS = row[col] + col += 1 + def parse_html(file): item = library_item() start_tag = '' - - with open(file, 'r') as f: - start_byte = library_gen.search_file(f, start_tag) + len(start_tag) - end_byte = library_gen.search_file(f, end_tag) + start_byte - - f.seek(start_byte) - item.comment = f.read(end_byte - start_byte) - library_gen.parse_comment(item) - - return item - - def get_file_tree(path): - return list(Path(path).rglob("*.html")) - - -datasheet_list = library_gen.get_file_tree('./deliverables') -print(datasheet_list) -library_page = library.library() -book = [] -for datasheet in datasheet_list: - book.append(library_gen.parse_html(datasheet)) -library_gen.library_write('index.html', book) From 0556b864245e15d799bf7d7227138d1db2c34381 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Wed, 16 Jan 2019 14:52:01 -0800 Subject: [PATCH 6/8] html datasheet no longer dependeds on sram --- compiler/characterizer/lib.py | 4 ++-- compiler/datasheet/datasheet_gen.py | 12 +++++++----- compiler/sram.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/compiler/characterizer/lib.py b/compiler/characterizer/lib.py index 7b10eb8f..156481ce 100644 --- a/compiler/characterizer/lib.py +++ b/compiler/characterizer/lib.py @@ -530,7 +530,7 @@ class lib: "sram_{0}_{1}_{2}".format(OPTS.word_size, OPTS.num_words, OPTS.tech_name), OPTS.num_words, OPTS.num_banks, - OPTS.num_rw_ports, + OPTS.num_rw_ports, OPTS.num_w_ports, OPTS.num_r_ports, OPTS.tech_name, @@ -555,7 +555,7 @@ class lib: LVS = str(total_lvs_errors) datasheet.write("{0},{1},".format(DRC, LVS)) - + datasheet.write(str(self.sram.width * self.sram.height)+',') for port in self.all_ports: #DIN timings if port in self.write_ports: diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index 93ab783c..902c058c 100644 --- 
a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -4,7 +4,7 @@ This is a script to load data from the characterization and layout processes int a web friendly html datasheet. """ # TODO: -# include log file +# include power # Diagram generation # Improve css @@ -31,7 +31,7 @@ def process_name(corner): return "custom" -def parse_characterizer_csv(sram, f, pages): +def parse_characterizer_csv(f, pages): """ Parses output data of the Liberty file generator in order to construct the timing and current table @@ -98,6 +98,8 @@ def parse_characterizer_csv(sram, f, pages): LVS = row[col] col += 1 + AREA = row[col] + col += 1 for sheet in pages: if sheet.name == NAME: @@ -529,11 +531,11 @@ def parse_characterizer_csv(sram, f, pages): new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS]) new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS]) new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS]) - new_sheet.io_table.add_row(['Area', sram.width * sram.height]) + new_sheet.io_table.add_row(['Area', AREA]) class datasheet_gen(): - def datasheet_write(sram, name): + def datasheet_write(name): in_dir = OPTS.openram_temp @@ -541,7 +543,7 @@ class datasheet_gen(): os.mkdir(in_dir) datasheets = [] - parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets) + parse_characterizer_csv(in_dir + "/datasheet.info", datasheets) for sheets in datasheets: with open(name, 'w+') as f: diff --git a/compiler/sram.py b/compiler/sram.py index a929434e..5ff28d47 100644 --- a/compiler/sram.py +++ b/compiler/sram.py @@ -122,7 +122,7 @@ class sram(): from datasheet_gen import datasheet_gen dname = OPTS.output_path + self.s.name + ".html" debug.print_raw("Datasheet: Writing to {0}".format(dname)) - datasheet_gen.datasheet_write(self.s,dname) + datasheet_gen.datasheet_write(dname) print_time("Datasheet", datetime.datetime.now(), start_time) # Write a verilog model From 41b8e8665b168ec93b3b2f746cb8806834745ea5 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Wed, 16 Jan 2019 15:43:08 -0800 Subject: [PATCH 7/8] updated datasheet descriptors --- compiler/datasheet/datasheet_gen.py | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index 902c058c..6786c7bf 100644 --- a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -135,7 +135,7 @@ def parse_characterizer_csv(f, pages): pass while(True): - + col_start = col if(row[col].startswith('DIN')): start = col for item in sheet.timing_table.rows: @@ -332,11 +332,13 @@ def parse_characterizer_csv(f, pages): col += 1 else: + for element in row[col_start: col - 1]: + sheet.description.append(str(element)) break - datasheet.new_sheet.corners_table.add_row([PROC, process_name( + new_sheet.corners_table.add_row([PROC, process_name( PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) - datasheet.new_sheet.dlv_table.add_row( + new_sheet.dlv_table.add_row( ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))]) if found == 0: @@ -349,15 +351,16 @@ def parse_characterizer_csv(f, pages): new_sheet.time = DATETIME new_sheet.DRC = DRC new_sheet.LVS = LVS - new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, - NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME] + new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, + NUM_R_PORTS, TECH_NAME, MIN_PERIOD, WORD_SIZE, ORIGIN_ID, DATETIME] 
new_sheet.corners_table = table_gen.table_gen("corners") new_sheet.corners_table.add_row( ['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix']) new_sheet.corners_table.add_row([PROC, process_name( PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) - new_sheet.operating_table = table_gen.table_gen("operating_table") + new_sheet.operating_table = table_gen.table_gen( + "operating_table") new_sheet.operating_table.add_row( ['Parameter', 'Min', 'Typ', 'Max', 'Units']) new_sheet.operating_table.add_row( @@ -376,6 +379,7 @@ def parse_characterizer_csv(f, pages): new_sheet.timing_table.add_row( ['Parameter', 'Min', 'Max', 'Units']) while(True): + col_start = col if(row[col].startswith('DIN')): start = col @@ -497,6 +501,8 @@ def parse_characterizer_csv(f, pages): col += 1 else: + for element in row[col_start:col-1]: + sheet.description.append(str(element)) break new_sheet.dlv_table = table_gen.table_gen("dlv") From 25b0da404f6408ec435c9bdd86a348c2f04793bd Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Wed, 16 Jan 2019 16:08:41 -0800 Subject: [PATCH 8/8] removed EOL error in comment --- compiler/datasheet/datasheet.py | 1 - 1 file changed, 1 deletion(-) diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py index 22a50cb9..d15733d5 100644 --- a/compiler/datasheet/datasheet.py +++ b/compiler/datasheet/datasheet.py @@ -27,7 +27,6 @@ class datasheet(): self.html += row # for item in self.description: # self.html += item + ',' - self.html += 'EOL' self.html += '-->' vlsi_logo = 0
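
The heart of PATCH 1/8 is the log() helper added to debug.py: messages are buffered on a function attribute while the output path and name are still unknown, and everything is flushed to <output_name>.log once the file can be opened ("w+" on the first write, "a" afterwards). The sketch below is a simplified, self-contained illustration of that pattern rather than the patch itself; the output_dir/output_name parameters and the log.pending/log.first_write attributes are stand-ins for the patch's globals.OPTS fields and its setup_output/create_file attributes, and the buffer is referenced by a single name everywhere, including in the fallback branch.

    import os

    def log(message, output_dir=None, output_name=None):
        """Append a message to <output_name>.log, buffering it until the paths are known."""
        if output_dir is None or output_name is None:
            # Output location not configured yet: hold the message in memory.
            log.pending.append(message + "\n")
            return
        os.makedirs(output_dir, exist_ok=True)
        log_path = os.path.join(output_dir, output_name + ".log")
        # The first successful write creates/truncates the log; later writes append.
        mode = "w+" if log.first_write else "a"
        with open(log_path, mode) as log_file:
            for line in log.pending:  # flush anything buffered before setup
                log_file.write(line)
            log.pending = []
            log_file.write(message + "\n")
        log.first_write = False

    # Function attributes stand in for the static state kept on debug.log.
    log.pending = []
    log.first_write = True

    # Example: the first call is buffered, the second flushes it and starts the log.
    log("WARNING: output paths not configured yet")
    log("Technology: scn4m_subm", "/tmp/openram_output", "sram_2_16_scn4m_subm")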
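The later patches in this series embed the characterizer's datasheet.info record inside an HTML comment near the top of the generated datasheet (datasheet.py writes the rows between comment markers and closes the block with '-->'), so that library_gen can later recover an SRAM's configuration from the .html file alone via its search_file()/parse_html() scan. The snippet below is a hedged sketch of that recovery step using plain string search instead of the byte-by-byte scan; the function name and the example file path are illustrative only.

    def extract_embedded_info(html_path, start_tag="<!--", end_tag="-->"):
        """Return the text between the first start_tag/end_tag pair in an HTML file."""
        with open(html_path, "r") as html_file:
            text = html_file.read()
        start = text.find(start_tag)
        if start == -1:
            return None  # no embedded comment block found
        start += len(start_tag)
        end = text.find(end_tag, start)
        if end == -1:
            return None  # comment block never closed
        return text[start:end]

    # Example: the embedded block is comma-separated; its first field is the SRAM name.
    info = extract_embedded_info("sram_2_16_scn4m_subm.html")
    if info is not None:
        print(info.split(",")[0])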