From 87380a4801d4c73341b64dadd0d100d178bb2199 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Sun, 13 Jan 2019 14:34:46 -0800 Subject: [PATCH 01/13] complete log file generation --- compiler/debug.py | 31 +++++++++++++++++++++++++++++ compiler/globals.py | 48 ++++++++++++++++++++++----------------------- compiler/openram.py | 9 +++++---- compiler/sram.py | 20 +++++++++---------- 4 files changed, 70 insertions(+), 38 deletions(-) diff --git a/compiler/debug.py b/compiler/debug.py index 1bf46db0..ea5eb45c 100644 --- a/compiler/debug.py +++ b/compiler/debug.py @@ -14,20 +14,51 @@ def check(check,str): index) = inspect.getouterframes(inspect.currentframe())[1] if not check: sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + assert 0 def error(str,return_value=0): (frame, filename, line_number, function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1] sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + assert return_value==0 def warning(str): (frame, filename, line_number, function_name, lines, index) = inspect.getouterframes(inspect.currentframe())[1] sys.stderr.write("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + log("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str)) + +def print_raw(str): + print(str) + log(str) + + +def log(str): + try: + if log.create_file: + compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"w+") + log.create_file = 0 + else: + compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"a") + + if len(log.setup_output) != 0: + for line in log.setup_output: + compile_log.write(line) + log.setup_output = [] + compile_log.write(str + '\n') + except: + log.setup_out.append(str + "\n") + +#use a static list of strings to store messages until the global paths are set up +log.setup_output = [] +log.create_file = 1 + def info(lev, str): from globals import OPTS if (OPTS.debug_level >= lev): diff --git a/compiler/globals.py b/compiler/globals.py index a6360e24..f5c36551 100644 --- a/compiler/globals.py +++ b/compiler/globals.py @@ -71,26 +71,26 @@ def print_banner(): if OPTS.is_unit_test: return - print("|==============================================================================|") + debug.print_raw("|==============================================================================|") name = "OpenRAM Compiler" - print("|=========" + name.center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") - print("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|") - print("|=========" + "Computer Science and Engineering Department".center(60) + "=========|") - print("|=========" + "University of California Santa Cruz".center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") - print("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|") - print("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|") - print("|=========" + "Oklahoma State University".center(60) + "=========|") - print("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + name.center(60) + "=========|") + 
debug.print_raw("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|") + debug.print_raw("|=========" + "Computer Science and Engineering Department".center(60) + "=========|") + debug.print_raw("|=========" + "University of California Santa Cruz".center(60) + "=========|") + debug.print_raw("|=========" + " ".center(60) + "=========|") + debug.print_raw("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|") + debug.print_raw("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|") + debug.print_raw("|=========" + "Oklahoma State University".center(60) + "=========|") + debug.print_raw("|=========" + " ".center(60) + "=========|") user_info = "Usage help: openram-user-group@ucsc.edu" - print("|=========" + user_info.center(60) + "=========|") + debug.print_raw("|=========" + user_info.center(60) + "=========|") dev_info = "Development help: openram-dev-group@ucsc.edu" - print("|=========" + dev_info.center(60) + "=========|") + debug.print_raw("|=========" + dev_info.center(60) + "=========|") temp_info = "Temp dir: {}".format(OPTS.openram_temp) - print("|=========" + temp_info.center(60) + "=========|") - print("|=========" + "See LICENSE for license info".center(60) + "=========|") - print("|==============================================================================|") + debug.print_raw("|=========" + temp_info.center(60) + "=========|") + debug.print_raw("|=========" + "See LICENSE for license info".center(60) + "=========|") + debug.print_raw("|==============================================================================|") def check_versions(): @@ -397,7 +397,7 @@ def print_time(name, now_time, last_time=None, indentation=2): time = str(round((now_time-last_time).total_seconds(),1)) + " seconds" else: time = now_time.strftime('%m/%d/%Y %H:%M:%S') - print("{0} {1}: {2}".format("*"*indentation,name,time)) + debug.print_raw("{0} {1}: {2}".format("*"*indentation,name,time)) def report_status(): @@ -413,20 +413,20 @@ def report_status(): if not OPTS.tech_name: debug.error("Tech name must be specified in config file.") - print("Technology: {0}".format(OPTS.tech_name)) - print("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks)) - print("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size, + debug.print_raw("Technology: {0}".format(OPTS.tech_name)) + debug.print_raw("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks)) + debug.print_raw("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size, OPTS.num_words, OPTS.num_banks)) - print("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports, + debug.print_raw("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports, OPTS.num_r_ports, OPTS.num_w_ports)) if OPTS.netlist_only: - print("Netlist only mode (no physical design is being done).") + debug.print_raw("Netlist only mode (no physical design is being done).") if not OPTS.inline_lvsdrc: - print("DRC/LVS/PEX is only run on the top-level design.") + debug.print_raw("DRC/LVS/PEX is only run on the top-level design.") if not OPTS.check_lvsdrc: - print("DRC/LVS/PEX is completely disabled.") + debug.print_raw("DRC/LVS/PEX is completely disabled.") diff --git a/compiler/openram.py b/compiler/openram.py index 78241f6a..0fe3f7cd 100755 --- a/compiler/openram.py +++ b/compiler/openram.py @@ -44,15 +44,16 @@ from sram_config import sram_config # 
Configure the SRAM organization c = sram_config(word_size=OPTS.word_size, num_words=OPTS.num_words) -print("Words per row: {}".format(c.words_per_row)) +debug.print_raw("Words per row: {}".format(c.words_per_row)) #from parser import * -output_extensions = ["sp","v","lib","py","html"] +output_extensions = ["sp","v","lib","py","html","log"] if not OPTS.netlist_only: output_extensions.extend(["gds","lef"]) output_files = ["{0}{1}.{2}".format(OPTS.output_path,OPTS.output_name,x) for x in output_extensions] -print("Output files are: ") -print(*output_files,sep="\n") +debug.print_raw("Output files are: ") +for path in output_files: + debug.print_raw(path) from sram import sram diff --git a/compiler/sram.py b/compiler/sram.py index 4971de08..a929434e 100644 --- a/compiler/sram.py +++ b/compiler/sram.py @@ -65,21 +65,21 @@ class sram(): # Write the layout start_time = datetime.datetime.now() gdsname = OPTS.output_path + self.s.name + ".gds" - print("GDS: Writing to {0}".format(gdsname)) + debug.print_raw("GDS: Writing to {0}".format(gdsname)) self.gds_write(gdsname) print_time("GDS", datetime.datetime.now(), start_time) # Create a LEF physical model start_time = datetime.datetime.now() lefname = OPTS.output_path + self.s.name + ".lef" - print("LEF: Writing to {0}".format(lefname)) + debug.print_raw("LEF: Writing to {0}".format(lefname)) self.lef_write(lefname) print_time("LEF", datetime.datetime.now(), start_time) # Save the spice file start_time = datetime.datetime.now() spname = OPTS.output_path + self.s.name + ".sp" - print("SP: Writing to {0}".format(spname)) + debug.print_raw("SP: Writing to {0}".format(spname)) self.sp_write(spname) print_time("Spice writing", datetime.datetime.now(), start_time) @@ -98,14 +98,14 @@ class sram(): # Characterize the design start_time = datetime.datetime.now() from characterizer import lib - print("LIB: Characterizing... ") + debug.print_raw("LIB: Characterizing... 
") if OPTS.analytical_delay: - print("Using analytical delay models (no characterization)") + debug.print_raw("Using analytical delay models (no characterization)") else: if OPTS.spice_name!="": - print("Performing simulation-based characterization with {}".format(OPTS.spice_name)) + debug.print_raw("Performing simulation-based characterization with {}".format(OPTS.spice_name)) if OPTS.trim_netlist: - print("Trimming netlist to speed up characterization.") + debug.print_raw("Trimming netlist to speed up characterization.") lib(out_dir=OPTS.output_path, sram=self.s, sp_file=sp_file) print_time("Characterization", datetime.datetime.now(), start_time) @@ -114,20 +114,20 @@ class sram(): start_time = datetime.datetime.now() from shutil import copyfile copyfile(OPTS.config_file + '.py', OPTS.output_path + OPTS.output_name + '.py') - print("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py')) + debug.print_raw("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py')) print_time("Config", datetime.datetime.now(), start_time) # Write the datasheet start_time = datetime.datetime.now() from datasheet_gen import datasheet_gen dname = OPTS.output_path + self.s.name + ".html" - print("Datasheet: Writing to {0}".format(dname)) + debug.print_raw("Datasheet: Writing to {0}".format(dname)) datasheet_gen.datasheet_write(self.s,dname) print_time("Datasheet", datetime.datetime.now(), start_time) # Write a verilog model start_time = datetime.datetime.now() vname = OPTS.output_path + self.s.name + ".v" - print("Verilog: Writing to {0}".format(vname)) + debug.print_raw("Verilog: Writing to {0}".format(vname)) self.verilog_write(vname) print_time("Verilog", datetime.datetime.now(), start_time) From b66c53a99a867cd78ac42f51979a2a318fed1cea Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Sun, 13 Jan 2019 15:02:13 -0800 Subject: [PATCH 02/13] added log file to datasheet --- compiler/datasheet/datasheet_gen.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index db03216b..9fcf921f 100644 --- a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -491,11 +491,12 @@ def parse_characterizer_csv(sram,f,pages): new_sheet.dlv_table.add_row(['.lef','LEF files','{0}.{1}'.format(OPTS.output_name,'lef')]) - new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) + new_sheet.dlv_table.add_row(['.log','OpenRAM compile log','{0}.{1}'.format(OPTS.output_name,'log')]) new_sheet.dlv_table.add_row(['.v','Verilog simulation models','{0}.{1}'.format(OPTS.output_name,'v')]) new_sheet.dlv_table.add_row(['.html','This datasheet','{0}.{1}'.format(OPTS.output_name,'html')]) new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','{0}.{1}'.format(OPTS.output_name,'py')]) + new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE]) new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS]) From 903cafb3362a940906dc47c399a4189b8ae650e2 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Tue, 15 Jan 2019 19:47:48 -0800 Subject: [PATCH 03/13] html parsing finished --- compiler/datasheet/datasheet.py | 54 ++-- compiler/datasheet/datasheet_gen.py | 266 ++++++++++-------- .../datasheet/library_page/lib_table_gen.py | 43 +++ 
compiler/datasheet/library_page/library.py | 16 ++ .../datasheet/library_page/library_gen.py | 60 ++++ 5 files changed, 287 insertions(+), 152 deletions(-) create mode 100644 compiler/datasheet/library_page/lib_table_gen.py create mode 100644 compiler/datasheet/library_page/library.py create mode 100644 compiler/datasheet/library_page/library_gen.py diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py index ce84c22c..566dfc80 100644 --- a/compiler/datasheet/datasheet.py +++ b/compiler/datasheet/datasheet.py @@ -4,59 +4,60 @@ import csv import base64 from globals import OPTS + class datasheet(): """ Defines the layout,but not the data, of the html datasheet """ - def __init__(self,identifier): + + def __init__(self, identifier): self.name = identifier self.html = "" - def generate_html(self): """ Generates html tables using flask-table """ with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/datasheet.css', 'r') as datasheet_css: - #css styling is kept in a seperate file + # css styling is kept in a seperate file self.html += datasheet_css.read() - -# with open(OPTS.openram_temp + "/datasheet.info") as info: + +# with open(OPTS.openram_temp + "/datasheet.info") as info: self.html += '' - + self.html += '-->' + vlsi_logo = 0 - with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png' , "rb") as image_file: + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: vlsi_logo = base64.b64encode(image_file.read()) openram_logo = 0 - with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png' , "rb") as image_file: + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png', "rb") as image_file: openram_logo = base64.b64encode(image_file.read()) + self.html += 'VLSIDA'.format(str(vlsi_logo)[ + 2:-1]) - self.html += 'VLSIDA'.format(str(vlsi_logo)[2:-1]) - + self.html += '

' + \
+            self.name + '.html' + ''
+        self.html += 'Compiled at: ' + self.time + ''
+        self.html += '' + \
+            'DRC errors: ' + str(self.DRC) + ''
+        self.html += '' + \
+            'LVS errors: ' + str(self.LVS) + ''
+        self.html += '' + \
+            'Git commit id: ' + str(self.git_id) + ''
-
-
-
-        self.html +=''+ self.name + '.html' + ''
-        self.html +='Compiled at: '+ self.time + ''
-        self.html +=''+ 'DRC errors: ' + str(self.DRC) + ''
-        self.html +=''+ 'LVS errors: ' + str(self.LVS) + ''
-        self.html += ''+ 'Git commit id: ' + str(self.git_id) + ''
-        self.html +='Ports and Configuration'
+        self.html += 'Ports and Configuration'
 # self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&quot;','"').replace('&gt;',">")
         self.html += self.io_table.to_html()
-
-        self.html +='Operating Conditions'
+
+        self.html += 'Operating Conditions'
 # self.html += operating_conditions(self.operating,table_id='data').__html__()
         self.html += self.operating_table.to_html()
@@ -68,9 +69,6 @@ class datasheet():
 # self.html += characterization_corners(self.corners,table_id='data').__html__()
         self.html += self.corners_table.to_html()
-        self.html +='Deliverables'
+        self.html += 'Deliverables
' # self.html += deliverables(self.dlv,table_id='data').__html__().replace('<','<').replace('"','"').replace('>',">") self.html += self.dlv_table.to_html() - - - diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py index 9fcf921f..93ab783c 100644 --- a/compiler/datasheet/datasheet_gen.py +++ b/compiler/datasheet/datasheet_gen.py @@ -1,21 +1,21 @@ #!/usr/bin/env python3 """ -This is a script to load data from the characterization and layout processes into +This is a script to load data from the characterization and layout processes into a web friendly html datasheet. """ -#TODO: -#include log file -#Diagram generation -#Improve css +# TODO: +# include log file +# Diagram generation +# Improve css -import debug from globals import OPTS -import os, math -import optparse +import os +import math import csv -from datasheet import * -from table_gen import * +import datasheet +import table_gen + def process_name(corner): """ @@ -30,20 +30,20 @@ def process_name(corner): else: return "custom" -def parse_characterizer_csv(sram,f,pages): + +def parse_characterizer_csv(sram, f, pages): """ Parses output data of the Liberty file generator in order to construct the timing and current table """ with open(f) as csv_file: csv_reader = csv.reader(csv_file, delimiter=',') - line_count = 0 for row in csv_reader: found = 0 col = 0 - #defines layout of csv file + # defines layout of csv file NAME = row[col] col += 1 @@ -85,29 +85,28 @@ def parse_characterizer_csv(sram,f,pages): WORD_SIZE = row[col] col += 1 - + ORIGIN_ID = row[col] col += 1 DATETIME = row[col] - col+= 1 + col += 1 DRC = row[col] col += 1 LVS = row[col] col += 1 - - for sheet in pages: + for sheet in pages: if sheet.name == NAME: found = 1 - #if the .lib information is for an existing datasheet compare timing data + # if the .lib information is for an existing datasheet compare timing data for item in sheet.operating_table.rows: - #check if the new corner data is worse than the previous worse corner data + # check if the new corner data is worse than the previous worse corner data if item[0] == 'Operating Temperature': if float(TEMP) > float(item[3]): @@ -128,14 +127,13 @@ def parse_characterizer_csv(sram,f,pages): if item[0] == 'Operating Frequncy (F)': try: if float(math.floor(1000/float(MIN_PERIOD)) < float(item[3])): - item[3] = str(math.floor(1000/float(MIN_PERIOD))) + item[3] = str(math.floor( + 1000/float(MIN_PERIOD))) except Exception: pass - while(True): - if(row[col].startswith('DIN')): start = col for item in sheet.timing_table.rows: @@ -253,7 +251,6 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - elif(row[col].startswith('WEb')): start = col for item in sheet.timing_table.rows: @@ -293,7 +290,6 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - elif(row[col].startswith('ADDR')): start = col for item in sheet.timing_table.rows: @@ -333,198 +329,220 @@ def parse_characterizer_csv(sram,f,pages): col += 1 - - else: break - - new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')]) - new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) + datasheet.new_sheet.corners_table.add_row([PROC, process_name( + PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) + datasheet.new_sheet.dlv_table.add_row( + ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))]) if found == 0: - - #if this is the first corner for this sram, run first time 
configuration and set up tables - new_sheet = datasheet(NAME) + + # if this is the first corner for this sram, run first time configuration and set up tables + new_sheet = datasheet.datasheet(NAME) pages.append(new_sheet) new_sheet.git_id = ORIGIN_ID new_sheet.time = DATETIME new_sheet.DRC = DRC new_sheet.LVS = LVS - new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME] + new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, + NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME] - new_sheet.corners_table = table_gen("corners") - new_sheet.corners_table.add_row(['Corner Name','Process','Power Supply','Temperature','Library Name Suffix']) - new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')]) - new_sheet.operating_table = table_gen("operating_table") - new_sheet.operating_table.add_row(['Parameter','Min','Typ','Max','Units']) - new_sheet.operating_table.add_row(['Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts']) - new_sheet.operating_table.add_row(['Operating Temperature',TEMP,TEMP,TEMP,'Celsius']) + new_sheet.corners_table = table_gen.table_gen("corners") + new_sheet.corners_table.add_row( + ['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix']) + new_sheet.corners_table.add_row([PROC, process_name( + PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')]) + new_sheet.operating_table = table_gen.table_gen("operating_table") + new_sheet.operating_table.add_row( + ['Parameter', 'Min', 'Typ', 'Max', 'Units']) + new_sheet.operating_table.add_row( + ['Power supply (VDD) range', VOLT, VOLT, VOLT, 'Volts']) + new_sheet.operating_table.add_row( + ['Operating Temperature', TEMP, TEMP, TEMP, 'Celsius']) try: - new_sheet.operating_table.add_row(['Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz']) + new_sheet.operating_table.add_row(['Operating Frequency (F)', '', '', str( + math.floor(1000/float(MIN_PERIOD))), 'MHz']) except Exception: - new_sheet.operating_table.add_row(['Operating Frequency (F)','','',"not available in netlist only",'MHz']) #failed to provide non-zero MIN_PERIOD - new_sheet.timing_table = table_gen("timing") - new_sheet.timing_table.add_row(['Parameter','Min','Max','Units']) + # failed to provide non-zero MIN_PERIOD + new_sheet.operating_table.add_row( + ['Operating Frequency (F)', '', '', "not available in netlist only", 'MHz']) + new_sheet.timing_table = table_gen.table_gen("timing") + new_sheet.timing_table.add_row( + ['Parameter', 'Min', 'Max', 'Units']) while(True): if(row[col].startswith('DIN')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold 
falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('DOUT')): start = col - - new_sheet.timing_table.add_row(['{0} cell rise'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} cell rise'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} cell fall'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} rise transition'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} cell fall'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} fall transition'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} rise transition'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} fall transition'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('CSb')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('WEb')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 elif(row[col].startswith('ADDR')): start = col - - new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns']) + + new_sheet.timing_table.add_row( + ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - - new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns']) - - col += 2 - - new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} setup falling'.format(row[start]), row[col+1], 
row[col+2], 'ns']) col += 2 - new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns']) + new_sheet.timing_table.add_row( + ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns']) col += 2 - col +=1 + new_sheet.timing_table.add_row( + ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns']) + + col += 2 + + col += 1 else: break + new_sheet.dlv_table = table_gen.table_gen("dlv") + new_sheet.dlv_table.add_row(['Type', 'Description', 'Link']) - - new_sheet.dlv_table = table_gen("dlv") - new_sheet.dlv_table.add_row(['Type','Description','Link']) - - new_sheet.io_table = table_gen("io") + new_sheet.io_table = table_gen.table_gen("io") new_sheet.io_table.add_row(['Type', 'Value']) if not OPTS.netlist_only: - #physical layout files should not be generated in netlist only mode - new_sheet.dlv_table.add_row(['.gds','GDSII layout views','{0}.{1}'.format(OPTS.output_name,'gds')]) - new_sheet.dlv_table.add_row(['.lef','LEF files','{0}.{1}'.format(OPTS.output_name,'lef')]) - - - new_sheet.dlv_table.add_row(['.log','OpenRAM compile log','{0}.{1}'.format(OPTS.output_name,'log')]) - new_sheet.dlv_table.add_row(['.v','Verilog simulation models','{0}.{1}'.format(OPTS.output_name,'v')]) - new_sheet.dlv_table.add_row(['.html','This datasheet','{0}.{1}'.format(OPTS.output_name,'html')]) - new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))]) - new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','{0}.{1}'.format(OPTS.output_name,'py')]) - new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')]) - - new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE]) - new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS]) - new_sheet.io_table.add_row(['NUM_BANKS',NUM_BANKS]) - new_sheet.io_table.add_row(['NUM_RW_PORTS',NUM_RW_PORTS]) - new_sheet.io_table.add_row(['NUM_R_PORTS',NUM_R_PORTS]) - new_sheet.io_table.add_row(['NUM_W_PORTS',NUM_W_PORTS]) - new_sheet.io_table.add_row(['Area',sram.width * sram.height]) - - + # physical layout files should not be generated in netlist only mode + new_sheet.dlv_table.add_row( + ['.gds', 'GDSII layout views', '{0}.{1}'.format(OPTS.output_name, 'gds')]) + new_sheet.dlv_table.add_row( + ['.lef', 'LEF files', '{0}.{1}'.format(OPTS.output_name, 'lef')]) + new_sheet.dlv_table.add_row( + ['.log', 'OpenRAM compile log', '{0}.{1}'.format(OPTS.output_name, 'log')]) + new_sheet.dlv_table.add_row( + ['.v', 'Verilog simulation models', '{0}.{1}'.format(OPTS.output_name, 'v')]) + new_sheet.dlv_table.add_row( + ['.html', 'This datasheet', '{0}.{1}'.format(OPTS.output_name, 'html')]) + new_sheet.dlv_table.add_row( + ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))]) + new_sheet.dlv_table.add_row( + ['.py', 'OpenRAM configuration file', '{0}.{1}'.format(OPTS.output_name, 'py')]) + new_sheet.dlv_table.add_row( + ['.sp', 'SPICE netlists', '{0}.{1}'.format(OPTS.output_name, 'sp')]) + new_sheet.io_table.add_row(['WORD_SIZE', WORD_SIZE]) + new_sheet.io_table.add_row(['NUM_WORDS', NUM_WORDS]) + new_sheet.io_table.add_row(['NUM_BANKS', NUM_BANKS]) + new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS]) + new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS]) + new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS]) + new_sheet.io_table.add_row(['Area', sram.width * sram.height]) class datasheet_gen(): - def datasheet_write(sram,name): - + def datasheet_write(sram, name): in_dir = OPTS.openram_temp if not 
(os.path.isdir(in_dir)): os.mkdir(in_dir) - datasheets = [] parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets) - for sheets in datasheets: with open(name, 'w+') as f: sheets.generate_html() diff --git a/compiler/datasheet/library_page/lib_table_gen.py b/compiler/datasheet/library_page/lib_table_gen.py new file mode 100644 index 00000000..c35d09c0 --- /dev/null +++ b/compiler/datasheet/library_page/lib_table_gen.py @@ -0,0 +1,43 @@ +class table_gen: + def __init__(self, name): + self.name = name + self.rows = [] + self.table_id = 'data' + + def add_row(self, row): + self.rows.append(row) + + def gen_table_head(self): + html = '' + + html += '' + html += '' + for col in self.rows[0]: + html += '' + str(col) + '' + html += '' + html += '' + return html + + def gen_table_body(self): + html = '' + + html += '' + html += '' + for row in self.rows[1:]: + html += '' + for col in row: + html += '' + str(col) + '' + html += '' + html += '' + html += '' + return html + + def to_html(self): + + html = '' + html += '' + html += self.gen_table_head() + html += self.gen_table_body() + html += '
' + + return html diff --git a/compiler/datasheet/library_page/library.py b/compiler/datasheet/library_page/library.py new file mode 100644 index 00000000..d23d4a75 --- /dev/null +++ b/compiler/datasheet/library_page/library.py @@ -0,0 +1,16 @@ +import os +import base64 + + +class library(): + + def __init__(self): + self.html = '' + + def generate_html(self): + vlsi_logo = 0 + with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: + vlsi_logo = base64.b64encode(image_file.read()) + + self.html += 'VLSIDA'.format(str(vlsi_logo)[ + 2:-1]) diff --git a/compiler/datasheet/library_page/library_gen.py b/compiler/datasheet/library_page/library_gen.py new file mode 100644 index 00000000..ca7f631d --- /dev/null +++ b/compiler/datasheet/library_page/library_gen.py @@ -0,0 +1,60 @@ +import library +import csv + + +class library_item(): + def __init__(self): + self.comment = '' + self.word_size = '' + self.num_words = '' + self.num_banks = '' + self.num_rw_ports = '' + self.num_r_ports = '' + self.num_w_ports = '' + self.Area = '' + self.git_id = '' + self.technology = '' + self.min_op = '' + + +class library_gen(): + def library_write(name): + with open(name, 'w+') as f: + library_page.generate_html() + f.write(library_page.html) + + def search_file(file, name): + length = len(name) + part = file.read(length) + i = 0 + while True: + if part == name: + break + char = file.read(1) + if not char: + return + part = part[1:] + char + i += 1 + return i + + def parse_html(file): + item = library_item() + start_tag = '' + + with open(file, 'r') as f: + start_byte = library_gen.search_file(f, start_tag) + len(start_tag) + end_byte = library_gen.search_file(f, end_tag) + start_byte + + f.seek(start_byte) + item.comment = f.read(end_byte - start_byte) + print(item.comment) + return item + + def parse_comment(comment, item): + + pass + + +library_page = library.library() +library_gen.parse_html('../../temp/sram_2_16_scn4m_subm.html') From 813a551691fd6e98f09e3f38184ca7b0c528b2e4 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Tue, 15 Jan 2019 20:48:20 -0800 Subject: [PATCH 04/13] comment parsing 1/2 complete; page gen setup complete --- compiler/datasheet/datasheet.py | 12 ++- compiler/datasheet/library_page/library.py | 2 +- .../datasheet/library_page/library_gen.py | 87 ++++++++++++++++--- 3 files changed, 83 insertions(+), 18 deletions(-) diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py index 566dfc80..22a50cb9 100644 --- a/compiler/datasheet/datasheet.py +++ b/compiler/datasheet/datasheet.py @@ -1,6 +1,5 @@ from table_gen import * import os -import csv import base64 from globals import OPTS @@ -22,13 +21,12 @@ class datasheet(): # css styling is kept in a seperate file self.html += datasheet_css.read() - -# with open(OPTS.openram_temp + "/datasheet.info") as info: + with open(OPTS.openram_temp + "/datasheet.info") as info: self.html += '' diff --git a/compiler/datasheet/library_page/library.py b/compiler/datasheet/library_page/library.py index d23d4a75..3477164c 100644 --- a/compiler/datasheet/library_page/library.py +++ b/compiler/datasheet/library_page/library.py @@ -7,7 +7,7 @@ class library(): def __init__(self): self.html = '' - def generate_html(self): + def generate_html(self,book): vlsi_logo = 0 with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file: vlsi_logo = base64.b64encode(image_file.read()) diff --git 
a/compiler/datasheet/library_page/library_gen.py b/compiler/datasheet/library_page/library_gen.py index ca7f631d..ca5942f7 100644 --- a/compiler/datasheet/library_page/library_gen.py +++ b/compiler/datasheet/library_page/library_gen.py @@ -1,9 +1,10 @@ import library -import csv +from pathlib import Path class library_item(): def __init__(self): + self.name = '' self.comment = '' self.word_size = '' self.num_words = '' @@ -13,14 +14,15 @@ class library_item(): self.num_w_ports = '' self.Area = '' self.git_id = '' - self.technology = '' - self.min_op = '' + self.tech_name = '' + self.min_period = '' + self.datetime = '' class library_gen(): - def library_write(name): + def library_write(name, book): with open(name, 'w+') as f: - library_page.generate_html() + library_page.generate_html(book) f.write(library_page.html) def search_file(file, name): @@ -37,6 +39,66 @@ class library_gen(): i += 1 return i + def parse_comment(item): + row = item.comment.split(',') + print(row) + found = 0 + col = 0 + + item.name = row[col] + col += 1 + + item.num_words = row[col] + col += 1 + + item.num_banks = row[col] + col += 1 + + item.num_rw_ports = row[col] + col += 1 + + item.num_w_port = row[col] + col += 1 + + item.num_r_ports = row[col] + col += 1 + + item.tech_name = row[col] + col += 1 + print(item.tech_name) +# TEMP = row[col] + col += 1 + +# VOLT = row[col] + col += 1 + +# PROC = row[col] + col += 1 + + item.min_period = row[col] + col += 1 + print(item.min_period) +# OUT_DIR = row[col] + col += 1 + +# LIB_NAME = row[col] + col += 1 + + item.word_size = row[col] + col += 1 + + item.git_id = row[col] + col += 1 + + item.datetime = row[col] + col += 1 + +# DRC = row[col] + col += 1 + +# LVS = row[col] + col += 1 + def parse_html(file): item = library_item() start_tag = '' - - with open(file, 'r') as f: - start_byte = library_gen.search_file(f, start_tag) + len(start_tag) - end_byte = library_gen.search_file(f, end_tag) + start_byte - - f.seek(start_byte) - item.comment = f.read(end_byte - start_byte) - library_gen.parse_comment(item) - - return item - - def get_file_tree(path): - return list(Path(path).rglob("*.html")) - - -datasheet_list = library_gen.get_file_tree('./deliverables') -print(datasheet_list) -library_page = library.library() -book = [] -for datasheet in datasheet_list: - book.append(library_gen.parse_html(datasheet)) -library_gen.library_write('index.html', book) From 2e7d2483ebab5a72f728dd66b2fa8872562e5072 Mon Sep 17 00:00:00 2001 From: Matt Guthaus Date: Wed, 16 Jan 2019 09:42:01 -0800 Subject: [PATCH 06/13] Use github formatted 3-clause BSD license --- LICENSE | 42 +++++++++++++++++++++--------------------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/LICENSE b/LICENSE index 9a86ca1f..7d362d0c 100644 --- a/LICENSE +++ b/LICENSE @@ -1,31 +1,31 @@ +BSD 3-Clause License + Copyright 2018 Regents of the University of California and The Board of Regents for the Oklahoma Agricultural and Mechanical College (acting for and on behalf of Oklahoma State University) +All rights reserved. Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: +modification, are permitted provided that the following conditions are met: -1. Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. -2. 
Redistributions in binary form must reproduce the above copyright -notice, this list of conditions and the following disclaimer in the -documentation and/or other materials provided with the distribution. +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. -3. Neither the name of the copyright holder nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - From 9431b93a1d411c14b9edf69d1712f3c5eed69c2b Mon Sep 17 00:00:00 2001 From: Matt Guthaus Date: Wed, 16 Jan 2019 09:43:31 -0800 Subject: [PATCH 07/13] Update copyright year --- LICENSE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/LICENSE b/LICENSE index 7d362d0c..761f6e8b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright 2018 Regents of the University of California and The Board +Copyright (c) 2019 Regents of the University of California and The Board of Regents for the Oklahoma Agricultural and Mechanical College (acting for and on behalf of Oklahoma State University) All rights reserved. 
From 553894c5a2da7e8485e4708be712e37bb3dc6748 Mon Sep 17 00:00:00 2001 From: Matt Guthaus Date: Wed, 16 Jan 2019 10:07:10 -0800 Subject: [PATCH 08/13] Remove library code and move to its own repository --- lib/Makefile | 14 -------- lib/README.md | 5 --- lib/freepdk45/Makefile | 32 ------------------ .../configs/sram_128b_1024_1rw_freepdk45.py | 8 ----- .../configs/sram_32b_1024_1rw_freepdk45.py | 7 ---- .../configs/sram_32b_2048_1rw_freepdk45.py | 7 ---- .../configs/sram_32b_256_1rw_freepdk45.py | 7 ---- .../configs/sram_32b_512_1rw_freepdk45.py | 7 ---- .../configs/sram_64b_1024_1rw_freepdk45.py | 7 ---- .../configs/sram_8b_1024_1rw_freepdk45.py | 7 ---- .../configs/sram_8b_256_1rw_freepdk45.py | 7 ---- .../configs/sram_8b_512_1rw_freepdk45.py | 7 ---- lib/scn4m_subm/Makefile | 33 ------------------- .../configs/sram_128b_1024_1rw_scn4m_subm.py | 12 ------- .../configs/sram_32b_1024_1rw_scn4m_subm.py | 12 ------- .../configs/sram_32b_2048_1rw_scn4m_subm.py | 12 ------- .../configs/sram_32b_256_1rw_scn4m_subm.py | 12 ------- .../configs/sram_32b_512_1rw_scn4m_subm.py | 12 ------- .../configs/sram_64b_1024_1rw_scn4m_subm.py | 12 ------- .../configs/sram_8b_1024_1rw_scn4m_subm.py | 12 ------- .../configs/sram_8b_256_1rw_scn4m_subm.py | 12 ------- .../configs/sram_8b_512_1rw_scn4m_subm.py | 12 ------- 22 files changed, 256 deletions(-) delete mode 100644 lib/Makefile delete mode 100644 lib/README.md delete mode 100644 lib/freepdk45/Makefile delete mode 100644 lib/freepdk45/configs/sram_128b_1024_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_32b_1024_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_32b_2048_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_32b_256_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_32b_512_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_64b_1024_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_8b_1024_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_8b_256_1rw_freepdk45.py delete mode 100644 lib/freepdk45/configs/sram_8b_512_1rw_freepdk45.py delete mode 100644 lib/scn4m_subm/Makefile delete mode 100644 lib/scn4m_subm/configs/sram_128b_1024_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_32b_1024_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_32b_2048_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_32b_256_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_32b_512_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_64b_1024_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_8b_1024_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_8b_256_1rw_scn4m_subm.py delete mode 100644 lib/scn4m_subm/configs/sram_8b_512_1rw_scn4m_subm.py diff --git a/lib/Makefile b/lib/Makefile deleted file mode 100644 index d9debb2c..00000000 --- a/lib/Makefile +++ /dev/null @@ -1,14 +0,0 @@ -SUBDIRS := $(wildcard */.) -SUBDIRSCLEAN=$(addsuffix clean,$(SUBDIRS)) - -all: $(SUBDIRS) - -$(SUBDIRS): - $(MAKE) -k -C $@ - -clean: - for dir in $(SUBDIRS); do \ - $(MAKE) -C $$dir $@; \ - done - -.PHONY: all $(SUBDIRS) $(SUBDIRSCLEAN) diff --git a/lib/README.md b/lib/README.md deleted file mode 100644 index b284ec12..00000000 --- a/lib/README.md +++ /dev/null @@ -1,5 +0,0 @@ -This directory contains a set of common sizes based on -discussions with users. All of the files are pre-computed -to that common-case users don't need to setup/use OpenRAM. 
-The results will be updated automatically as improvements -are made to OpenRAM. \ No newline at end of file diff --git a/lib/freepdk45/Makefile b/lib/freepdk45/Makefile deleted file mode 100644 index cf883d4c..00000000 --- a/lib/freepdk45/Makefile +++ /dev/null @@ -1,32 +0,0 @@ -CUR_DIR = $(shell pwd) -TEST_DIR = ${CUR_DIR}/tests - -#MAKEFLAGS += -j 2 - -CONFIG_DIR = configs -OUT_DIRS = sp lib lef gds verilog -$(shell mkdir -p $(OUT_DIRS)) - -SRCS=$(wildcard $(CONFIG_DIR)/*.py) -SPICES=$(SRCS:.py=.sp) -all : $(SPICES) - -# Characterize and perform DRC/LVS -OPTS = -c -# Do not characterize or perform DRC/LVS -#OPTS += -n -# Verbosity -OPTS += -v -%.sp : %.py - $(eval bname=$(basename $(notdir $<))) - openram.py $(OPTS) $< 2>&1 > $(bname).log - mv $(bname).lef lef - mv $(bname).v verilog - mv $(bname).sp sp - mv $(bname).gds gds - mv $(bname)*.lib lib - -clean: - rm -f *.log configs/*.pyc *~ *.gds *.lib *.sp *.v *.lef - rm -f gds/* lef/* lib/* sp/* verilog/* - diff --git a/lib/freepdk45/configs/sram_128b_1024_1rw_freepdk45.py b/lib/freepdk45/configs/sram_128b_1024_1rw_freepdk45.py deleted file mode 100644 index 1d131e16..00000000 --- a/lib/freepdk45/configs/sram_128b_1024_1rw_freepdk45.py +++ /dev/null @@ -1,8 +0,0 @@ -word_size = 128 -num_words = 1024 -num_banks = 1 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_32b_1024_1rw_freepdk45.py b/lib/freepdk45/configs/sram_32b_1024_1rw_freepdk45.py deleted file mode 100644 index 1085a755..00000000 --- a/lib/freepdk45/configs/sram_32b_1024_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 32 -num_words = 1024 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_32b_2048_1rw_freepdk45.py b/lib/freepdk45/configs/sram_32b_2048_1rw_freepdk45.py deleted file mode 100644 index e96bac37..00000000 --- a/lib/freepdk45/configs/sram_32b_2048_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 32 -num_words = 2048 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_32b_256_1rw_freepdk45.py b/lib/freepdk45/configs/sram_32b_256_1rw_freepdk45.py deleted file mode 100644 index 7ca6be89..00000000 --- a/lib/freepdk45/configs/sram_32b_256_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 32 -num_words = 256 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_32b_512_1rw_freepdk45.py b/lib/freepdk45/configs/sram_32b_512_1rw_freepdk45.py deleted file mode 100644 index 960e9f73..00000000 --- a/lib/freepdk45/configs/sram_32b_512_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 32 -num_words = 512 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_64b_1024_1rw_freepdk45.py b/lib/freepdk45/configs/sram_64b_1024_1rw_freepdk45.py deleted file mode 100644 index bb421711..00000000 --- a/lib/freepdk45/configs/sram_64b_1024_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 64 -num_words = 1024 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_8b_1024_1rw_freepdk45.py b/lib/freepdk45/configs/sram_8b_1024_1rw_freepdk45.py deleted file mode 100644 index 838d49d7..00000000 --- 
a/lib/freepdk45/configs/sram_8b_1024_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 8 -num_words = 1024 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_8b_256_1rw_freepdk45.py b/lib/freepdk45/configs/sram_8b_256_1rw_freepdk45.py deleted file mode 100644 index 84daa8f0..00000000 --- a/lib/freepdk45/configs/sram_8b_256_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 8 -num_words = 256 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/freepdk45/configs/sram_8b_512_1rw_freepdk45.py b/lib/freepdk45/configs/sram_8b_512_1rw_freepdk45.py deleted file mode 100644 index ab85aaa4..00000000 --- a/lib/freepdk45/configs/sram_8b_512_1rw_freepdk45.py +++ /dev/null @@ -1,7 +0,0 @@ -word_size = 8 -num_words = 512 - -tech_name = "freepdk45" -process_corners = ["TT"] -supply_voltages = [1.0] -temperatures = [25] diff --git a/lib/scn4m_subm/Makefile b/lib/scn4m_subm/Makefile deleted file mode 100644 index f833836a..00000000 --- a/lib/scn4m_subm/Makefile +++ /dev/null @@ -1,33 +0,0 @@ -CUR_DIR = $(shell pwd) -TEST_DIR = ${CUR_DIR}/tests - -#MAKEFLAGS += -j 2 - -CONFIG_DIR = configs -OUT_DIRS = sp lib lef gds verilog -$(shell mkdir -p $(OUT_DIRS)) - -SRCS=$(wildcard $(CONFIG_DIR)/*.py) -SPICES=$(SRCS:.py=.sp) -all : $(SPICES) - -OPTS = -# Characterize and perform DRC/LVS -#OPTS = -c -# Do not characterize or perform DRC/LVS -#OPTS += -n -# Verbosity -OPTS += -v -%.sp : %.py - $(eval bname=$(basename $(notdir $<))) - openram.py $(OPTS) $< 2>&1 > $(bname).log - mv $(bname).lef lef - mv $(bname).v verilog - mv $(bname).sp sp - mv $(bname).gds gds - mv $(bname)*.lib lib - -clean: - rm -f *.log configs/*.pyc *~ *.gds *.lib *.sp *.v *.lef - rm -f gds/* lef/* lib/* sp/* verilog/* - diff --git a/lib/scn4m_subm/configs/sram_128b_1024_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_128b_1024_1rw_scn4m_subm.py deleted file mode 100644 index aad83344..00000000 --- a/lib/scn4m_subm/configs/sram_128b_1024_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 128 -num_words = 1024 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_32b_1024_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_32b_1024_1rw_scn4m_subm.py deleted file mode 100644 index 162eaa6e..00000000 --- a/lib/scn4m_subm/configs/sram_32b_1024_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 32 -num_words = 1024 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_32b_2048_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_32b_2048_1rw_scn4m_subm.py deleted file mode 100644 index 7ce98b2e..00000000 --- a/lib/scn4m_subm/configs/sram_32b_2048_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 32 -num_words = 2048 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_32b_256_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_32b_256_1rw_scn4m_subm.py deleted file mode 100644 index 33547f16..00000000 --- a/lib/scn4m_subm/configs/sram_32b_256_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 
32 -num_words = 256 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_32b_512_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_32b_512_1rw_scn4m_subm.py deleted file mode 100644 index 88d5fc96..00000000 --- a/lib/scn4m_subm/configs/sram_32b_512_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 32 -num_words = 512 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_64b_1024_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_64b_1024_1rw_scn4m_subm.py deleted file mode 100644 index c9d7d116..00000000 --- a/lib/scn4m_subm/configs/sram_64b_1024_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 64 -num_words = 1024 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_8b_1024_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_8b_1024_1rw_scn4m_subm.py deleted file mode 100644 index 3770c138..00000000 --- a/lib/scn4m_subm/configs/sram_8b_1024_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 8 -num_words = 1024 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_8b_256_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_8b_256_1rw_scn4m_subm.py deleted file mode 100644 index 37e4bf50..00000000 --- a/lib/scn4m_subm/configs/sram_8b_256_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 8 -num_words = 256 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - diff --git a/lib/scn4m_subm/configs/sram_8b_512_1rw_scn4m_subm.py b/lib/scn4m_subm/configs/sram_8b_512_1rw_scn4m_subm.py deleted file mode 100644 index 5f4676d7..00000000 --- a/lib/scn4m_subm/configs/sram_8b_512_1rw_scn4m_subm.py +++ /dev/null @@ -1,12 +0,0 @@ -word_size = 8 -num_words = 512 - -tech_name = "scn4m_subm" -process_corners = ["TT"] -supply_voltages = [ 5.0 ] -temperatures = [ 25 ] - -drc_name = "magic" -lvs_name = "netgen" -pex_name = "magic" - From 0556b864245e15d799bf7d7227138d1db2c34381 Mon Sep 17 00:00:00 2001 From: Jesse Cirimelli-Low Date: Wed, 16 Jan 2019 14:52:01 -0800 Subject: [PATCH 09/13] html datasheet no longer dependeds on sram --- compiler/characterizer/lib.py | 4 ++-- compiler/datasheet/datasheet_gen.py | 12 +++++++----- compiler/sram.py | 2 +- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/compiler/characterizer/lib.py b/compiler/characterizer/lib.py index 7b10eb8f..156481ce 100644 --- a/compiler/characterizer/lib.py +++ b/compiler/characterizer/lib.py @@ -530,7 +530,7 @@ class lib: "sram_{0}_{1}_{2}".format(OPTS.word_size, OPTS.num_words, OPTS.tech_name), OPTS.num_words, OPTS.num_banks, - OPTS.num_rw_ports, + OPTS.num_rw_ports, OPTS.num_w_ports, OPTS.num_r_ports, OPTS.tech_name, @@ -555,7 +555,7 @@ class lib: LVS = str(total_lvs_errors) datasheet.write("{0},{1},".format(DRC, LVS)) - + datasheet.write(str(self.sram.width * self.sram.height)+',') for port in self.all_ports: #DIN timings if port in self.write_ports: diff --git 
a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py
index 93ab783c..902c058c 100644
--- a/compiler/datasheet/datasheet_gen.py
+++ b/compiler/datasheet/datasheet_gen.py
@@ -4,7 +4,7 @@ This is a script to load data from the characterization and layout processes
 int a web friendly html datasheet.
 """
 # TODO:
-# include log file
+# include power
 # Diagram generation
 # Improve css
 
@@ -31,7 +31,7 @@ def process_name(corner):
         return "custom"
 
 
-def parse_characterizer_csv(sram, f, pages):
+def parse_characterizer_csv(f, pages):
     """
     Parses output data of the Liberty file generator in order to construct
     the timing and current table
@@ -98,6 +98,8 @@ def parse_characterizer_csv(sram, f, pages):
             LVS = row[col]
             col += 1
+            AREA = row[col]
+            col += 1
 
             for sheet in pages:
                 if sheet.name == NAME:
@@ -529,11 +531,11 @@ def parse_characterizer_csv(sram, f, pages):
             new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS])
             new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS])
             new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS])
-            new_sheet.io_table.add_row(['Area', sram.width * sram.height])
+            new_sheet.io_table.add_row(['Area', AREA])
 
 
 class datasheet_gen():
-    def datasheet_write(sram, name):
+    def datasheet_write(name):
 
         in_dir = OPTS.openram_temp
 
@@ -541,7 +543,7 @@ class datasheet_gen():
             os.mkdir(in_dir)
 
         datasheets = []
-        parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets)
+        parse_characterizer_csv(in_dir + "/datasheet.info", datasheets)
 
         for sheets in datasheets:
             with open(name, 'w+') as f:
diff --git a/compiler/sram.py b/compiler/sram.py
index a929434e..5ff28d47 100644
--- a/compiler/sram.py
+++ b/compiler/sram.py
@@ -122,7 +122,7 @@ class sram():
             from datasheet_gen import datasheet_gen
             dname = OPTS.output_path + self.s.name + ".html"
             debug.print_raw("Datasheet: Writing to {0}".format(dname))
-            datasheet_gen.datasheet_write(self.s,dname)
+            datasheet_gen.datasheet_write(dname)
             print_time("Datasheet", datetime.datetime.now(), start_time)
 
         # Write a verilog model

From 41b8e8665b168ec93b3b2f746cb8806834745ea5 Mon Sep 17 00:00:00 2001
From: Jesse Cirimelli-Low
Date: Wed, 16 Jan 2019 15:43:08 -0800
Subject: [PATCH 10/13] updated datasheet descriptors

---
 compiler/datasheet/datasheet_gen.py | 18 ++++++++++++------
 1 file changed, 12 insertions(+), 6 deletions(-)

diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py
index 902c058c..6786c7bf 100644
--- a/compiler/datasheet/datasheet_gen.py
+++ b/compiler/datasheet/datasheet_gen.py
@@ -135,7 +135,7 @@ def parse_characterizer_csv(f, pages):
                             pass
 
                     while(True):
-
+                        col_start = col
                         if(row[col].startswith('DIN')):
                             start = col
                             for item in sheet.timing_table.rows:
@@ -332,11 +332,13 @@ def parse_characterizer_csv(f, pages):
                            col += 1
 
                        else:
+                            for element in row[col_start: col - 1]:
+                                sheet.description.append(str(element))
                            break
 
-                    datasheet.new_sheet.corners_table.add_row([PROC, process_name(
+                    new_sheet.corners_table.add_row([PROC, process_name(
                        PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
-                    datasheet.new_sheet.dlv_table.add_row(
+                    new_sheet.dlv_table.add_row(
                        ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))])
 
            if found == 0:
@@ -349,15 +351,16 @@ def parse_characterizer_csv(f, pages):
                new_sheet.time = DATETIME
                new_sheet.DRC = DRC
                new_sheet.LVS = LVS
-                new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS,
-                                         NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME]
+                new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS,
+                                         NUM_R_PORTS, TECH_NAME, MIN_PERIOD, WORD_SIZE, ORIGIN_ID, DATETIME]
                new_sheet.corners_table = table_gen.table_gen("corners")
                new_sheet.corners_table.add_row(
                    ['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix'])
                new_sheet.corners_table.add_row([PROC, process_name(
                    PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
-                new_sheet.operating_table = table_gen.table_gen("operating_table")
+                new_sheet.operating_table = table_gen.table_gen(
+                    "operating_table")
                new_sheet.operating_table.add_row(
                    ['Parameter', 'Min', 'Typ', 'Max', 'Units'])
                new_sheet.operating_table.add_row(
@@ -376,6 +379,7 @@ def parse_characterizer_csv(f, pages):
                new_sheet.timing_table.add_row(
                    ['Parameter', 'Min', 'Max', 'Units'])
 
                while(True):
+                    col_start = col
                    if(row[col].startswith('DIN')):
                        start = col
@@ -497,6 +501,8 @@ def parse_characterizer_csv(f, pages):
                        col += 1
 
                    else:
+                        for element in row[col_start:col-1]:
+                            sheet.description.append(str(element))
                        break
 
                new_sheet.dlv_table = table_gen.table_gen("dlv")

From 25b0da404f6408ec435c9bdd86a348c2f04793bd Mon Sep 17 00:00:00 2001
From: Jesse Cirimelli-Low
Date: Wed, 16 Jan 2019 16:08:41 -0800
Subject: [PATCH 11/13] removed EOL error in comment

---
 compiler/datasheet/datasheet.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py
index 22a50cb9..d15733d5 100644
--- a/compiler/datasheet/datasheet.py
+++ b/compiler/datasheet/datasheet.py
@@ -27,7 +27,6 @@ class datasheet():
             self.html += row
         # for item in self.description:
         #     self.html += item + ','
-        self.html += 'EOL'
         self.html += '-->'
 
         vlsi_logo = 0

From 9c8090d94bf0878125b656314d3a3fd6b836589e Mon Sep 17 00:00:00 2001
From: Jesse Cirimelli-Low
Date: Wed, 16 Jan 2019 19:56:23 -0800
Subject: [PATCH 12/13] added debug.info to logging

---
 compiler/debug.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/compiler/debug.py b/compiler/debug.py
index ea5eb45c..6a595569 100644
--- a/compiler/debug.py
+++ b/compiler/debug.py
@@ -70,3 +70,6 @@ def info(lev, str):
         else:
             class_name=mod.__name__
         print("[{0}/{1}]: {2}".format(class_name,frm[0].f_code.co_name,str))
+        log("[{0}/{1}]: {2}".format(class_name,frm[0].f_code.co_name,str))
+
+

From c20fb2a70e24f13e6d17d23905cc4d76fcaa149e Mon Sep 17 00:00:00 2001
From: Yusu Wang
Date: Thu, 17 Jan 2019 12:01:08 -0800
Subject: [PATCH 13/13] replace matrix with array

---
 compiler/gdsMill/gdsMill/vlsiLayout.py | 44 ++++++++++++--------------
 1 file changed, 20 insertions(+), 24 deletions(-)

diff --git a/compiler/gdsMill/gdsMill/vlsiLayout.py b/compiler/gdsMill/gdsMill/vlsiLayout.py
index 42921812..8d4816f6 100644
--- a/compiler/gdsMill/gdsMill/vlsiLayout.py
+++ b/compiler/gdsMill/gdsMill/vlsiLayout.py
@@ -1,7 +1,8 @@
 from .gdsPrimitives import *
 from datetime import *
 #from mpmath import matrix
-from numpy import matrix
+#from numpy import matrix
+import numpy as np
 #import gdsPrimitives
 import debug
 
@@ -170,21 +171,20 @@ class VlsiLayout:
         else:
            # MRG: Added negative to make CCW rotate 8/29/18
            angle = math.radians(float(rotateAngle))
-           mRotate = matrix([[math.cos(angle),-math.sin(angle),0.0],
+           mRotate = np.array([[math.cos(angle),-math.sin(angle),0.0],
                              [math.sin(angle),math.cos(angle),0.0],
                              [0.0,0.0,1.0]])
        #set up the translation matrix
        translateX = float(coordinates[0])
        translateY = float(coordinates[1])
-       mTranslate = matrix([[1.0,0.0,translateX],[0.0,1.0,translateY],[0.0,0.0,1.0]])
+       mTranslate = np.array([[1.0,0.0,translateX],[0.0,1.0,translateY],[0.0,0.0,1.0]])
        #set up the scale matrix (handles mirror X)
        scaleX = 1.0
        if(transFlags[0]):
            scaleY = -1.0
        else:
            scaleY = 1.0
-       mScale = matrix([[scaleX,0.0,0.0],[0.0,scaleY,0.0],[0.0,0.0,1.0]])
-
+       mScale = np.array([[scaleX,0.0,0.0],[0.0,scaleY,0.0],[0.0,0.0,1.0]])
        #we need to keep track of all transforms in the hierarchy
        #when we add an element to the xy tree, we apply all transforms from the bottom up
        transformPath.append((mRotate,mScale,mTranslate))
@@ -219,27 +219,26 @@ class VlsiLayout:
 
     def populateCoordinateMap(self):
         def addToXyTree(startingStructureName = None,transformPath = None):
-            #print("populateCoordinateMap")
-            uVector = matrix([1.0,0.0,0.0]).transpose() #start with normal basis vectors
-            vVector = matrix([0.0,1.0,0.0]).transpose()
-            origin = matrix([0.0,0.0,1.0]).transpose() #and an origin (Z component is 1.0 to indicate position instead of vector)
+            uVector = np.array([[1.0],[0.0],[0.0]]) #start with normal basis vectors
+            vVector = np.array([[0.0],[1.0],[0.0]])
+            origin = np.array([[0.0],[0.0],[1.0]]) #and an origin (Z component is 1.0 to indicate position instead of vector)
             #make a copy of all the transforms and reverse it
             reverseTransformPath = transformPath[:]
             if len(reverseTransformPath) > 1:
-                reverseTransformPath.reverse() 
+                reverseTransformPath.reverse()
             #now go through each transform and apply them to our basis and origin in succession
             for transform in reverseTransformPath:
-                origin = transform[0] * origin #rotate
-                uVector = transform[0] * uVector #rotate
-                vVector = transform[0] * vVector #rotate
-                origin = transform[1] * origin #scale
-                uVector = transform[1] * uVector #scale
-                vVector = transform[1] * vVector #scale
-                origin = transform[2] * origin #translate
+                origin = np.dot(transform[0], origin) #rotate
+                uVector = np.dot(transform[0], uVector) #rotate
+                vVector = np.dot(transform[0], vVector) #rotate
+                origin = np.dot(transform[1], origin) #scale
+                uVector = np.dot(transform[1], uVector) #scale
+                vVector = np.dot(transform[1], vVector) #scale
+                origin = np.dot(transform[2], origin) #translate
                 #we don't need to do a translation on the basis vectors
                 #uVector = transform[2] * uVector #translate
                 #vVector = transform[2] * vVector #translate
-            #populate the xyTree with each structureName and coordinate space
+            #populate the xyTree with each structureName and coordinate space 
             self.xyTree.append((startingStructureName,origin,uVector,vVector))
         self.traverseTheHierarchy(delegateFunction = addToXyTree)
 
@@ -522,8 +521,7 @@ class VlsiLayout:
 
         return True
 
-    def fillAreaDensity(self, layerToFill = 0, offsetInMicrons = (0,0), coverageWidth = 100.0, coverageHeight = 100.0,
-                        minSpacing = 0.22, blockSize = 1.0):
+    def fillAreaDensity(self, layerToFill = 0, offsetInMicrons = (0,0), coverageWidth = 100.0, coverageHeight = 100.0, minSpacing = 0.22, blockSize = 1.0):
         effectiveBlock = blockSize+minSpacing
         widthInBlocks = int(coverageWidth/effectiveBlock)
         heightInBlocks = int(coverageHeight/effectiveBlock)
@@ -810,8 +808,8 @@
         # This is fixed to be:
         # |u[0] v[0]| |x| |x'|
         # |u[1] v[1]|x|y|=|y'|
-        x=coordinate[0]*uVector[0].item()+coordinate[1]*vVector[0].item()
-        y=coordinate[0]*uVector[1].item()+coordinate[1]*vVector[1].item()
+        x=coordinate[0]*uVector[0][0]+coordinate[1]*vVector[0][0]
+        y=coordinate[0]*uVector[1][0]+coordinate[1]*vVector[1][0]
         transformCoordinate=[x,y]
         return transformCoordinate
 
@@ -836,5 +834,3 @@
 def boundaryArea(A):
     area_A=(A[2]-A[0])*(A[3]-A[1])
     return area_A
-
-
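
Note on PATCH 13/13: numpy.matrix overloads * as matrix multiplication, so dropping it means every transform application has to be written as an explicit np.dot call, which is what the populateCoordinateMap() hunk above does. Below is a minimal standalone sketch of that composition pattern, assuming the same 3x3 homogeneous transforms; the rotation angle and placement offset are made-up example values, not taken from any OpenRAM layout:

    import math
    import numpy as np

    # 3x3 homogeneous transforms, built the same way as in vlsiLayout.py.
    angle = math.radians(90.0)                       # example rotation
    mRotate = np.array([[math.cos(angle), -math.sin(angle), 0.0],
                        [math.sin(angle),  math.cos(angle), 0.0],
                        [0.0,              0.0,             1.0]])
    mScale = np.array([[1.0, 0.0, 0.0],              # a mirrored instance would flip one scale to -1.0
                       [0.0, 1.0, 0.0],
                       [0.0, 0.0, 1.0]])
    mTranslate = np.array([[1.0, 0.0, 5.0],          # example placement offset (5, 2)
                           [0.0, 1.0, 2.0],
                           [0.0, 0.0, 1.0]])

    # Column vectors: a third component of 1.0 marks a position, 0.0 a direction.
    origin = np.array([[0.0], [0.0], [1.0]])
    uVector = np.array([[1.0], [0.0], [0.0]])

    # Apply rotate, then scale, then translate with explicit np.dot calls,
    # as one step of the reverseTransformPath loop does.
    origin = np.dot(mRotate, origin)
    origin = np.dot(mScale, origin)
    origin = np.dot(mTranslate, origin)
    uVector = np.dot(mRotate, uVector)
    uVector = np.dot(mScale, uVector)                # basis vectors skip the translate step

    print(origin[0][0], origin[1][0])                # transformed origin: 5.0 2.0
    print(uVector[0][0], uVector[1][0])              # rotated basis vector: ~0.0 1.0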
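
The .item() to [0][0] change near the end of the same patch follows from the array representation: uVector is now a (3,1) numpy array, so uVector[0] is a length-1 array and a second [0] pulls out the scalar. A small illustration with arbitrary basis values, not taken from any real layout:

    import numpy as np

    uVector = np.array([[0.0], [1.0], [0.0]])    # e.g. the u basis after a 90 degree rotation
    vVector = np.array([[-1.0], [0.0], [0.0]])
    coordinate = (3.0, 4.0)                      # arbitrary point

    # Same 2x2 basis change as the patched x/y lines:
    x = coordinate[0]*uVector[0][0] + coordinate[1]*vVector[0][0]
    y = coordinate[0]*uVector[1][0] + coordinate[1]*vVector[1][0]
    print([x, y])                                # [-4.0, 3.0]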