diff --git a/compiler/globals.py b/compiler/globals.py
index af89eaa4..f19559e2 100644
--- a/compiler/globals.py
+++ b/compiler/globals.py
@@ -287,7 +287,7 @@ def setup_paths():
 
     # Add all of the subdirs to the python path
     # These subdirs are modules and don't need to be added: characterizer, verify
-    for subdir in ["gdsMill", "tests", "modules", "base", "pgates"]:
+    for subdir in ["gdsMill", "tests", "modules", "base", "pgates", "datasheet"]:
         full_path = "{0}/{1}".format(OPENRAM_HOME,subdir)
         debug.check(os.path.isdir(full_path),
                     "$OPENRAM_HOME/{0} does not exist: {1}".format(subdir,full_path))
diff --git a/compiler/openram.py b/compiler/openram.py
index c84817a3..a588f806 100755
--- a/compiler/openram.py
+++ b/compiler/openram.py
@@ -27,7 +27,6 @@ if len(args) != 1:
 
 # These depend on arguments, so don't load them until now.
 import debug
-
 init_openram(config_file=args[0], is_unit_test=False)
 
 # Only print banner here so it's not in unit tests
@@ -40,7 +39,7 @@ report_status()
 import verify
 from sram import sram
 from sram_config import sram_config
-import parser
+#from parser import *
 output_extensions = ["sp","v","lib"]
 if not OPTS.netlist_only:
     output_extensions.extend(["gds","lef"])
@@ -65,7 +64,9 @@ s.save()
 
 # generate datasheet from characterization of created SRAM
 if not OPTS.analytical_delay:
-    p = parser.parse(OPTS.openram_temp,os.environ.get('OPENRAM_HOME')+"/datasheets")
+    import datasheet_gen
+    p = datasheet_gen.parse(OPTS.openram_temp,os.environ.get('OPENRAM_HOME')+"/datasheet/datasheets")
+
 
 # Delete temp files etc.
 end_openram()
diff --git a/compiler/parser.py b/compiler/parser.py
deleted file mode 100644
index 4d514014..00000000
--- a/compiler/parser.py
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python3
-"""
-Datasheet Generator
-
-TODO:
-locate all port elements in .lib
-Locate all timing elements in .lib
-Diagram generation
-Improve css
-"""
-
-import os, math
-import optparse
-from flask_table import *
-import csv
-import contextlib
-from globals import OPTS
-
-class deliverables(Table):
-    typ = Col('Type')
-    description = Col('Description')
-    link = Col('Link')
-
-
-
-class deliverables_item(object):
-    def __init__(self, typ, description,link):
-        self.typ = typ
-        self.description = description
-        self.link = link
-
-class operating_conditions(Table):
-    parameter = Col('Parameter')
-    min = Col('Min')
-    typ = Col('Typ')
-    max = Col('Max')
-    units = Col('Units')
-
-class operating_conditions_item(object):
-    def __init__(self, parameter, min, typ, max, units):
-        self.parameter = parameter
-        self.min = min
-        self.typ = typ
-        self.max = max
-        self.units = units
-
-class timing_and_current_data(Table):
-    parameter = Col('Parameter')
-    min = Col('Min')
-    max = Col('Max')
-    units = Col('Units')
-
-class timing_and_current_data_item(object):
-    def __init__(self, parameter, min, max, units):
-        self.parameter = parameter
-        self.min = min
-        self.max = max
-        self.units = units
-
-class characterization_corners(Table):
-    corner_name = Col('Corner Name')
-    process = Col('Process')
-    power_supply = Col('Power Supply')
-    temperature = Col('Temperature')
-    library_name_suffix = Col('Library Name Suffix')
-
-class characterization_corners_item(object):
-    def __init__(self, corner_name, process, power_supply, temperature, library_name_suffix):
-        self.corner_name = corner_name
-        self.process = process
-        self.power_supply = power_supply
-        self.temperature = temperature
-        self.library_name_suffix = library_name_suffix
-
-def process_name(corner):
-    if corner == "TT":
-        return "Typical - Typical"
-    if corner == "SS":
-        return "Slow - Slow"
-    if corner == "FF":
-        return "Fast - Fast"
-    else:
-        return "custom"
-
-def parse_file(f,pages):
-    with open(f) as csv_file:
-        csv_reader = csv.reader(csv_file, delimiter=',')
-        line_count = 0
-        for row in csv_reader:
-            found = 0
-            NAME = row[0]
-            NUM_WORDS = row[1]
-            NUM_BANKS = row[2]
-            NUM_RW_PORTS = row[3]
-            NUM_W_PORTS = row[4]
-            NUM_R_PORTS = row[5]
-            TECH_NAME = row[6]
-            TEMP = row[7]
-            VOLT = row[8]
-            PROC = row[9]
-            MIN_PERIOD = row[10]
-            OUT_DIR = row[11]
-            LIB_NAME = row[12]
-            for sheet in pages:
-
-
-                if sheet.name == row[0]:
-                    found = 1
-                    #if the .lib information is for an existing datasheet compare timing data
-
-                    for item in sheet.operating:
-
-                        if item.parameter == 'Operating Temperature':
-                            if float(TEMP) > float(item.max):
-                                item.typ = item.max
-                                item.max = TEMP
-                            if float(TEMP) < float(item.min):
-                                item.typ = item.min
-                                item.min = TEMP
-
-                        if item.parameter == 'Power supply (VDD) range':
-                            if float(VOLT) > float(item.max):
-                                item.typ = item.max
-                                item.max = VOLT
-                            if float(VOLT) < float(item.min):
-                                item.typ = item.min
-                                item.min = VOLT
-
-                        if item.parameter == 'Operating Frequncy (F)':
-                            if float(math.floor(1000/float(MIN_PERIOD)) < float(item.max)):
-                                item.max = str(math.floor(1000/float(MIN_PERIOD)))
-
-
-
-                    new_sheet.corners.append(characterization_corners_item(PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')))
-                    new_sheet.dlv.append(deliverables_item('.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))))
-
-            if found == 0:
-                new_sheet = datasheet(NAME)
-                pages.append(new_sheet)
-
-                new_sheet.corners.append(characterization_corners_item(PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')))
-
-                new_sheet.operating.append(operating_conditions_item('Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts'))
-                new_sheet.operating.append(operating_conditions_item('Operating Temperature',TEMP,TEMP,TEMP,'Celsius'))
-                new_sheet.operating.append(operating_conditions_item('Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz'))
-
-                new_sheet.timing.append(timing_and_current_data_item('1','2','3','4'))
-
-                new_sheet.dlv.append(deliverables_item('.sp','SPICE netlists','{1}.{2}'.format(OUT_DIR,NAME,'sp')))
-                new_sheet.dlv.append(deliverables_item('.v','Verilog simulation models','{1}.{2}'.format(OUT_DIR,NAME,'v')))
-                new_sheet.dlv.append(deliverables_item('.gds','GDSII layout views','{1}.{2}'.format(OUT_DIR,NAME,'gds')))
-                new_sheet.dlv.append(deliverables_item('.lef','LEF files','{1}.{2}'.format(OUT_DIR,NAME,'lef')))
-                new_sheet.dlv.append(deliverables_item('.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))))
-
-
-
-class datasheet():
-
-    def __init__(self,identifier):
-        self.corners = []
-        self.timing = []
-        self.operating = []
-        self.dlv = []
-        self.name = identifier
-        self.html = ""
-
-    def generate_html(self):
-        self.html += """"""
-        self.html +='{0}'
-        self.html +='{0}'
-        self.html +='{0}'
-        self.html +='Operating Conditions'
-        self.html += operating_conditions(self.operating,table_id='data').__html__()
-        self.html += 'Timing and Current Data'
-        self.html += timing_and_current_data(self.timing,table_id='data').__html__()
-        self.html += 'Characterization Corners'
-        self.html += characterization_corners(self.corners,table_id='data').__html__()
-        self.html +='Deliverables'
-        self.html += deliverables(self.dlv,table_id='data').__html__().replace('&lt;','<').replace('&quot;','"').replace('&gt;',">")
-
-
-class parse():
-    def __init__(self,in_dir,out_dir):
-
-        if not (os.path.isdir(in_dir)):
-            os.mkdir(in_dir)
-
-        if not (os.path.isdir(out_dir)):
-            os.mkdir(out_dir)
-
-        datasheets = []
-        parse_file(in_dir + "/datasheet.info", datasheets)
-
-
-        for sheets in datasheets:
-#            print (out_dir + sheets.name + ".html")
-            with open(out_dir + "/" + sheets.name + ".html", 'w+') as f:
-                sheets.generate_html()
-                f.write(sheets.html)
diff --git a/compiler/tests/30_openram_test.py b/compiler/tests/30_openram_test.py
index 7be820e0..d53182fc 100755
--- a/compiler/tests/30_openram_test.py
+++ b/compiler/tests/30_openram_test.py
@@ -63,9 +63,9 @@ class openram_test(openram_test):
         files = glob.glob('{0}/*.lib'.format(out_path))
         self.assertTrue(len(files)>0)
 
-        # Make sure there is any .html file if characterizer was ran
-        if not OPTS.analytical_delay:
-            datasheets = glob.glob('{0}/{1}/*html'.format(OPENRAM_HOME,'datasheets'))
+        # Make sure there is at least one .html file
+        if os.path.exists(os.environ.get('OPENRAM_HOME')+"/datasheet/datasheets"):
+            datasheets = glob.glob('{0}/{1}/*html'.format(OPENRAM_HOME,'datasheet/datasheets'))
             self.assertTrue(len(datasheets)>0)
 
         # grep any errors from the output
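
Note on the relocated call site: the minimal sketch below shows how the datasheet flow is driven after characterization once this patch is applied. It is not part of the patch itself; it assumes datasheet_gen keeps the parse(in_dir, out_dir) entry point that the deleted parser.py exposed, and "/tmp/openram" is only a stand-in for OPTS.openram_temp.

    import os
    import datasheet_gen

    in_dir = "/tmp/openram"                                             # characterizer output dir, contains datasheet.info
    out_dir = os.environ.get("OPENRAM_HOME") + "/datasheet/datasheets"  # new HTML output location introduced by this patch
    datasheet_gen.parse(in_dir, out_dir)                                # writes one <sram_name>.html per SRAM listed in datasheet.info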