diff --git a/compiler/datasheet/datasheet.py b/compiler/datasheet/datasheet.py
index ce84c22c..566dfc80 100644
--- a/compiler/datasheet/datasheet.py
+++ b/compiler/datasheet/datasheet.py
@@ -4,59 +4,60 @@ import csv
import base64
from globals import OPTS
+
 class datasheet():
     """
     Defines the layout,but not the data, of the html datasheet
     """
-    def __init__(self,identifier):
+
+    def __init__(self, identifier):
         self.name = identifier
         self.html = ""
-
     def generate_html(self):
         """
         Generates html tables using flask-table
         """
         with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/datasheet.css', 'r') as datasheet_css:
-            #css styling is kept in a seperate file
+            # css styling is kept in a separate file
             self.html += datasheet_css.read()
-
-# with open(OPTS.openram_temp + "/datasheet.info") as info:
+
+# with open(OPTS.openram_temp + "/datasheet.info") as info:
         self.html += '<!--'
-
+        self.html += '-->'
+
         vlsi_logo = 0
-        with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png' , "rb") as image_file:
+        with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file:
             vlsi_logo = base64.b64encode(image_file.read())
         openram_logo = 0
-        with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png' , "rb") as image_file:
+        with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png', "rb") as image_file:
             openram_logo = base64.b64encode(image_file.read())
+        self.html += ''.format(str(vlsi_logo)[
+            2:-1])
-        self.html += ''.format(str(vlsi_logo)[2:-1])
-
+        self.html += '' + \
+            self.name + '.html' + ''
+        self.html += 'Compiled at: ' + self.time + ''
+        self.html += '' + \
+            'DRC errors: ' + str(self.DRC) + ''
+        self.html += '' + \
+            'LVS errors: ' + str(self.LVS) + ''
+        self.html += '' + \
+            'Git commit id: ' + str(self.git_id) + ''
-
-
-
-        self.html +=''+ self.name + '.html' + ''
-        self.html +='Compiled at: '+ self.time + ''
-        self.html +=''+ 'DRC errors: ' + str(self.DRC) + ''
-        self.html +=''+ 'LVS errors: ' + str(self.LVS) + ''
-        self.html += ''+ 'Git commit id: ' + str(self.git_id) + ''
-
-        self.html +='Ports and Configuration'
+        self.html += 'Ports and Configuration'
 #        self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&quot;','"').replace('&gt;',">")
         self.html += self.io_table.to_html()
-
-        self.html +='Operating Conditions'
+
+        self.html += 'Operating Conditions'
 #        self.html += operating_conditions(self.operating,table_id='data').__html__()
         self.html += self.operating_table.to_html()
@@ -68,9 +69,6 @@ class datasheet():
 #        self.html += characterization_corners(self.corners,table_id='data').__html__()
         self.html += self.corners_table.to_html()
-        self.html +='Deliverables'
+        self.html += 'Deliverables'
 #        self.html += deliverables(self.dlv,table_id='data').__html__().replace('&lt;','<').replace('&quot;','"').replace('&gt;',">")
         self.html += self.dlv_table.to_html()
-
-
-
diff --git a/compiler/datasheet/datasheet_gen.py b/compiler/datasheet/datasheet_gen.py
index 9fcf921f..93ab783c 100644
--- a/compiler/datasheet/datasheet_gen.py
+++ b/compiler/datasheet/datasheet_gen.py
@@ -1,21 +1,21 @@
 #!/usr/bin/env python3
 """
-This is a script to load data from the characterization and layout processes into 
+This is a script to load data from the characterization and layout processes into
 a web friendly html datasheet.
 """
-#TODO:
-#include log file
-#Diagram generation
-#Improve css
+# TODO:
+# include log file
+# Diagram generation
+# Improve css
 
-import debug
 from globals import OPTS
-import os, math
-import optparse
+import os
+import math
 import csv
-from datasheet import *
-from table_gen import *
+import datasheet
+import table_gen
+
 
 def process_name(corner):
     """
@@ -30,20 +30,20 @@ def process_name(corner):
     else:
         return "custom"
 
-def parse_characterizer_csv(sram,f,pages):
+
+def parse_characterizer_csv(sram, f, pages):
     """
     Parses output data of the Liberty file generator in order to construct the timing
     and current table
     """
     with open(f) as csv_file:
         csv_reader = csv.reader(csv_file, delimiter=',')
-        line_count = 0
 
         for row in csv_reader:
             found = 0
             col = 0
-            #defines layout of csv file
+            # defines layout of csv file
 
             NAME = row[col]
             col += 1
@@ -85,29 +85,28 @@ def parse_characterizer_csv(sram,f,pages):
             WORD_SIZE = row[col]
             col += 1
-
+
             ORIGIN_ID = row[col]
             col += 1
 
             DATETIME = row[col]
-            col+= 1
+            col += 1
 
             DRC = row[col]
             col += 1
 
             LVS = row[col]
             col += 1
-
-            for sheet in pages:
+            for sheet in pages:
                 if sheet.name == NAME:
                     found = 1
-                    #if the .lib information is for an existing datasheet compare timing data
+                    # if the .lib information is for an existing datasheet compare timing data
 
                     for item in sheet.operating_table.rows:
-                        #check if the new corner data is worse than the previous worse corner data
+                        # check if the new corner data is worse than the previous worse corner data
 
                         if item[0] == 'Operating Temperature':
 
                             if float(TEMP) > float(item[3]):
@@ -128,14 +127,13 @@ def parse_characterizer_csv(sram,f,pages):
                         if item[0] == 'Operating Frequncy (F)':
                             try:
                                 if float(math.floor(1000/float(MIN_PERIOD)) < float(item[3])):
-                                    item[3] = str(math.floor(1000/float(MIN_PERIOD)))
+                                    item[3] = str(math.floor(
+                                        1000/float(MIN_PERIOD)))
 
                             except Exception:
                                 pass
-
                     while(True):
-
                         if(row[col].startswith('DIN')):
                             start = col
 
                             for item in sheet.timing_table.rows:
@@ -253,7 +251,6 @@ def parse_characterizer_csv(sram,f,pages):
 
                             col += 1
 
-
                         elif(row[col].startswith('WEb')):
                             start = col
 
                             for item in sheet.timing_table.rows:
@@ -293,7 +290,6 @@ def parse_characterizer_csv(sram,f,pages):
 
                             col += 1
 
-
                         elif(row[col].startswith('ADDR')):
                             start = col
 
                             for item in sheet.timing_table.rows:
@@ -333,198 +329,220 @@ def parse_characterizer_csv(sram,f,pages):
 
                             col += 1
 
-
-
                         else:
                             break
 
-                    new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
-                    new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
+                    datasheet.new_sheet.corners_table.add_row([PROC, process_name(
+                        PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
+                    datasheet.new_sheet.dlv_table.add_row(
+                        ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))])
 
             if found == 0:
-
-                #if this is the first corner for this sram, run first time configuration and set up tables
-                new_sheet = datasheet(NAME)
+
+                # if this is the first corner for this sram, run first time configuration and set up tables
+                new_sheet = datasheet.datasheet(NAME)
                 pages.append(new_sheet)
                 new_sheet.git_id = ORIGIN_ID
                 new_sheet.time = DATETIME
                 new_sheet.DRC = DRC
                 new_sheet.LVS = LVS
-                new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME]
+                new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS,
+                                         NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME]
 
-                new_sheet.corners_table = table_gen("corners")
-                new_sheet.corners_table.add_row(['Corner Name','Process','Power Supply','Temperature','Library Name Suffix'])
-                new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
-                new_sheet.operating_table = table_gen("operating_table")
-                new_sheet.operating_table.add_row(['Parameter','Min','Typ','Max','Units'])
-                new_sheet.operating_table.add_row(['Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts'])
-                new_sheet.operating_table.add_row(['Operating Temperature',TEMP,TEMP,TEMP,'Celsius'])
+                new_sheet.corners_table = table_gen.table_gen("corners")
+                new_sheet.corners_table.add_row(
+                    ['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix'])
+                new_sheet.corners_table.add_row([PROC, process_name(
+                    PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
+                new_sheet.operating_table = table_gen.table_gen("operating_table")
+                new_sheet.operating_table.add_row(
+                    ['Parameter', 'Min', 'Typ', 'Max', 'Units'])
+                new_sheet.operating_table.add_row(
+                    ['Power supply (VDD) range', VOLT, VOLT, VOLT, 'Volts'])
+                new_sheet.operating_table.add_row(
+                    ['Operating Temperature', TEMP, TEMP, TEMP, 'Celsius'])
                 try:
-                    new_sheet.operating_table.add_row(['Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz'])
+                    new_sheet.operating_table.add_row(['Operating Frequency (F)', '', '', str(
+                        math.floor(1000/float(MIN_PERIOD))), 'MHz'])
                 except Exception:
-                    new_sheet.operating_table.add_row(['Operating Frequency (F)','','',"not available in netlist only",'MHz']) #failed to provide non-zero MIN_PERIOD
-                new_sheet.timing_table = table_gen("timing")
-                new_sheet.timing_table.add_row(['Parameter','Min','Max','Units'])
+                    # failed to provide non-zero MIN_PERIOD
+                    new_sheet.operating_table.add_row(
+                        ['Operating Frequency (F)', '', '', "not available in netlist only", 'MHz'])
+                new_sheet.timing_table = table_gen.table_gen("timing")
+                new_sheet.timing_table.add_row(
+                    ['Parameter', 'Min', 'Max', 'Units'])
 
                 while(True):
                     if(row[col].startswith('DIN')):
                         start = col
-
-                        new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
-
-                        new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
-
-                        col += 2
-
-                        new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        col +=1
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
+
+                        col += 2
+
+                        col += 1
                     elif(row[col].startswith('DOUT')):
                         start = col
-
-                        new_sheet.timing_table.add_row(['{0} cell rise'.format(row[start]),row[col+1],row[col+2],'ns'])
+
+                        new_sheet.timing_table.add_row(
+                            ['{0} cell rise'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
-
-                        new_sheet.timing_table.add_row(['{0} cell fall'.format(row[start]),row[col+1],row[col+2],'ns'])
-
-                        col += 2
-
-                        new_sheet.timing_table.add_row(['{0} rise transition'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} cell fall'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        new_sheet.timing_table.add_row(['{0} fall transition'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} rise transition'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        col +=1
+                        new_sheet.timing_table.add_row(
+                            ['{0} fall transition'.format(row[start]), row[col+1], row[col+2], 'ns'])
+
+                        col += 2
+
+                        col += 1
                     elif(row[col].startswith('CSb')):
                         start = col
-
-                        new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
-
-                        new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
-
-                        col += 2
-
-                        new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        col +=1
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
+
+                        col += 2
+
+                        col += 1
                     elif(row[col].startswith('WEb')):
                         start = col
-
-                        new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
-
-                        new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
-
-                        col += 2
-
-                        new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        col +=1
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
+
+                        col += 2
+
+                        col += 1
                     elif(row[col].startswith('ADDR')):
                         start = col
-
-                        new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
-
-                        new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
-
-                        col += 2
-
-                        new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
 
                         col += 2
 
-                        col +=1
+                        new_sheet.timing_table.add_row(
+                            ['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
+
+                        col += 2
+
+                        col += 1
                     else:
                         break
+                new_sheet.dlv_table = table_gen.table_gen("dlv")
+                new_sheet.dlv_table.add_row(['Type', 'Description', 'Link'])
-
-                new_sheet.dlv_table = table_gen("dlv")
-                new_sheet.dlv_table.add_row(['Type','Description','Link'])
-
-                new_sheet.io_table = table_gen("io")
+                new_sheet.io_table = table_gen.table_gen("io")
                 new_sheet.io_table.add_row(['Type', 'Value'])
 
                 if not OPTS.netlist_only:
-                    #physical layout files should not be generated in netlist only mode
-                    new_sheet.dlv_table.add_row(['.gds','GDSII layout views','{0}.{1}'.format(OPTS.output_name,'gds')])
-                    new_sheet.dlv_table.add_row(['.lef','LEF files','{0}.{1}'.format(OPTS.output_name,'lef')])
-
-
-                new_sheet.dlv_table.add_row(['.log','OpenRAM compile log','{0}.{1}'.format(OPTS.output_name,'log')])
-                new_sheet.dlv_table.add_row(['.v','Verilog simulation models','{0}.{1}'.format(OPTS.output_name,'v')])
-                new_sheet.dlv_table.add_row(['.html','This datasheet','{0}.{1}'.format(OPTS.output_name,'html')])
-                new_sheet.dlv_table.add_row(['.lib','Synthesis models','{1}'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
-                new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','{0}.{1}'.format(OPTS.output_name,'py')])
-                new_sheet.dlv_table.add_row(['.sp','SPICE netlists','{0}.{1}'.format(OPTS.output_name,'sp')])
-
-                new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE])
-                new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS])
-                new_sheet.io_table.add_row(['NUM_BANKS',NUM_BANKS])
-                new_sheet.io_table.add_row(['NUM_RW_PORTS',NUM_RW_PORTS])
-                new_sheet.io_table.add_row(['NUM_R_PORTS',NUM_R_PORTS])
-                new_sheet.io_table.add_row(['NUM_W_PORTS',NUM_W_PORTS])
-                new_sheet.io_table.add_row(['Area',sram.width * sram.height])
-
-
+                    # physical layout files should not be generated in netlist only mode
+                    new_sheet.dlv_table.add_row(
+                        ['.gds', 'GDSII layout views', '{0}.{1}'.format(OPTS.output_name, 'gds')])
+                    new_sheet.dlv_table.add_row(
+                        ['.lef', 'LEF files', '{0}.{1}'.format(OPTS.output_name, 'lef')])
+                new_sheet.dlv_table.add_row(
+                    ['.log', 'OpenRAM compile log', '{0}.{1}'.format(OPTS.output_name, 'log')])
+                new_sheet.dlv_table.add_row(
+                    ['.v', 'Verilog simulation models', '{0}.{1}'.format(OPTS.output_name, 'v')])
+                new_sheet.dlv_table.add_row(
+                    ['.html', 'This datasheet', '{0}.{1}'.format(OPTS.output_name, 'html')])
+                new_sheet.dlv_table.add_row(
+                    ['.lib', 'Synthesis models', '{1}'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))])
+                new_sheet.dlv_table.add_row(
+                    ['.py', 'OpenRAM configuration file', '{0}.{1}'.format(OPTS.output_name, 'py')])
+                new_sheet.dlv_table.add_row(
+                    ['.sp', 'SPICE netlists', '{0}.{1}'.format(OPTS.output_name, 'sp')])
+                new_sheet.io_table.add_row(['WORD_SIZE', WORD_SIZE])
+                new_sheet.io_table.add_row(['NUM_WORDS', NUM_WORDS])
+                new_sheet.io_table.add_row(['NUM_BANKS', NUM_BANKS])
+                new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS])
+                new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS])
+                new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS])
+                new_sheet.io_table.add_row(['Area', sram.width * sram.height])
 
 
 class datasheet_gen():
-    def datasheet_write(sram,name):
-
+    def datasheet_write(sram, name):
         in_dir = OPTS.openram_temp
         if not (os.path.isdir(in_dir)):
             os.mkdir(in_dir)
-
         datasheets = []
         parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets)
-
         for sheets in datasheets:
             with open(name, 'w+') as f:
                 sheets.generate_html()
diff --git a/compiler/datasheet/library_page/lib_table_gen.py b/compiler/datasheet/library_page/lib_table_gen.py
new file mode 100644
index 00000000..c35d09c0
--- /dev/null
+++ b/compiler/datasheet/library_page/lib_table_gen.py
@@ -0,0 +1,43 @@
+class table_gen:
+    def __init__(self, name):
+        self.name = name
+        self.rows = []
+        self.table_id = 'data'
+
+    def add_row(self, row):
+        self.rows.append(row)
+
+    def gen_table_head(self):
+        html = ''
+
+        html += ''
+        html += '