tables stable and flask removed, headers are bugged

Jesse Cirimelli-Low 2019-01-08 19:50:47 -08:00
parent 6033cc604d
commit e58515b89b
9 changed files with 150 additions and 444 deletions
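Note: the new code in this commit calls a project-local table_gen helper (table_gen(name), add_row(list), .rows, .to_html()) whose source is not part of this diff. As a rough orientation only, here is a minimal sketch of that interface inferred from the call sites below; the real table_gen.py may render headers and CSS differently.

class table_gen:
    """Sketch of the table helper this commit assumes; interface inferred from its call sites."""
    def __init__(self, name):
        self.name = name
        self.rows = []  # each row is a plain list of cell values

    def add_row(self, row):
        self.rows.append(row)

    def to_html(self):
        # Render the collected rows as a bare HTML table.
        # The real implementation presumably also emits header cells and styling.
        html = '<table id="{0}">'.format(self.name)
        for row in self.rows:
            html += '<tr>' + ''.join('<td>{0}</td>'.format(cell) for cell in row) + '</tr>'
        html += '</table>'
        return html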

View File

@ -1,23 +0,0 @@
from flask_table import *
class characterization_corners(Table):
"""
Set up characterization corners table columns and title information
"""
corner_name = Col('Corner Name')
process = Col('Process')
power_supply = Col('Power Supply')
temperature = Col('Temperature')
library_name_suffix = Col('Library Name Suffix')
class characterization_corners_item(object):
"""
Defines the contents of a characterization corner table row
"""
def __init__(self, corner_name, process, power_supply, temperature, library_name_suffix):
self.corner_name = corner_name
self.process = process
self.power_supply = power_supply
self.temperature = temperature
self.library_name_suffix = library_name_suffix

View File

@ -1,10 +1,4 @@
from table_gen import *
from flask_table import *
from operating_conditions import *
from characterization_corners import *
from deliverables import *
from timing_and_current_data import *
from in_out import *
import os
import csv
import base64
@ -15,11 +9,6 @@ class datasheet():
Defines the layout, but not the data, of the html datasheet
"""
def __init__(self,identifier):
self.io = []
self.corners = []
self.timing = []
self.operating = []
self.dlv = []
self.name = identifier
self.html = ""
@ -64,22 +53,24 @@ class datasheet():
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">'+ 'Git commit id: ' + str(self.git_id) + '</p>'
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Ports and Configuration (DEBUG)</p>'
#self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
# self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
self.html += self.io_table.to_html()
# for row in self.io_table.rows:
# print(row)
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Operating Conditions</p>'
self.html += operating_conditions(self.operating,table_id='data').__html__()
# self.html += operating_conditions(self.operating,table_id='data').__html__()
self.html += self.operating_table.to_html()
self.html += '<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Timing and Current Data</p>'
self.html += timing_and_current_data(self.timing,table_id='data').__html__()
# self.html += timing_and_current_data(self.timing,table_id='data').__html__()
self.html += self.timing_table.to_html()
self.html += '<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Characterization Corners</p>'
self.html += characterization_corners(self.corners,table_id='data').__html__()
# self.html += characterization_corners(self.corners,table_id='data').__html__()
self.html += self.corners_table.to_html()
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Deliverables</p>'
self.html += deliverables(self.dlv,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
# self.html += deliverables(self.dlv,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
self.html += self.dlv_table.to_html()

View File

@ -1,35 +1,21 @@
#!/usr/bin/env python3
"""
This is a script to load data from the characterization and layout processes into
a web friendly html datasheet. This script requires the python-flask and flask-table
packages to be installed.
a web friendly html datasheet.
"""
#TODO:
#locate all port elements in .lib
#Locate all timing elements in .lib
#include log file
#Diagram generation
#Improve css
import debug
from globals import OPTS
if OPTS.datasheet_gen:
import flask_table
import os, math
import optparse
import csv
from table_gen import *
from deliverables import *
from operating_conditions import *
from timing_and_current_data import *
from characterization_corners import *
from datasheet import *
from in_out import *
else:
debug.warning("Python library flask_table not found. Skipping html datasheet generation. This can be installed with pip install flask-table.")
#make sure appropriate python libraries are installed
import os, math
import optparse
import csv
from datasheet import *
from table_gen import *
def process_name(corner):
"""
@ -120,37 +106,11 @@ def parse_characterizer_csv(sram,f,pages):
found = 1
#if the .lib information is for an existing datasheet, compare timing data
for item in sheet.operating:
#check if the new corner data is worse than the previous worst corner data
if item.parameter == 'Operating Temperature':
if float(TEMP) > float(item.max):
item.typ = item.max
item.max = TEMP
if float(TEMP) < float(item.min):
item.typ = item.min
item.min = TEMP
if item.parameter == 'Power supply (VDD) range':
if float(VOLT) > float(item.max):
item.typ = item.max
item.max = VOLT
if float(VOLT) < float(item.min):
item.typ = item.min
item.min = VOLT
if item.parameter == 'Operating Frequncy (F)':
try:
if float(math.floor(1000/float(MIN_PERIOD)) < float(item.max)):
item.max = str(math.floor(1000/float(MIN_PERIOD)))
except Exception:
pass
#
for item in sheet.operating_table.rows:
#check if the new corner data is worse than the previous worst corner data
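#operating_table rows are plain lists: [parameter, min, typ, max, units]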
if item[0] == 'Operating Temperature':
if float(TEMP) > float(ite[3]):
if float(TEMP) > float(item[3]):
item[2] = item[3]
item[3] = TEMP
if float(TEMP) < float(item[1]):
@ -165,49 +125,51 @@ def parse_characterizer_csv(sram,f,pages):
item[2] = item[1]
item[1] = VOLT
if item.parameter == 'Operating Frequncy (F)':
if item[0] == 'Operating Frequency (F)':
try:
if math.floor(1000/float(MIN_PERIOD)) < float(item[3]):
item[3] = str(math.floor(1000/float(MIN_PERIOD)))
except Exception:
pass
#
while(True):
if(row[col].startswith('DIN')):
start = col
for item in sheet.timing:
if item.parameter.startswith(row[col]):
for item in sheet.timing_table.rows:
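#timing_table rows are plain lists: [parameter, min, max, units]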
if item[0].startswith(row[col]):
if item.parameter.endswith('setup rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
if item[0].endswith('setup rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('setup falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('setup falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
@ -215,38 +177,38 @@ def parse_characterizer_csv(sram,f,pages):
elif(row[col].startswith('DOUT')):
start = col
for item in sheet.timing:
if item.parameter.startswith(row[col]):
for item in sheet.timing_table.rows:
if item[0].startswith(row[col]):
if item.parameter.endswith('cell rise'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
if item[0].endswith('cell rise'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('cell fall'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('cell fall'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('rise transition'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('rise transition'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('fall transition'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('fall transition'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
@ -254,38 +216,38 @@ def parse_characterizer_csv(sram,f,pages):
elif(row[col].startswith('CSb')):
start = col
for item in sheet.timing:
if item.parameter.startswith(row[col]):
for item in sheet.timing_table.rows:
if item[0].startswith(row[col]):
if item.parameter.endswith('setup rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
if item[0].endswith('setup rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('setup falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('setup falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
@ -294,38 +256,38 @@ def parse_characterizer_csv(sram,f,pages):
elif(row[col].startswith('WEb')):
start = col
for item in sheet.timing:
if item.parameter.startswith(row[col]):
for item in sheet.timing_table.rows:
if item[0].startswith(row[col]):
if item.parameter.endswith('setup rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
if item[0].endswith('setup rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('setup falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('setup falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
@ -334,38 +296,38 @@ def parse_characterizer_csv(sram,f,pages):
elif(row[col].startswith('ADDR')):
start = col
for item in sheet.timing:
if item.parameter.startswith(row[col]):
for item in sheet.timing_table.rows:
if item[0].startswith(row[col]):
if item.parameter.endswith('setup rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
if item[0].endswith('setup rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('setup falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('setup falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold rising'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold rising'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
elif item.parameter.endswith('hold falling'):
if float(row[col+1]) < float(item.min):
item.min = row[col+1]
if float(row[col+2]) > float(item.max):
item.max = row[col+2]
elif item[0].endswith('hold falling'):
if float(row[col+1]) < float(item[1]):
item[1] = row[col+1]
if float(row[col+2]) > float(item[2]):
item[2] = row[col+2]
col += 2
@ -377,14 +339,8 @@ def parse_characterizer_csv(sram,f,pages):
break
#regardless of whether there is already a corner for the current sram, append the new corner to the datasheet
new_sheet.corners.append(characterization_corners_item(PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')))
new_sheet.dlv.append(deliverables_item('.lib','Synthesis models','<a href="file://{0}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))))
#
new_sheet.corners.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
new_sheet.dlv.add_row(['.lib','Synthesis models','<a href="file://{0}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
#
new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
new_sheet.dlv_table.add_row(['.lib','Synthesis models','<a href="file://{0}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
if found == 0:
@ -398,40 +354,18 @@ def parse_characterizer_csv(sram,f,pages):
new_sheet.LVS = LVS
new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME]
new_sheet.corners.append(characterization_corners_item(PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')))
#
new_sheet.corners_table = table_gen("corners")
new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
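#corners_table rows: [corner name, process, power supply, temperature, library name suffix]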
#
new_sheet.operating.append(operating_conditions_item('Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts'))
new_sheet.operating.append(operating_conditions_item('Operating Temperature',TEMP,TEMP,TEMP,'Celsius'))
#
new_sheet.operating_table = table_gen("operating_table")
new_sheet.operating_table.add_row(['Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts'])
new_sheet.operating_table.add_row(['Operating Temperature',TEMP,TEMP,TEMP,'Celsius'])
#
try:
new_sheet.operating.append(operating_conditions_item('Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz'))
#
new_sheet.operating_table.add_row(['Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz'])
#
except Exception:
new_sheet.operating.append(operating_conditions_item('Operating Frequency (F)','','',"not available in netlist only",'MHz')) #failed to provide non-zero MIN_PERIOD
#
new_sheet.operating_table.add_row(['Operating Frequency (F)','','',"not available in netlist only",'MHz']) #failed to provide non-zero MIN_PERIOD
#
#
new_sheet.timing_table = table_gen("timing")
#
while(True):
#
if(row[col].startswith('DIN')):
start = col
@ -453,41 +387,6 @@ def parse_characterizer_csv(sram,f,pages):
col +=1
#
if(row[col].startswith('DIN')):
start = col
new_sheet.timing.append(timing_and_current_data_item('{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
col +=1
#
elif(row[col].startswith('DOUT')):
start = col
new_sheet.timing.append(timing_and_current_data_item('{0} cell rise'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} cell fall'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} rise transition'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} fall transition'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
col +=1
#
elif(row[col].startswith('DOUT')):
start = col
@ -509,23 +408,6 @@ def parse_characterizer_csv(sram,f,pages):
col +=1
#
elif(row[col].startswith('CSb')):
start = col
new_sheet.timing.append(timing_and_current_data_item('{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
col +=1
#
elif(row[col].startswith('CSb')):
start = col
@ -547,23 +429,6 @@ def parse_characterizer_csv(sram,f,pages):
col +=1
#
elif(row[col].startswith('WEb')):
start = col
new_sheet.timing.append(timing_and_current_data_item('{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
col +=1
#
elif(row[col].startswith('WEb')):
start = col
@ -585,23 +450,6 @@ def parse_characterizer_csv(sram,f,pages):
col +=1
#
elif(row[col].startswith('ADDR')):
start = col
new_sheet.timing.append(timing_and_current_data_item('{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
new_sheet.timing.append(timing_and_current_data_item('{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'))
col += 2
col +=1
#
elif(row[col].startswith('ADDR')):
start = col
@ -623,47 +471,26 @@ def parse_characterizer_csv(sram,f,pages):
col +=1
#
else:
break
#
new_sheet.dlv_table = table_gen("dlv")
new_sheet.io_table = table_gen("io")
#
if not OPTS.netlist_only:
#physical layout files should not be generated in netlist only mode
new_sheet.dlv.append(deliverables_item('.gds','GDSII layout views','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'gds')))
new_sheet.dlv.append(deliverables_item('.lef','LEF files','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'lef')))
new_sheet.dlv_table.add_row(['.gds','GDSII layout views','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'gds')])
new_sheet.dlv_table.add_row(['.lef','LEF files','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'lef')])
new_sheet.dlv.append(deliverables_item('.sp','SPICE netlists','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'sp')))
new_sheet.dlv.append(deliverables_item('.v','Verilog simulation models','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'v')))
new_sheet.dlv.append(deliverables_item('.html','This datasheet','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'html')))
new_sheet.dlv.append(deliverables_item('.lib','Synthesis models','<a href="{1}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))))
new_sheet.dlv.append(deliverables_item('.py','OpenRAM configuration file','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'py')))
#
new_sheet.dlv_table.add_row(['.sp','SPICE netlists','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'sp')])
new_sheet.dlv_table.add_row(['.v','Verilog simulation models','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'v')])
new_sheet.dlv_table.add_row(['.html','This datasheet','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'html')])
new_sheet.dlv_table.add_row(['.lib','Synthesis models','<a href="{1}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'py')])
#
#debug table for multiport information
new_sheet.io.append(in_out_item('WORD_SIZE',WORD_SIZE))
new_sheet.io.append(in_out_item('NUM_WORDS',NUM_WORDS))
new_sheet.io.append(in_out_item('NUM_BANKS',NUM_BANKS))
new_sheet.io.append(in_out_item('NUM_RW_PORTS',NUM_RW_PORTS))
new_sheet.io.append(in_out_item('NUM_R_PORTS',NUM_R_PORTS))
new_sheet.io.append(in_out_item('NUM_W_PORTS',NUM_W_PORTS))
new_sheet.io.append(in_out_item('Area',sram.width * sram.height))
#
new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE])
new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS])
new_sheet.io_table.add_row(['NUM_BANKS',NUM_BANKS])
@ -671,7 +498,7 @@ def parse_characterizer_csv(sram,f,pages):
new_sheet.io_table.add_row(['NUM_R_PORTS',NUM_R_PORTS])
new_sheet.io_table.add_row(['NUM_W_PORTS',NUM_W_PORTS])
new_sheet.io_table.add_row(['Area',sram.width * sram.height])
#
@ -680,18 +507,18 @@ def parse_characterizer_csv(sram,f,pages):
class datasheet_gen():
def datasheet_write(sram,name):
if OPTS.datasheet_gen:
in_dir = OPTS.openram_temp
if not (os.path.isdir(in_dir)):
os.mkdir(in_dir)
in_dir = OPTS.openram_temp
if not (os.path.isdir(in_dir)):
os.mkdir(in_dir)
datasheets = []
parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets)
datasheets = []
parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets)
for sheets in datasheets:
with open(name, 'w+') as f:
sheets.generate_html()
f.write(sheets.html)
for sheets in datasheets:
with open(name, 'w+') as f:
sheets.generate_html()
f.write(sheets.html)
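For orientation only, a minimal sketch of how this entry point might be called, assuming the class lives in datasheet_gen.py and that OPTS has already been configured by the normal OpenRAM flow; the sram object and output path below are placeholders, not taken from this commit (the last hunk in this commit builds the real file name from OPTS.output_name):

from datasheet_gen import datasheet_gen

def write_sheet(sram):
    # 'sram' is the compiled SRAM design object handed over by the compiler flow
    # the output path here is a placeholder
    datasheet_gen.datasheet_write(sram, "/tmp/openram/sram_2_16_1.html")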

View File

@ -1,19 +0,0 @@
from flask_table import *
class deliverables(Table):
"""
Set up deliverables table columns and title information
"""
typ = Col('Type')
description = Col('Description')
link = Col('Link')
class deliverables_item(object):
"""
Define deliverables table row element information
"""
def __init__(self, typ, description,link):
self.typ = typ
self.description = description
self.link = link

View File

@ -1,17 +0,0 @@
from flask_table import *
class in_out(Table):
"""
Set up I/O table columns and title information for multiport debugging
"""
typ = Col('Type')
description = Col('Description')
class in_out_item(object):
"""
Define table row element for I/O table
"""
def __init__(self, typ, description):
self.typ = typ
self.description = description

View File

@ -1,23 +0,0 @@
from flask_table import *
class operating_conditions(Table):
"""
Set up operating conditions columns and title information
"""
parameter = Col('Parameter')
min = Col('Min')
typ = Col('Typ')
max = Col('Max')
units = Col('Units')
class operating_conditions_item(object):
"""
Define operating conditions table row element
"""
def __init__(self, parameter, min, typ, max, units):
self.parameter = parameter
self.min = min
self.typ = typ
self.max = max
self.units = units

View File

@ -1,22 +0,0 @@
from flask_table import *
class timing_and_current_data(Table):
"""
Set up timing and current table columns and title information
"""
parameter = Col('Parameter')
min = Col('Min')
max = Col('Max')
units = Col('Units')
class timing_and_current_data_item(object):
"""
Define timing and current data row element
"""
def __init__(self, parameter, min, max, units):
self.parameter = parameter
self.min = min
self.max = max
self.units = units

View File

@ -107,13 +107,7 @@ def check_versions():
# FIXME: Check versions of other tools here??
# or, this could be done in each module (e.g. verify, characterizer, etc.)
global OPTS
try:
import flask_table
OPTS.datasheet_gen = 1
except:
OPTS.datasheet_gen = 0
try:
import coverage
OPTS.coverage = 1

View File

@ -47,9 +47,7 @@ c = sram_config(word_size=OPTS.word_size,
print("Words per row: {}".format(c.words_per_row))
#from parser import *
output_extensions = ["sp","v","lib","py"]
if OPTS.datasheet_gen:
output_extensions.append("html")
output_extensions = ["sp","v","lib","py","html"]
if not OPTS.netlist_only:
output_extensions.extend(["gds","lef"])
output_files = ["{0}.{1}".format(OPTS.output_name,x) for x in output_extensions]