Merge remote-tracking branch 'origin/dev' into factory

Matt Guthaus 2019-01-18 09:52:18 -08:00
commit f5f27073be
31 changed files with 298 additions and 501 deletions

LICENSE

@@ -1,31 +1,31 @@
Copyright 2018 Regents of the University of California and The Board
BSD 3-Clause License
Copyright (c) 2019 Regents of the University of California and The Board
of Regents for the Oklahoma Agricultural and Mechanical College
(acting for and on behalf of Oklahoma State University)
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@@ -530,7 +530,7 @@ class lib:
"sram_{0}_{1}_{2}".format(OPTS.word_size, OPTS.num_words, OPTS.tech_name),
OPTS.num_words,
OPTS.num_banks,
OPTS.num_rw_ports,
OPTS.num_rw_ports,
OPTS.num_w_ports,
OPTS.num_r_ports,
OPTS.tech_name,
@@ -555,7 +555,7 @@ class lib:
LVS = str(total_lvs_errors)
datasheet.write("{0},{1},".format(DRC, LVS))
datasheet.write(str(self.sram.width * self.sram.height)+',')
for port in self.all_ports:
#DIN timings
if port in self.write_ports:
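The second hunk above adds the SRAM area (width times height) to the datasheet.info row immediately after the DRC and LVS counts; the datasheet generator below reads it back as a new AREA column. A minimal sketch of that row layout, using placeholder values for everything the hunk does not show:

    # Hedged sketch: the error counts and layout size are placeholders here.
    total_drc_errors = 0
    total_lvs_errors = 0
    width, height = 150.0, 200.0      # placeholder layout dimensions in um

    with open("datasheet.info", "a") as datasheet:
        DRC = str(total_drc_errors)
        LVS = str(total_lvs_errors)
        datasheet.write("{0},{1},".format(DRC, LVS))
        # newly added column: total area, parsed later as AREA
        datasheet.write(str(width * height) + ',')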

@@ -1,62 +1,60 @@
from table_gen import *
import os
import csv
import base64
from globals import OPTS
class datasheet():
"""
Defines the layout,but not the data, of the html datasheet
"""
def __init__(self,identifier):
def __init__(self, identifier):
self.name = identifier
self.html = ""
def generate_html(self):
"""
Generates html tables using flask-table
"""
with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/datasheet.css', 'r') as datasheet_css:
#css styling is kept in a seperate file
# css styling is kept in a seperate file
self.html += datasheet_css.read()
# with open(OPTS.openram_temp + "/datasheet.info") as info:
with open(OPTS.openram_temp + "/datasheet.info") as info:
self.html += '<!--'
# for row in info:
# self.html += row
for item in self.description:
self.html += item + ','
self.html += 'EOL'
self.html +='-->'
for row in info:
self.html += row
# for item in self.description:
# self.html += item + ','
self.html += '-->'
vlsi_logo = 0
with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png' , "rb") as image_file:
with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/vlsi_logo.png', "rb") as image_file:
vlsi_logo = base64.b64encode(image_file.read())
openram_logo = 0
with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png' , "rb") as image_file:
with open(os.path.abspath(os.environ.get("OPENRAM_HOME")) + '/datasheet/assets/openram_logo_placeholder.png', "rb") as image_file:
openram_logo = base64.b64encode(image_file.read())
self.html += '<a href="https://vlsida.soe.ucsc.edu/"><img src="data:image/png;base64,{0}" alt="VLSIDA"></a>'.format(str(vlsi_logo)[
2:-1])
self.html += '<a href="https://vlsida.soe.ucsc.edu/"><img src="data:image/png;base64,{0}" alt="VLSIDA"></a>'.format(str(vlsi_logo)[2:-1])
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">' + \
self.name + '.html' + '</p>'
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Compiled at: ' + self.time + '</p>'
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">' + \
'DRC errors: ' + str(self.DRC) + '</p>'
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">' + \
'LVS errors: ' + str(self.LVS) + '</p>'
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">' + \
'Git commit id: ' + str(self.git_id) + '</p>'
self.html +='<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">'+ self.name + '.html' + '</p>'
self.html +='<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Compiled at: '+ self.time + '</p>'
self.html +='<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">'+ 'DRC errors: ' + str(self.DRC) + '</p>'
self.html +='<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">'+ 'LVS errors: ' + str(self.LVS) + '</p>'
self.html += '<p style="font-size: 18px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">'+ 'Git commit id: ' + str(self.git_id) + '</p>'
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Ports and Configuration</p>'
self.html += '<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Ports and Configuration</p>'
# self.html += in_out(self.io,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
self.html += self.io_table.to_html()
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Operating Conditions</p>'
self.html += '<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Operating Conditions</p>'
# self.html += operating_conditions(self.operating,table_id='data').__html__()
self.html += self.operating_table.to_html()
@@ -68,9 +66,6 @@ class datasheet():
# self.html += characterization_corners(self.corners,table_id='data').__html__()
self.html += self.corners_table.to_html()
self.html +='<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Deliverables</p>'
self.html += '<p style="font-size: 26px;font-family: Trebuchet MS, Arial, Helvetica, sans-serif;">Deliverables</p>'
# self.html += deliverables(self.dlv,table_id='data').__html__().replace('&lt;','<').replace('&#34;','"').replace('&gt;',">")
self.html += self.dlv_table.to_html()
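generate_html above inlines the VLSIDA and OpenRAM logos as base64 data URIs so the datasheet stays a single self-contained HTML file. A minimal sketch of that technique, assuming a local logo.png; decode("ascii") plays the same role as the str(...)[2:-1] slice used above to strip the bytes repr:

    import base64

    # Embed a PNG directly in the page so no external image files are needed.
    with open("logo.png", "rb") as image_file:        # hypothetical logo path
        encoded = base64.b64encode(image_file.read()).decode("ascii")

    html = '<img src="data:image/png;base64,{0}" alt="logo">'.format(encoded)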

@@ -1,21 +1,21 @@
#!/usr/bin/env python3
"""
This is a script to load data from the characterization and layout processes into
This is a script to load data from the characterization and layout processes into
a web friendly html datasheet.
"""
#TODO:
#include log file
#Diagram generation
#Improve css
# TODO:
# include power
# Diagram generation
# Improve css
import debug
from globals import OPTS
import os, math
import optparse
import os
import math
import csv
from datasheet import *
from table_gen import *
import datasheet
import table_gen
def process_name(corner):
"""
@@ -30,20 +30,20 @@ def process_name(corner):
else:
return "custom"
def parse_characterizer_csv(sram,f,pages):
def parse_characterizer_csv(f, pages):
"""
Parses output data of the Liberty file generator in order to construct the timing and
current table
"""
with open(f) as csv_file:
csv_reader = csv.reader(csv_file, delimiter=',')
line_count = 0
for row in csv_reader:
found = 0
col = 0
#defines layout of csv file
# defines layout of csv file
NAME = row[col]
col += 1
@@ -85,29 +85,30 @@ def parse_characterizer_csv(sram,f,pages):
WORD_SIZE = row[col]
col += 1
ORIGIN_ID = row[col]
col += 1
DATETIME = row[col]
col+= 1
col += 1
DRC = row[col]
col += 1
LVS = row[col]
col += 1
for sheet in pages:
AREA = row[col]
col += 1
for sheet in pages:
if sheet.name == NAME:
found = 1
#if the .lib information is for an existing datasheet compare timing data
# if the .lib information is for an existing datasheet compare timing data
for item in sheet.operating_table.rows:
#check if the new corner data is worse than the previous worse corner data
# check if the new corner data is worse than the previous worse corner data
if item[0] == 'Operating Temperature':
if float(TEMP) > float(item[3]):
@@ -128,14 +129,13 @@ def parse_characterizer_csv(sram,f,pages):
if item[0] == 'Operating Frequncy (F)':
try:
if float(math.floor(1000/float(MIN_PERIOD)) < float(item[3])):
item[3] = str(math.floor(1000/float(MIN_PERIOD)))
item[3] = str(math.floor(
1000/float(MIN_PERIOD)))
except Exception:
pass
while(True):
col_start = col
if(row[col].startswith('DIN')):
start = col
for item in sheet.timing_table.rows:
@@ -253,7 +253,6 @@ def parse_characterizer_csv(sram,f,pages):
col += 1
elif(row[col].startswith('WEb')):
start = col
for item in sheet.timing_table.rows:
@@ -293,7 +292,6 @@ def parse_characterizer_csv(sram,f,pages):
col += 1
elif(row[col].startswith('ADDR')):
start = col
for item in sheet.timing_table.rows:
@@ -333,196 +331,225 @@ def parse_characterizer_csv(sram,f,pages):
col += 1
else:
for element in row[col_start: col - 1]:
sheet.description.append(str(element))
break
new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
new_sheet.dlv_table.add_row(['.lib','Synthesis models','<a href="file://{0}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
new_sheet.corners_table.add_row([PROC, process_name(
PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
new_sheet.dlv_table.add_row(
['.lib', 'Synthesis models', '<a href="file://{0}">{1}</a>'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))])
if found == 0:
#if this is the first corner for this sram, run first time configuration and set up tables
new_sheet = datasheet(NAME)
# if this is the first corner for this sram, run first time configuration and set up tables
new_sheet = datasheet.datasheet(NAME)
pages.append(new_sheet)
new_sheet.git_id = ORIGIN_ID
new_sheet.time = DATETIME
new_sheet.DRC = DRC
new_sheet.LVS = LVS
new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS, NUM_R_PORTS, TECH_NAME, WORD_SIZE, ORIGIN_ID, DATETIME]
new_sheet.description = [NAME, NUM_WORDS, NUM_BANKS, NUM_RW_PORTS, NUM_W_PORTS,
NUM_R_PORTS, TECH_NAME, MIN_PERIOD, WORD_SIZE, ORIGIN_ID, DATETIME]
new_sheet.corners_table = table_gen("corners")
new_sheet.corners_table.add_row(['Corner Name','Process','Power Supply','Temperature','Library Name Suffix'])
new_sheet.corners_table.add_row([PROC,process_name(PROC),VOLT,TEMP,LIB_NAME.replace(OUT_DIR,'').replace(NAME,'')])
new_sheet.operating_table = table_gen("operating_table")
new_sheet.operating_table.add_row(['Parameter','Min','Typ','Max','Units'])
new_sheet.operating_table.add_row(['Power supply (VDD) range',VOLT,VOLT,VOLT,'Volts'])
new_sheet.operating_table.add_row(['Operating Temperature',TEMP,TEMP,TEMP,'Celsius'])
new_sheet.corners_table = table_gen.table_gen("corners")
new_sheet.corners_table.add_row(
['Corner Name', 'Process', 'Power Supply', 'Temperature', 'Library Name Suffix'])
new_sheet.corners_table.add_row([PROC, process_name(
PROC), VOLT, TEMP, LIB_NAME.replace(OUT_DIR, '').replace(NAME, '')])
new_sheet.operating_table = table_gen.table_gen(
"operating_table")
new_sheet.operating_table.add_row(
['Parameter', 'Min', 'Typ', 'Max', 'Units'])
new_sheet.operating_table.add_row(
['Power supply (VDD) range', VOLT, VOLT, VOLT, 'Volts'])
new_sheet.operating_table.add_row(
['Operating Temperature', TEMP, TEMP, TEMP, 'Celsius'])
try:
new_sheet.operating_table.add_row(['Operating Frequency (F)','','',str(math.floor(1000/float(MIN_PERIOD))),'MHz'])
new_sheet.operating_table.add_row(['Operating Frequency (F)', '', '', str(
math.floor(1000/float(MIN_PERIOD))), 'MHz'])
except Exception:
new_sheet.operating_table.add_row(['Operating Frequency (F)','','',"not available in netlist only",'MHz']) #failed to provide non-zero MIN_PERIOD
new_sheet.timing_table = table_gen("timing")
new_sheet.timing_table.add_row(['Parameter','Min','Max','Units'])
# failed to provide non-zero MIN_PERIOD
new_sheet.operating_table.add_row(
['Operating Frequency (F)', '', '', "not available in netlist only", 'MHz'])
new_sheet.timing_table = table_gen.table_gen("timing")
new_sheet.timing_table.add_row(
['Parameter', 'Min', 'Max', 'Units'])
while(True):
col_start = col
if(row[col].startswith('DIN')):
start = col
new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col +=1
new_sheet.timing_table.add_row(
['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col += 1
elif(row[col].startswith('DOUT')):
start = col
new_sheet.timing_table.add_row(['{0} cell rise'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} cell rise'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} cell fall'.format(row[start]),row[col+1],row[col+2],'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} rise transition'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} cell fall'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} fall transition'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} rise transition'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col +=1
new_sheet.timing_table.add_row(
['{0} fall transition'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col += 1
elif(row[col].startswith('CSb')):
start = col
new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col +=1
new_sheet.timing_table.add_row(
['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col += 1
elif(row[col].startswith('WEb')):
start = col
new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col +=1
new_sheet.timing_table.add_row(
['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col += 1
elif(row[col].startswith('ADDR')):
start = col
new_sheet.timing_table.add_row(['{0} setup rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} setup falling'.format(row[start]),row[col+1],row[col+2],'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold rising'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} setup falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
new_sheet.timing_table.add_row(['{0} hold falling'.format(row[start]),row[col+1],row[col+2],'ns'])
new_sheet.timing_table.add_row(
['{0} hold rising'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col +=1
new_sheet.timing_table.add_row(
['{0} hold falling'.format(row[start]), row[col+1], row[col+2], 'ns'])
col += 2
col += 1
else:
for element in row[col_start:col-1]:
sheet.description.append(str(element))
break
new_sheet.dlv_table = table_gen.table_gen("dlv")
new_sheet.dlv_table.add_row(['Type', 'Description', 'Link'])
new_sheet.dlv_table = table_gen("dlv")
new_sheet.dlv_table.add_row(['Type','Description','Link'])
new_sheet.io_table = table_gen("io")
new_sheet.io_table = table_gen.table_gen("io")
new_sheet.io_table.add_row(['Type', 'Value'])
if not OPTS.netlist_only:
#physical layout files should not be generated in netlist only mode
new_sheet.dlv_table.add_row(['.gds','GDSII layout views','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'gds')])
new_sheet.dlv_table.add_row(['.lef','LEF files','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'lef')])
new_sheet.dlv_table.add_row(['.sp','SPICE netlists','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'sp')])
new_sheet.dlv_table.add_row(['.v','Verilog simulation models','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'v')])
new_sheet.dlv_table.add_row(['.html','This datasheet','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'html')])
new_sheet.dlv_table.add_row(['.lib','Synthesis models','<a href="{1}">{1}</a>'.format(LIB_NAME,LIB_NAME.replace(OUT_DIR,''))])
new_sheet.dlv_table.add_row(['.py','OpenRAM configuration file','<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name,'py')])
new_sheet.io_table.add_row(['WORD_SIZE',WORD_SIZE])
new_sheet.io_table.add_row(['NUM_WORDS',NUM_WORDS])
new_sheet.io_table.add_row(['NUM_BANKS',NUM_BANKS])
new_sheet.io_table.add_row(['NUM_RW_PORTS',NUM_RW_PORTS])
new_sheet.io_table.add_row(['NUM_R_PORTS',NUM_R_PORTS])
new_sheet.io_table.add_row(['NUM_W_PORTS',NUM_W_PORTS])
new_sheet.io_table.add_row(['Area',sram.width * sram.height])
# physical layout files should not be generated in netlist only mode
new_sheet.dlv_table.add_row(
['.gds', 'GDSII layout views', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'gds')])
new_sheet.dlv_table.add_row(
['.lef', 'LEF files', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'lef')])
new_sheet.dlv_table.add_row(
['.log', 'OpenRAM compile log', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'log')])
new_sheet.dlv_table.add_row(
['.v', 'Verilog simulation models', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'v')])
new_sheet.dlv_table.add_row(
['.html', 'This datasheet', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'html')])
new_sheet.dlv_table.add_row(
['.lib', 'Synthesis models', '<a href="{1}">{1}</a>'.format(LIB_NAME, LIB_NAME.replace(OUT_DIR, ''))])
new_sheet.dlv_table.add_row(
['.py', 'OpenRAM configuration file', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'py')])
new_sheet.dlv_table.add_row(
['.sp', 'SPICE netlists', '<a href="{0}.{1}">{0}.{1}</a>'.format(OPTS.output_name, 'sp')])
new_sheet.io_table.add_row(['WORD_SIZE', WORD_SIZE])
new_sheet.io_table.add_row(['NUM_WORDS', NUM_WORDS])
new_sheet.io_table.add_row(['NUM_BANKS', NUM_BANKS])
new_sheet.io_table.add_row(['NUM_RW_PORTS', NUM_RW_PORTS])
new_sheet.io_table.add_row(['NUM_R_PORTS', NUM_R_PORTS])
new_sheet.io_table.add_row(['NUM_W_PORTS', NUM_W_PORTS])
new_sheet.io_table.add_row(['Area', AREA])
class datasheet_gen():
def datasheet_write(sram,name):
def datasheet_write(name):
in_dir = OPTS.openram_temp
if not (os.path.isdir(in_dir)):
os.mkdir(in_dir)
datasheets = []
parse_characterizer_csv(sram, in_dir + "/datasheet.info", datasheets)
parse_characterizer_csv(in_dir + "/datasheet.info", datasheets)
for sheets in datasheets:
with open(name, 'w+') as f:
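parse_characterizer_csv walks each datasheet.info row column by column and dispatches on the signal-name prefix (DIN, DOUT, CSb, WEb, ADDR) to fill the timing table, advancing past four min/max pairs per signal before moving on. A stripped-down sketch of that column walk; the file name and the number of fixed header columns are assumptions:

    import csv

    with open("datasheet.info") as csv_file:            # hypothetical single-row file
        for row in csv.reader(csv_file, delimiter=','):
            col = 16                                     # assumed count of fixed header columns
            while col < len(row):
                if row[col].startswith('DIN'):
                    name = row[col]
                    setup_rising = (row[col + 1], row[col + 2])   # min, max in ns
                    # ...setup falling and hold rising/falling pairs follow...
                    col += 9                             # name plus four min/max pairs
                elif row[col].startswith('DOUT'):
                    col += 9                             # cell rise/fall and transitions
                else:
                    description = row[col:]              # trailing description columns
                    break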

@@ -14,20 +14,51 @@ def check(check,str):
index) = inspect.getouterframes(inspect.currentframe())[1]
if not check:
sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
assert 0
def error(str,return_value=0):
(frame, filename, line_number, function_name, lines,
index) = inspect.getouterframes(inspect.currentframe())[1]
sys.stderr.write("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
log("ERROR: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
assert return_value==0
def warning(str):
(frame, filename, line_number, function_name, lines,
index) = inspect.getouterframes(inspect.currentframe())[1]
sys.stderr.write("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
log("WARNING: file {0}: line {1}: {2}\n".format(os.path.basename(filename),line_number,str))
def print_raw(str):
print(str)
log(str)
def log(str):
try:
if log.create_file:
compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"w+")
log.create_file = 0
else:
compile_log = open(globals.OPTS.output_path + globals.OPTS.output_name + '.log',"a")
if len(log.setup_output) != 0:
for line in log.setup_output:
compile_log.write(line)
log.setup_output = []
compile_log.write(str + '\n')
except:
log.setup_output.append(str + "\n")
#use a static list of strings to store messages until the global paths are set up
log.setup_output = []
log.create_file = 1
def info(lev, str):
from globals import OPTS
if (OPTS.debug_level >= lev):
@@ -39,3 +70,6 @@ def info(lev, str):
else:
class_name=mod.__name__
print("[{0}/{1}]: {2}".format(class_name,frm[0].f_code.co_name,str))
log("[{0}/{1}]: {2}".format(class_name,frm[0].f_code.co_name,str))

@@ -1,7 +1,8 @@
from .gdsPrimitives import *
from datetime import *
#from mpmath import matrix
from numpy import matrix
#from numpy import matrix
import numpy as np
#import gdsPrimitives
import debug
@@ -170,21 +171,20 @@ class VlsiLayout:
else:
# MRG: Added negative to make CCW rotate 8/29/18
angle = math.radians(float(rotateAngle))
mRotate = matrix([[math.cos(angle),-math.sin(angle),0.0],
mRotate = np.array([[math.cos(angle),-math.sin(angle),0.0],
[math.sin(angle),math.cos(angle),0.0],
[0.0,0.0,1.0]])
#set up the translation matrix
translateX = float(coordinates[0])
translateY = float(coordinates[1])
mTranslate = matrix([[1.0,0.0,translateX],[0.0,1.0,translateY],[0.0,0.0,1.0]])
mTranslate = np.array([[1.0,0.0,translateX],[0.0,1.0,translateY],[0.0,0.0,1.0]])
#set up the scale matrix (handles mirror X)
scaleX = 1.0
if(transFlags[0]):
scaleY = -1.0
else:
scaleY = 1.0
mScale = matrix([[scaleX,0.0,0.0],[0.0,scaleY,0.0],[0.0,0.0,1.0]])
mScale = np.array([[scaleX,0.0,0.0],[0.0,scaleY,0.0],[0.0,0.0,1.0]])
#we need to keep track of all transforms in the hierarchy
#when we add an element to the xy tree, we apply all transforms from the bottom up
transformPath.append((mRotate,mScale,mTranslate))
@@ -219,27 +219,26 @@ class VlsiLayout:
def populateCoordinateMap(self):
def addToXyTree(startingStructureName = None,transformPath = None):
#print("populateCoordinateMap")
uVector = matrix([1.0,0.0,0.0]).transpose() #start with normal basis vectors
vVector = matrix([0.0,1.0,0.0]).transpose()
origin = matrix([0.0,0.0,1.0]).transpose() #and an origin (Z component is 1.0 to indicate position instead of vector)
uVector = np.array([[1.0],[0.0],[0.0]]) #start with normal basis vectors
vVector = np.array([[0.0],[1.0],[0.0]])
origin = np.array([[0.0],[0.0],[1.0]]) #and an origin (Z component is 1.0 to indicate position instead of vector)
#make a copy of all the transforms and reverse it
reverseTransformPath = transformPath[:]
if len(reverseTransformPath) > 1:
reverseTransformPath.reverse()
reverseTransformPath.reverse()
#now go through each transform and apply them to our basis and origin in succession
for transform in reverseTransformPath:
origin = transform[0] * origin #rotate
uVector = transform[0] * uVector #rotate
vVector = transform[0] * vVector #rotate
origin = transform[1] * origin #scale
uVector = transform[1] * uVector #scale
vVector = transform[1] * vVector #scale
origin = transform[2] * origin #translate
origin = np.dot(transform[0], origin) #rotate
uVector = np.dot(transform[0], uVector) #rotate
vVector = np.dot(transform[0], vVector) #rotate
origin = np.dot(transform[1], origin) #scale
uVector = np.dot(transform[1], uVector) #scale
vVector = np.dot(transform[1], vVector) #scale
origin = np.dot(transform[2], origin) #translate
#we don't need to do a translation on the basis vectors
#uVector = transform[2] * uVector #translate
#vVector = transform[2] * vVector #translate
#populate the xyTree with each structureName and coordinate space
#populate the xyTree with each structureName and coordinate space
self.xyTree.append((startingStructureName,origin,uVector,vVector))
self.traverseTheHierarchy(delegateFunction = addToXyTree)
@@ -522,8 +521,7 @@ class VlsiLayout:
return True
def fillAreaDensity(self, layerToFill = 0, offsetInMicrons = (0,0), coverageWidth = 100.0, coverageHeight = 100.0,
minSpacing = 0.22, blockSize = 1.0):
def fillAreaDensity(self, layerToFill = 0, offsetInMicrons = (0,0), coverageWidth = 100.0, coverageHeight = 100.0, minSpacing = 0.22, blockSize = 1.0):
effectiveBlock = blockSize+minSpacing
widthInBlocks = int(coverageWidth/effectiveBlock)
heightInBlocks = int(coverageHeight/effectiveBlock)
@@ -810,8 +808,8 @@ class VlsiLayout:
# This is fixed to be:
# |u[0] v[0]| |x| |x'|
# |u[1] v[1]|x|y|=|y'|
x=coordinate[0]*uVector[0].item()+coordinate[1]*vVector[0].item()
y=coordinate[0]*uVector[1].item()+coordinate[1]*vVector[1].item()
x=coordinate[0]*uVector[0][0]+coordinate[1]*vVector[0][0]
y=coordinate[0]*uVector[1][0]+coordinate[1]*vVector[1][0]
transformCoordinate=[x,y]
return transformCoordinate
@@ -836,5 +834,3 @@ def boundaryArea(A):
area_A=(A[2]-A[0])*(A[3]-A[1])
return area_A
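The numpy.matrix to numpy.array migration in this file matters because `*` on arrays is elementwise (with broadcasting), whereas the old matrix type overloaded `*` as matrix multiplication; that is why the rotate, scale, and translate applications now go through np.dot. A small sketch of the difference, assuming a 90-degree rotation of a point at (1, 0):

    import math
    import numpy as np

    angle = math.radians(90.0)
    mRotate = np.array([[math.cos(angle), -math.sin(angle), 0.0],
                        [math.sin(angle),  math.cos(angle), 0.0],
                        [0.0,              0.0,             1.0]])
    origin = np.array([[1.0], [0.0], [1.0]])   # column vector; Z=1 marks a position

    rotated = np.dot(mRotate, origin)          # matrix-vector product: roughly (0, 1, 1)
    # mRotate * origin would broadcast elementwise into a 3x3 array, not rotate the point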

@@ -71,26 +71,26 @@ def print_banner():
if OPTS.is_unit_test:
return
print("|==============================================================================|")
debug.print_raw("|==============================================================================|")
name = "OpenRAM Compiler"
print("|=========" + name.center(60) + "=========|")
print("|=========" + " ".center(60) + "=========|")
print("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|")
print("|=========" + "Computer Science and Engineering Department".center(60) + "=========|")
print("|=========" + "University of California Santa Cruz".center(60) + "=========|")
print("|=========" + " ".center(60) + "=========|")
print("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|")
print("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|")
print("|=========" + "Oklahoma State University".center(60) + "=========|")
print("|=========" + " ".center(60) + "=========|")
debug.print_raw("|=========" + name.center(60) + "=========|")
debug.print_raw("|=========" + " ".center(60) + "=========|")
debug.print_raw("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|")
debug.print_raw("|=========" + "Computer Science and Engineering Department".center(60) + "=========|")
debug.print_raw("|=========" + "University of California Santa Cruz".center(60) + "=========|")
debug.print_raw("|=========" + " ".center(60) + "=========|")
debug.print_raw("|=========" + "VLSI Computer Architecture Research Group".center(60) + "=========|")
debug.print_raw("|=========" + "Electrical and Computer Engineering Department".center(60) + "=========|")
debug.print_raw("|=========" + "Oklahoma State University".center(60) + "=========|")
debug.print_raw("|=========" + " ".center(60) + "=========|")
user_info = "Usage help: openram-user-group@ucsc.edu"
print("|=========" + user_info.center(60) + "=========|")
debug.print_raw("|=========" + user_info.center(60) + "=========|")
dev_info = "Development help: openram-dev-group@ucsc.edu"
print("|=========" + dev_info.center(60) + "=========|")
debug.print_raw("|=========" + dev_info.center(60) + "=========|")
temp_info = "Temp dir: {}".format(OPTS.openram_temp)
print("|=========" + temp_info.center(60) + "=========|")
print("|=========" + "See LICENSE for license info".center(60) + "=========|")
print("|==============================================================================|")
debug.print_raw("|=========" + temp_info.center(60) + "=========|")
debug.print_raw("|=========" + "See LICENSE for license info".center(60) + "=========|")
debug.print_raw("|==============================================================================|")
def check_versions():
@@ -400,7 +400,7 @@ def print_time(name, now_time, last_time=None, indentation=2):
time = str(round((now_time-last_time).total_seconds(),1)) + " seconds"
else:
time = now_time.strftime('%m/%d/%Y %H:%M:%S')
print("{0} {1}: {2}".format("*"*indentation,name,time))
debug.print_raw("{0} {1}: {2}".format("*"*indentation,name,time))
def report_status():
@@ -416,20 +416,20 @@ def report_status():
if not OPTS.tech_name:
debug.error("Tech name must be specified in config file.")
print("Technology: {0}".format(OPTS.tech_name))
print("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks))
print("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size,
debug.print_raw("Technology: {0}".format(OPTS.tech_name))
debug.print_raw("Total size: {} bits".format(OPTS.word_size*OPTS.num_words*OPTS.num_banks))
debug.print_raw("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size,
OPTS.num_words,
OPTS.num_banks))
print("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports,
debug.print_raw("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports,
OPTS.num_r_ports,
OPTS.num_w_ports))
if OPTS.netlist_only:
print("Netlist only mode (no physical design is being done).")
debug.print_raw("Netlist only mode (no physical design is being done).")
if not OPTS.inline_lvsdrc:
print("DRC/LVS/PEX is only run on the top-level design.")
debug.print_raw("DRC/LVS/PEX is only run on the top-level design.")
if not OPTS.check_lvsdrc:
print("DRC/LVS/PEX is completely disabled.")
debug.print_raw("DRC/LVS/PEX is completely disabled.")

@@ -44,15 +44,16 @@ from sram_config import sram_config
# Configure the SRAM organization
c = sram_config(word_size=OPTS.word_size,
num_words=OPTS.num_words)
print("Words per row: {}".format(c.words_per_row))
debug.print_raw("Words per row: {}".format(c.words_per_row))
#from parser import *
output_extensions = ["sp","v","lib","py","html"]
output_extensions = ["sp","v","lib","py","html","log"]
if not OPTS.netlist_only:
output_extensions.extend(["gds","lef"])
output_files = ["{0}{1}.{2}".format(OPTS.output_path,OPTS.output_name,x) for x in output_extensions]
print("Output files are: ")
print(*output_files,sep="\n")
debug.print_raw("Output files are: ")
for path in output_files:
debug.print_raw(path)
from sram import sram

@@ -65,21 +65,21 @@ class sram():
# Write the layout
start_time = datetime.datetime.now()
gdsname = OPTS.output_path + self.s.name + ".gds"
print("GDS: Writing to {0}".format(gdsname))
debug.print_raw("GDS: Writing to {0}".format(gdsname))
self.gds_write(gdsname)
print_time("GDS", datetime.datetime.now(), start_time)
# Create a LEF physical model
start_time = datetime.datetime.now()
lefname = OPTS.output_path + self.s.name + ".lef"
print("LEF: Writing to {0}".format(lefname))
debug.print_raw("LEF: Writing to {0}".format(lefname))
self.lef_write(lefname)
print_time("LEF", datetime.datetime.now(), start_time)
# Save the spice file
start_time = datetime.datetime.now()
spname = OPTS.output_path + self.s.name + ".sp"
print("SP: Writing to {0}".format(spname))
debug.print_raw("SP: Writing to {0}".format(spname))
self.sp_write(spname)
print_time("Spice writing", datetime.datetime.now(), start_time)
@@ -98,14 +98,14 @@ class sram():
# Characterize the design
start_time = datetime.datetime.now()
from characterizer import lib
print("LIB: Characterizing... ")
debug.print_raw("LIB: Characterizing... ")
if OPTS.analytical_delay:
print("Using analytical delay models (no characterization)")
debug.print_raw("Using analytical delay models (no characterization)")
else:
if OPTS.spice_name!="":
print("Performing simulation-based characterization with {}".format(OPTS.spice_name))
debug.print_raw("Performing simulation-based characterization with {}".format(OPTS.spice_name))
if OPTS.trim_netlist:
print("Trimming netlist to speed up characterization.")
debug.print_raw("Trimming netlist to speed up characterization.")
lib(out_dir=OPTS.output_path, sram=self.s, sp_file=sp_file)
print_time("Characterization", datetime.datetime.now(), start_time)
@@ -114,20 +114,20 @@ class sram():
start_time = datetime.datetime.now()
from shutil import copyfile
copyfile(OPTS.config_file + '.py', OPTS.output_path + OPTS.output_name + '.py')
print("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py'))
debug.print_raw("Config: Writing to {0}".format(OPTS.output_path + OPTS.output_name + '.py'))
print_time("Config", datetime.datetime.now(), start_time)
# Write the datasheet
start_time = datetime.datetime.now()
from datasheet_gen import datasheet_gen
dname = OPTS.output_path + self.s.name + ".html"
print("Datasheet: Writing to {0}".format(dname))
datasheet_gen.datasheet_write(self.s,dname)
debug.print_raw("Datasheet: Writing to {0}".format(dname))
datasheet_gen.datasheet_write(dname)
print_time("Datasheet", datetime.datetime.now(), start_time)
# Write a verilog model
start_time = datetime.datetime.now()
vname = OPTS.output_path + self.s.name + ".v"
print("Verilog: Writing to {0}".format(vname))
debug.print_raw("Verilog: Writing to {0}".format(vname))
self.verilog_write(vname)
print_time("Verilog", datetime.datetime.now(), start_time)

@@ -1,14 +0,0 @@
SUBDIRS := $(wildcard */.)
SUBDIRSCLEAN=$(addsuffix clean,$(SUBDIRS))
all: $(SUBDIRS)
$(SUBDIRS):
$(MAKE) -k -C $@
clean:
for dir in $(SUBDIRS); do \
$(MAKE) -C $$dir $@; \
done
.PHONY: all $(SUBDIRS) $(SUBDIRSCLEAN)

@@ -1,5 +0,0 @@
This directory contains a set of common sizes based on
discussions with users. All of the files are pre-computed
to that common-case users don't need to setup/use OpenRAM.
The results will be updated automatically as improvements
are made to OpenRAM.

@@ -1,32 +0,0 @@
CUR_DIR = $(shell pwd)
TEST_DIR = ${CUR_DIR}/tests
#MAKEFLAGS += -j 2
CONFIG_DIR = configs
OUT_DIRS = sp lib lef gds verilog
$(shell mkdir -p $(OUT_DIRS))
SRCS=$(wildcard $(CONFIG_DIR)/*.py)
SPICES=$(SRCS:.py=.sp)
all : $(SPICES)
# Characterize and perform DRC/LVS
OPTS = -c
# Do not characterize or perform DRC/LVS
#OPTS += -n
# Verbosity
OPTS += -v
%.sp : %.py
$(eval bname=$(basename $(notdir $<)))
openram.py $(OPTS) $< 2>&1 > $(bname).log
mv $(bname).lef lef
mv $(bname).v verilog
mv $(bname).sp sp
mv $(bname).gds gds
mv $(bname)*.lib lib
clean:
rm -f *.log configs/*.pyc *~ *.gds *.lib *.sp *.v *.lef
rm -f gds/* lef/* lib/* sp/* verilog/*

@@ -1,8 +0,0 @@
word_size = 128
num_words = 1024
num_banks = 1
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 32
num_words = 1024
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 32
num_words = 2048
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 32
num_words = 256
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 32
num_words = 512
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 64
num_words = 1024
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 8
num_words = 1024
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 8
num_words = 256
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,7 +0,0 @@
word_size = 8
num_words = 512
tech_name = "freepdk45"
process_corners = ["TT"]
supply_voltages = [1.0]
temperatures = [25]

@@ -1,33 +0,0 @@
CUR_DIR = $(shell pwd)
TEST_DIR = ${CUR_DIR}/tests
#MAKEFLAGS += -j 2
CONFIG_DIR = configs
OUT_DIRS = sp lib lef gds verilog
$(shell mkdir -p $(OUT_DIRS))
SRCS=$(wildcard $(CONFIG_DIR)/*.py)
SPICES=$(SRCS:.py=.sp)
all : $(SPICES)
OPTS =
# Characterize and perform DRC/LVS
#OPTS = -c
# Do not characterize or perform DRC/LVS
#OPTS += -n
# Verbosity
OPTS += -v
%.sp : %.py
$(eval bname=$(basename $(notdir $<)))
openram.py $(OPTS) $< 2>&1 > $(bname).log
mv $(bname).lef lef
mv $(bname).v verilog
mv $(bname).sp sp
mv $(bname).gds gds
mv $(bname)*.lib lib
clean:
rm -f *.log configs/*.pyc *~ *.gds *.lib *.sp *.v *.lef
rm -f gds/* lef/* lib/* sp/* verilog/*

@@ -1,12 +0,0 @@
word_size = 128
num_words = 1024
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 32
num_words = 1024
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 32
num_words = 2048
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 32
num_words = 256
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 32
num_words = 512
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 64
num_words = 1024
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 8
num_words = 1024
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 8
num_words = 256
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"

@@ -1,12 +0,0 @@
word_size = 8
num_words = 512
tech_name = "scn4m_subm"
process_corners = ["TT"]
supply_voltages = [ 5.0 ]
temperatures = [ 25 ]
drc_name = "magic"
lvs_name = "netgen"
pex_name = "magic"