# See LICENSE for licensing information.
#
# Copyright (c) 2016-2019 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
"""
This is called globals.py, but it actually parses all the arguments
and performs the global OpenRAM setup as well.
"""

import os
import debug
import shutil
import optparse
import options
import sys
import re
import copy
import importlib


VERSION = "1.1.10"
NAME = "OpenRAM v{}".format(VERSION)
USAGE = "openram.py [options] <config file>\nUse -h for help.\n"

OPTS = options.options()
CHECKPOINT_OPTS = None
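
# A rough sketch of the typical call order for this module (the actual driver,
# e.g. openram.py, is not part of this file and may differ):
#
#   parse_args()
#   init_openram("myconfig", is_unit_test=False)
#   print_banner()
#   report_status()
#   ... compile the SRAM ...
#   end_openram()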


def parse_args():
    """ Parse the optional arguments for OpenRAM """

    global OPTS

    option_list = {
        optparse.make_option("-b",
                             "--backannotated",
                             action="store_true",
                             dest="use_pex",
                             help="Back annotate simulation"),
        optparse.make_option("-o",
                             "--output",
                             dest="output_name",
                             help="Base output file name(s) prefix",
                             metavar="FILE"),
        optparse.make_option("-p", "--outpath",
                             dest="output_path",
                             help="Output file(s) location"),
        optparse.make_option("-i",
                             "--inlinecheck",
                             action="store_true",
                             help="Enable inline LVS/DRC checks",
                             dest="inline_lvsdrc"),
        optparse.make_option("-n", "--nocheck",
                             action="store_false",
                             help="Disable all LVS/DRC checks",
                             dest="check_lvsdrc"),
        optparse.make_option("-j", "--threads",
                             action="store",
                             type="int",
                             help="Specify the number of threads (default: 2)",
                             dest="num_threads"),
        optparse.make_option("-v",
                             "--verbose",
                             action="count",
                             dest="verbose_level",
                             help="Increase the verbosity level"),
        optparse.make_option("-t",
                             "--tech",
                             dest="tech_name",
                             help="Technology name"),
        optparse.make_option("-s",
                             "--spice",
                             dest="spice_name",
                             help="Spice simulator executable name"),
        optparse.make_option("-r",
                             "--remove_netlist_trimming",
                             action="store_false",
                             dest="trim_netlist",
                             help="Disable removal of noncritical memory cells during characterization"),
        optparse.make_option("-c",
                             "--characterize",
                             action="store_false",
                             dest="analytical_delay",
                             help="Perform characterization to calculate delays (default is analytical models)"),
        optparse.make_option("-k",
                             "--keeptemp",
                             action="store_true",
                             dest="keep_temp",
                             help="Keep the contents of the temp directory after a successful run"),
        optparse.make_option("-d",
                             "--debug",
                             action="store_true",
                             dest="debug",
                             help="Run in debug mode to drop to pdb on failure")
        # -h --help is implicit.
    }

    parser = optparse.OptionParser(option_list=option_list,
                                   description=NAME,
                                   usage=USAGE,
                                   version=VERSION)

    (options, args) = parser.parse_args(values=OPTS)
    # If we don't specify a tech, assume scmos.
    # This may be overridden when we read a config file though...
    if OPTS.tech_name == "":
        OPTS.tech_name = "scmos"
    # Alias SCMOS to 180nm
    if OPTS.tech_name == "scmos":
        OPTS.tech_name = "scn4m_subm"
    # Alias s8 to sky130
    if OPTS.tech_name == "s8":
        OPTS.tech_name = "sky130"

    return (options, args)
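
# Illustrative invocation exercising the options above (the argument values
# are examples, not defaults):
#
#   python openram.py -t scn4m_subm -j 4 -o mysram -p /tmp/openram_out myconfig.py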


def print_banner():
    """ Conditionally print the banner to stdout """
    global OPTS
    if OPTS.is_unit_test:
        return

    debug.print_raw("|==============================================================================|")
    debug.print_raw("|=========" + NAME.center(60) + "=========|")
    debug.print_raw("|=========" + " ".center(60) + "=========|")
    debug.print_raw("|=========" + "VLSI Design and Automation Lab".center(60) + "=========|")
    debug.print_raw("|=========" + "Computer Science and Engineering Department".center(60) + "=========|")
    debug.print_raw("|=========" + "University of California Santa Cruz".center(60) + "=========|")
    debug.print_raw("|=========" + " ".center(60) + "=========|")
    user_info = "Usage help: openram-user-group@ucsc.edu"
    debug.print_raw("|=========" + user_info.center(60) + "=========|")
    dev_info = "Development help: openram-dev-group@ucsc.edu"
    debug.print_raw("|=========" + dev_info.center(60) + "=========|")
    temp_info = "Temp dir: {}".format(OPTS.openram_temp)
    debug.print_raw("|=========" + temp_info.center(60) + "=========|")
    debug.print_raw("|=========" + "See LICENSE for license info".center(60) + "=========|")
    debug.print_raw("|==============================================================================|")


def check_versions():
    """ Run some checks of required software versions. """

    # Now require python >=3.5
    major_python_version = sys.version_info.major
    minor_python_version = sys.version_info.minor
    major_required = 3
    minor_required = 5
    if not (major_python_version == major_required and minor_python_version >= minor_required):
        debug.error("Python {0}.{1} or greater is required.".format(major_required, minor_required), -1)

    # FIXME: Check versions of other tools here??
    # or, this could be done in each module (e.g. verify, characterizer, etc.)
    global OPTS

    try:
        import coverage
        OPTS.coverage = 1
    except ModuleNotFoundError:
        OPTS.coverage = 0


def init_openram(config_file, is_unit_test=True):
    """ Initialize the technology, paths, simulators, etc. """

    check_versions()

    debug.info(1, "Initializing OpenRAM...")

    setup_paths()

    read_config(config_file, is_unit_test)

    import_tech()

    set_default_corner()

    init_paths()

    from sram_factory import factory
    factory.reset()

    global OPTS
    global CHECKPOINT_OPTS

    # This is a hack. If we are running a unit test and have checkpointed
    # the options, load them rather than reading the config file.
    # This way, the configuration is reloaded at the start of every unit test.
    # If a unit test fails, we don't have to worry about restoring the old
    # config values that may have been tested.
    if is_unit_test and CHECKPOINT_OPTS:
        OPTS.__dict__ = CHECKPOINT_OPTS.__dict__.copy()
        return

    # Setup correct bitcell names
    setup_bitcell()

    # Import these to find the executables for checkpointing
    import characterizer
    import verify

    # Make a checkpoint of the options so we can restore
    # after each unit test
    if not CHECKPOINT_OPTS:
        CHECKPOINT_OPTS = copy.copy(OPTS)


def setup_bitcell():
    """
    Determine the correct custom or parameterized bitcell for the design.
    """
    # If the user didn't over-ride the bitcell to the parameterized bitcell,
    # figure out the right bitcell to use based on the port configuration.
    if OPTS.bitcell == "pbitcell":
        OPTS.bitcell = "pbitcell"
        OPTS.dummy_bitcell = "dummy_pbitcell"
        OPTS.replica_bitcell = "replica_pbitcell"
    else:
        num_ports = OPTS.num_rw_ports + OPTS.num_w_ports + OPTS.num_r_ports
        OPTS.bitcell = "bitcell_{}port".format(num_ports)
        OPTS.dummy_bitcell = "dummy_" + OPTS.bitcell
        OPTS.replica_bitcell = "replica_" + OPTS.bitcell

    # See if bitcell exists
    try:
        __import__(OPTS.bitcell)
    except ImportError:
        # We couldn't find the custom bitcell (or its custom replica bitcell),
        # so use the pbitcell and give a warning if not in unit test mode.
        OPTS.bitcell = "pbitcell"
        OPTS.dummy_bitcell = "dummy_pbitcell"
        OPTS.replica_bitcell = "replica_pbitcell"
        if not OPTS.is_unit_test:
            msg = "Didn't find {0}rw {1}w {2}r port bitcell. ".format(OPTS.num_rw_ports, OPTS.num_w_ports, OPTS.num_r_ports)
            debug.warning("{} Using the parameterized bitcell which may have suboptimal density.".format(msg))
    debug.info(1, "Using bitcell: {}".format(OPTS.bitcell))
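
    # Example (illustrative): with num_rw_ports=1, num_w_ports=0 and
    # num_r_ports=1, setup_bitcell() selects "bitcell_2port",
    # "dummy_bitcell_2port" and "replica_bitcell_2port", falling back to the
    # pbitcell family if no such modules can be imported.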


def get_tool(tool_type, preferences, default_name=None):
    """
    Find which tool we have from a list of preferences and return the
    one selected and its full path. If a default is specified, find
    only that one and error out if it cannot be found.
    """
    debug.info(2, "Finding {} tool...".format(tool_type))

    if default_name:
        exe_name = find_exe(default_name)
        if exe_name is None:
            debug.error("{0} not found. Cannot find {1} tool. ".format(default_name, tool_type)
                        + "Disable DRC/LVS with check_lvsdrc=False to ignore.",
                        2)
        else:
            debug.info(1, "Using {0}: {1}".format(tool_type, exe_name))
            return (default_name, exe_name)
    else:
        for name in preferences:
            exe_name = find_exe(name)
            if exe_name is not None:
                debug.info(1, "Using {0}: {1}".format(tool_type, exe_name))
                return (name, exe_name)
            else:
                debug.info(1,
                           "Could not find {0}, trying next {1} tool.".format(name, tool_type))
        else:
            return (None, "")
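
# Example (illustrative; the preference list is an assumption, real callers
# such as the verify and characterizer modules supply their own):
#
#   spice_name, spice_exe = get_tool("spice", ["ngspice", "hspice"])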


def read_config(config_file, is_unit_test=True):
    """
    Read the configuration file that defines a few parameters. The
    config file is just a Python file that defines some config
    options. This will only actually get read the first time. Subsequent
    reads will just restore the previous copy (ask mrg)
    """
    global OPTS

    # If it is not already an abs path, make it one
    if not os.path.isabs(config_file):
        config_file = os.getcwd() + "/" + config_file

    # Strip any .py extension in case only the base name was given
    config_file = re.sub(r'\.py$', "", config_file)

    # Expand the user if it is used
    config_file = os.path.expanduser(config_file)

    OPTS.config_file = config_file + ".py"
    # Add the path to the system path
    # so we can import things in the other directory
    dir_name = os.path.dirname(config_file)
    module_name = os.path.basename(config_file)

    # Check that the module name adheres to Python's module naming conventions.
    # This will assist the user in interpreting subsequent errors in loading
    # the module. Valid Python module naming is described here:
    # https://docs.python.org/3/reference/simple_stmts.html#the-import-statement
    if not module_name.isidentifier():
        debug.error("Configuration file name is not a valid Python module name: "
                    "{0}. It should be a valid identifier.".format(module_name))

    # Prepend the path so the config's directory is searched first
    # (e.g. if we are using the example config)
    sys.path.insert(0, dir_name)
    # Import the configuration file that selects which modules to use
    debug.info(1, "Configuration file is " + config_file + ".py")
    try:
        config = importlib.import_module(module_name)
    except:
        debug.error("Unable to read configuration file: {0}".format(config_file), 2)

    OPTS.overridden = {}
    for k, v in config.__dict__.items():
        # The command line will over-ride the config file
        # except in the case of the tech name! This is because the tech name
        # is sometimes used to specify the config file itself (e.g. unit tests)
        # Note that if we re-read a config file, nothing will get read again!
        if k not in OPTS.__dict__ or k == "tech_name":
            OPTS.__dict__[k] = v
            OPTS.overridden[k] = True

    # Massage the output path to be an absolute one
    if not OPTS.output_path.endswith('/'):
        OPTS.output_path += "/"
    if not OPTS.output_path.startswith('/'):
        OPTS.output_path = os.getcwd() + "/" + OPTS.output_path
    debug.info(1, "Output saved in " + OPTS.output_path)

    # Remember if we are running unit tests to reduce output
    OPTS.is_unit_test = is_unit_test

    # If we are only generating a netlist, we can't do DRC/LVS
    if OPTS.netlist_only:
        OPTS.check_lvsdrc = False

    # If config didn't set output name, make a reasonable default.
    if (OPTS.output_name == ""):
        ports = ""
        if OPTS.num_rw_ports > 0:
            ports += "{}rw_".format(OPTS.num_rw_ports)
        if OPTS.num_w_ports > 0:
            ports += "{}w_".format(OPTS.num_w_ports)
        if OPTS.num_r_ports > 0:
            ports += "{}r_".format(OPTS.num_r_ports)
        OPTS.output_name = "sram_{0}b_{1}_{2}{3}".format(OPTS.word_size,
                                                         OPTS.num_words,
                                                         ports,
                                                         OPTS.tech_name)
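        # Example (illustrative): word_size=16, num_words=256, one RW port and
        # tech_name="scn4m_subm" give the default output name
        # "sram_16b_256_1rw_scn4m_subm".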


def end_openram():
    """ Clean up openram for a proper exit """
    cleanup_paths()

    if OPTS.check_lvsdrc:
        import verify
        verify.print_drc_stats()
        verify.print_lvs_stats()
        verify.print_pex_stats()


def purge_temp():
    """ Remove the contents of the temp directory (but keep the directory). """
    debug.info(1,
               "Purging temp directory: {}".format(OPTS.openram_temp))
    # Removing the directory itself would annoyingly mean you have to re-cd
    # into it on each debug iteration, so only remove its contents:
    # shutil.rmtree(OPTS.openram_temp, ignore_errors=True)
    contents = [os.path.join(OPTS.openram_temp, i) for i in os.listdir(OPTS.openram_temp)]
    for i in contents:
        if os.path.isfile(i) or os.path.islink(i):
            os.remove(i)
        else:
            shutil.rmtree(i)


def cleanup_paths():
    """
    We should clean up the temp directory after execution.
    """
    global OPTS
    if OPTS.keep_temp:
        debug.info(0,
                   "Preserving temp directory: {}".format(OPTS.openram_temp))
        return
    elif os.path.exists(OPTS.openram_temp):
        purge_temp()


def setup_paths():
    """ Set up the non-tech related paths. """
    debug.info(2, "Setting up paths...")

    global OPTS

    try:
        OPENRAM_HOME = os.path.abspath(os.environ.get("OPENRAM_HOME"))
    except:
        debug.error("$OPENRAM_HOME is not properly defined.", 1)
    debug.check(os.path.isdir(OPENRAM_HOME),
                "$OPENRAM_HOME does not exist: {0}".format(OPENRAM_HOME))

    # Add all of the subdirs to the python path
    # These subdirs are modules and don't need
    # to be added: characterizer, verify
    subdirlist = [item for item in os.listdir(OPENRAM_HOME) if os.path.isdir(os.path.join(OPENRAM_HOME, item))]
    for subdir in subdirlist:
        full_path = "{0}/{1}".format(OPENRAM_HOME, subdir)
        debug.check(os.path.isdir(full_path),
                    "$OPENRAM_HOME/{0} does not exist: {1}".format(subdir, full_path))
        if "__pycache__" not in full_path:
            sys.path.append("{0}".format(full_path))

    if not OPTS.openram_temp.endswith('/'):
        OPTS.openram_temp += "/"
    debug.info(1, "Temporary files saved in " + OPTS.openram_temp)


def is_exe(fpath):
    """ Return true if the given path is an executable file that exists. """
    return os.path.exists(fpath) and os.access(fpath, os.X_OK)


def find_exe(check_exe):
    """
    Check if the binary exists in any path dir
    and return the full path.
    """
    # Check each directory in the PATH for the executable
    for path in os.environ["PATH"].split(os.pathsep):
        exe = os.path.join(path, check_exe)
        # if it is found, return the first version found
        if is_exe(exe):
            return exe
    return None
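
# Example (illustrative): find_exe("ngspice") returns something like
# "/usr/bin/ngspice" when ngspice is on the PATH, and None when it is not.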


def init_paths():
    """ Create the temp and output directories if they don't exist. """
    if os.path.exists(OPTS.openram_temp):
        purge_temp()
    else:
        # make the directory if it doesn't exist
        try:
            debug.info(1,
                       "Creating temp directory: {}".format(OPTS.openram_temp))
            os.makedirs(OPTS.openram_temp, 0o750)
        except OSError as e:
            if e.errno == 17:  # errno.EEXIST
                os.chmod(OPTS.openram_temp, 0o750)

    # Don't delete the output dir, it may have other files!
    # make the directory if it doesn't exist
    try:
        os.makedirs(OPTS.output_path, 0o750)
    except OSError as e:
        if e.errno == 17:  # errno.EEXIST
            os.chmod(OPTS.output_path, 0o750)
    except:
        debug.error("Unable to make output directory.", -1)


def set_default_corner():
    """ Set the default corner. """

    import tech
    # Set some default options now based on the technology...
    if (OPTS.process_corners == ""):
        if OPTS.nominal_corner_only:
            OPTS.process_corners = ["TT"]
        else:
            OPTS.process_corners = list(tech.spice["fet_models"].keys())

    if (OPTS.supply_voltages == ""):
        if OPTS.nominal_corner_only:
            OPTS.supply_voltages = [tech.spice["supply_voltages"][1]]
        else:
            OPTS.supply_voltages = tech.spice["supply_voltages"]

    if (OPTS.temperatures == ""):
        if OPTS.nominal_corner_only:
            OPTS.temperatures = [tech.spice["temperatures"][1]]
        else:
            OPTS.temperatures = tech.spice["temperatures"]

    # Load scales are fanout multiples of the DFF input cap
    if (OPTS.load_scales == ""):
        OPTS.load_scales = [0.25, 1, 4]

    # Slew scales are fanout multiples of the default spice input slew
    if (OPTS.slew_scales == ""):
        OPTS.slew_scales = [0.25, 1, 8]
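
    # Example (illustrative; the actual values come from the technology's
    # tech.spice tables): with nominal_corner_only=True, the defaults collapse
    # to the typical corner, e.g. process_corners=["TT"] plus a single nominal
    # supply voltage and temperature entry.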


def import_tech():
    """ Dynamically adds the tech directory to the path and imports it. """
    global OPTS

    debug.info(2,
               "Importing technology: " + OPTS.tech_name)

    # environment variable should point to the technology dir
    try:
        OPENRAM_TECH = os.path.abspath(os.environ.get("OPENRAM_TECH"))
    except:
        debug.error("$OPENRAM_TECH environment variable is not defined.", 1)

    # Add all of the paths
    for tech_path in OPENRAM_TECH.split(":"):
        debug.check(os.path.isdir(tech_path),
                    "$OPENRAM_TECH does not exist: {0}".format(tech_path))
        sys.path.append(tech_path)
        debug.info(1, "Adding technology path: {}".format(tech_path))

    # Import the tech
    try:
        tech_mod = __import__(OPTS.tech_name)
    except ImportError:
        debug.error("Nonexistent technology module: {0}".format(OPTS.tech_name), -1)

    OPTS.openram_tech = os.path.dirname(tech_mod.__file__) + "/"

    # Add the tech directory
    tech_path = OPTS.openram_tech
    sys.path.append(tech_path)
    try:
        import tech
    except ImportError:
        debug.error("Could not load tech module.", -1)

    # Add custom modules of the technology to the path, if they exist
    custom_mod_path = os.path.join(tech_path, "modules/")
    if os.path.exists(custom_mod_path):
        sys.path.append(custom_mod_path)


def print_time(name, now_time, last_time=None, indentation=2):
    """ Print a statement about the time delta. """
    global OPTS

    # Don't print during testing unless verbose
    if not OPTS.is_unit_test or OPTS.verbose_level > 0:
        if last_time:
            time = str(round((now_time - last_time).total_seconds(), 1)) + " seconds"
        else:
            time = now_time.strftime('%m/%d/%Y %H:%M:%S')
        debug.print_raw("{0} {1}: {2}".format("*" * indentation, name, time))
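
    # Example (illustrative): print_time("SRAM creation", end, start) prints a
    # line like "** SRAM creation: 12.3 seconds" when a start time is given,
    # or the absolute timestamp when it is not.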


def report_status():
    """
    Check for valid arguments and report the
    info about the SRAM being generated.
    """
    global OPTS

    # Check if all arguments are integers for bits, size, banks
    if type(OPTS.word_size) != int:
        debug.error("{0} is not an integer in config file.".format(OPTS.word_size))
    if type(OPTS.num_words) != int:
        debug.error("{0} is not an integer in config file.".format(OPTS.num_words))
    if type(OPTS.write_size) is not int and OPTS.write_size is not None:
        debug.error("{0} is not an integer in config file.".format(OPTS.write_size))

    # If a write mask is specified by the user, the write size must divide
    # the word size evenly so that whole words can still be written at once.
    if OPTS.write_size is not None:
        if (OPTS.word_size % OPTS.write_size != 0):
            debug.error("Write size needs to be an integer multiple of word size.")
        # If write size is more than half of the word size,
        # then it doesn't need a write mask. It would be writing
        # the whole word.
        if (OPTS.write_size < 1 or OPTS.write_size > OPTS.word_size / 2):
            debug.error("Write size needs to be between 1 and {0} bits.".format(OPTS.word_size // 2))

    if not OPTS.tech_name:
        debug.error("Tech name must be specified in config file.")

    debug.print_raw("Technology: {0}".format(OPTS.tech_name))
    total_size = OPTS.word_size * OPTS.num_words * OPTS.num_banks
    debug.print_raw("Total size: {} bits".format(total_size))
    if total_size >= 2**14:
        debug.warning("Requesting such a large memory size ({0}) will have a large run-time. ".format(total_size) +
                      "Consider using multiple smaller banks.")
    debug.print_raw("Word size: {0}\nWords: {1}\nBanks: {2}".format(OPTS.word_size,
                                                                    OPTS.num_words,
                                                                    OPTS.num_banks))
    if (OPTS.write_size != OPTS.word_size):
        debug.print_raw("Write size: {}".format(OPTS.write_size))
    debug.print_raw("RW ports: {0}\nR-only ports: {1}\nW-only ports: {2}".format(OPTS.num_rw_ports,
                                                                                 OPTS.num_r_ports,
                                                                                 OPTS.num_w_ports))

    if OPTS.netlist_only:
        debug.print_raw("Netlist only mode (no physical design is being done, netlist_only=False to disable).")

    if not OPTS.route_supplies:
        debug.print_raw("Design supply routing skipped. Supplies will have multiple must-connect pins. (route_supplies=True to enable supply routing).")

    if not OPTS.inline_lvsdrc:
        debug.print_raw("DRC/LVS/PEX is only run on the top-level design to save run-time (inline_lvsdrc=True to do inline checking).")

    if not OPTS.check_lvsdrc:
        debug.print_raw("DRC/LVS/PEX is disabled (check_lvsdrc=True to enable).")

    if OPTS.analytical_delay:
        debug.print_raw("Characterization is disabled (using analytical delay models) (analytical_delay=False to simulate).")
    else:
        if OPTS.spice_name != "":
            debug.print_raw("Performing simulation-based characterization with {}".format(OPTS.spice_name))
        if OPTS.trim_netlist:
            debug.print_raw("Trimming netlist to speed up characterization (trim_netlist=False to disable).")
    if OPTS.nominal_corner_only:
        debug.print_raw("Only generating nominal corner timing.")