# See LICENSE for licensing information.
#
# Copyright (c) 2016-2021 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
"""
This is a DRC/LVS/PEX interface file for magic + netgen.

We include the tech file for SCN4M_SUBM in the tech directory,
which is loaded during DRC.

You can use this interactively by appending the magic system path in
your .magicrc file:

path sys /Users/mrg/openram/technology/scn3me_subm/tech

We require the version 30 Magic rules, which allow via stacking.

We obtained this file from Qflow ( http://opencircuitdesign.com/qflow/index.html )
and include its appropriate license.
"""

import os
import re
import shutil
import debug
from globals import OPTS
from .run_script import *

# Keep track of statistics
num_drc_runs = 0
num_lvs_runs = 0
num_pex_runs = 0
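
# A minimal usage sketch (the cell and file names below are hypothetical;
# the real callers live elsewhere in OpenRAM):
#
#   errors = run_drc("sram_2_16", "sram_2_16.gds", sp_name="sram_2_16.sp")
#   errors += run_lvs("sram_2_16", "sram_2_16.gds", "sram_2_16.sp")
#   errors += run_pex("sram_2_16", "sram_2_16.gds", "sram_2_16.sp",
#                     output="sram_2_16.pex.netlist")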

# def filter_gds(cell_name, input_gds, output_gds):
#     """ Run the gds through magic for any layer processing """
#     global OPTS
#
#     # Copy .magicrc file into temp dir
#     magic_file = OPTS.openram_tech + "tech/.magicrc"
#     if os.path.exists(magic_file):
#         shutil.copy(magic_file, OPTS.openram_temp)
#     else:
#         debug.warning("Could not locate .magicrc file: {}".format(magic_file))
#
#     run_file = OPTS.openram_temp + "run_filter.sh"
#     f = open(run_file, "w")
#     f.write("#!/bin/sh\n")
#     f.write("{} -dnull -noconsole << EOF\n".format(OPTS.magic_exe[1]))
#     f.write("gds polygon subcell true\n")
#     f.write("gds warning default\n")
#     f.write("gds read {}\n".format(input_gds))
#     f.write("load {}\n".format(cell_name))
#     f.write("cellname delete \\(UNNAMED\\)\n")
#     # f.write("writeall force\n")
#     f.write("select top cell\n")
#     f.write("gds write {}\n".format(output_gds))
#     f.write("quit -noprompt\n")
#     f.write("EOF\n")
#
#     f.close()
#     os.system("chmod u+x {}".format(run_file))
#
#     (outfile, errfile, resultsfile) = run_script(cell_name, "filter")


def write_drc_script(cell_name, gds_name, extract, final_verification, output_path, sp_name=None):
    """ Write a magic script to perform DRC and optionally extraction. """
    global OPTS
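
    # Overview (descriptive note): this function writes two shell scripts into
    # output_path: run_ext.sh, which drives Magic to read the GDS, save a .mag
    # view and (optionally) extract a SPICE netlist, and run_drc.sh, which
    # reloads the saved .mag view and runs the DRC check. Both are executed
    # later (see run_drc below, which invokes run_script with the "ext" and
    # "drc" suffixes).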

    # Copy .magicrc file into the output directory
    full_magic_file = os.environ.get('OPENRAM_MAGICRC', None)
    if not full_magic_file:
        full_magic_file = OPTS.openram_tech + "tech/.magicrc"

    if os.path.exists(full_magic_file):
        shutil.copy(full_magic_file, output_path + "/.magicrc")
    else:
        debug.warning("Could not locate .magicrc file: {}".format(full_magic_file))

    run_file = output_path + "run_ext.sh"
    f = open(run_file, "w")
    f.write("#!/bin/sh\n")
    f.write('export OPENRAM_TECH="{}"\n'.format(os.environ['OPENRAM_TECH']))
    f.write('echo "$(date): Starting GDS to MAG using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write('\n')
    f.write("{} -dnull -noconsole << EOF\n".format(OPTS.drc_exe[1]))
    # Do not run DRC for extraction/conversion
    f.write("drc off\n")
    f.write("set VDD vdd\n")
    f.write("set GND gnd\n")
    f.write("set SUB gnd\n")
    # f.write("gds polygon subcell true\n")
    f.write("gds warning default\n")
    # Flatten the transistors
    # Bug in Netgen 1.5.194 when using this...
    try:
        from tech import blackbox_cells
    except ImportError:
        blackbox_cells = []

    try:
        from tech import flatglob
    except ImportError:
        flatglob = []

    f.write("gds readonly true\n")

    for entry in flatglob:
        f.write("gds flatglob " + entry + "\n")
    # These two options are temporarily disabled until Tim fixes a bug in magic related
    # to flattening channel routes and vias (hierarchy with no devices in it). Otherwise,
    # they appear to be disconnected.
    f.write("gds flatten true\n")
    f.write("gds ordering true\n")
    f.write("gds read {}\n".format(gds_name))
    f.write('puts "Finished reading gds {}"\n'.format(gds_name))
    f.write("load {}\n".format(cell_name))
    f.write('puts "Finished loading cell {}"\n'.format(cell_name))
    f.write("cellname delete \\(UNNAMED\\)\n")
    f.write("writeall force\n")

    # Extract
    if not sp_name:
        f.write("port makeall\n")
    else:
        f.write("readspice {}\n".format(sp_name))
    if not extract:
        pre = "#"
    else:
        pre = ""
    # Hack to work around unit scales in SkyWater
    if OPTS.tech_name == "sky130":
        f.write(pre + "extract style ngspice(si)\n")
    if final_verification and OPTS.route_supplies:
        f.write(pre + "extract unique all\n")
    f.write(pre + "extract all\n")
    f.write(pre + "select top cell\n")
    f.write(pre + "feedback why\n")
    f.write('puts "Finished extract"\n')
    # f.write(pre + "ext2spice hierarchy on\n")
    # f.write(pre + "ext2spice scale off\n")
    # lvs exists in 8.2.79, but be backward compatible for now
    # f.write(pre + "ext2spice lvs\n")
    f.write(pre + "ext2spice hierarchy on\n")
    f.write(pre + "ext2spice format ngspice\n")
    f.write(pre + "ext2spice cthresh infinite\n")
    f.write(pre + "ext2spice rthresh infinite\n")
    f.write(pre + "ext2spice renumber off\n")
    f.write(pre + "ext2spice scale off\n")
    f.write(pre + "ext2spice blackbox on\n")
    f.write(pre + "ext2spice subcircuit top on\n")
    f.write(pre + "ext2spice global off\n")

    # Can choose hspice, ngspice, or spice3,
    # but they all seem compatible enough.
    f.write(pre + "ext2spice format ngspice\n")
    f.write(pre + "ext2spice {}\n".format(cell_name))
    f.write(pre + "select top cell\n")
    f.write(pre + "feedback why\n")
    f.write('puts "Finished ext2spice"\n')

    f.write("quit -noprompt\n")
    f.write("EOF\n")
    f.write("magic_retcode=$?\n")
    f.write('echo "$(date): Finished ($magic_retcode) GDS to MAG using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write("exit $magic_retcode\n")

    f.close()
    os.system("chmod u+x {}".format(run_file))
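
    # For reference, the heredoc written above wraps roughly this Magic
    # command sequence (assuming extraction is enabled):
    #   drc off
    #   set VDD vdd / set GND gnd / set SUB gnd
    #   gds flatten true ; gds read <gds_name>
    #   load <cell_name> ; writeall force
    #   readspice <sp_name>   (or "port makeall" when no spice netlist is given)
    #   extract all
    #   ext2spice <options...> ; ext2spice <cell_name>
    #   quit -noprompt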

    run_file = output_path + "run_drc.sh"
    f = open(run_file, "w")
    f.write("#!/bin/sh\n")
    f.write('export OPENRAM_TECH="{}"\n'.format(os.environ['OPENRAM_TECH']))
    # Copy the bitcell mag files if they exist
    for blackbox_cell_name in blackbox_cells:
        mag_file = OPTS.openram_tech + "maglef_lib/" + blackbox_cell_name + ".mag"
        debug.check(os.path.isfile(mag_file), "Could not find blackbox cell {}".format(mag_file))
        f.write('cp {0} .\n'.format(mag_file))

    f.write('echo "$(date): Starting DRC using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write('\n')
    f.write("{} -dnull -noconsole << EOF\n".format(OPTS.drc_exe[1]))
    f.write("load {} -dereference\n".format(cell_name))
    f.write('puts "Finished loading cell {}"\n'.format(cell_name))
    f.write("cellname delete \\(UNNAMED\\)\n")
    f.write("select top cell\n")
    f.write("expand\n")
    f.write('puts "Finished expanding"\n')
    f.write("drc euclidean on\n")
    f.write("drc check\n")
    f.write('puts "Finished drc check"\n')
    f.write("drc catchup\n")
    f.write('puts "Finished drc catchup"\n')
    # This is needed instead of "drc count total" because it displays
    # some errors that are not "DRC" errors.
    # f.write("puts -nonewline \"Total DRC errors found: \"\n")
    # f.write("puts stdout [drc listall count total]\n")
    f.write("drc count total\n")
    f.write("quit -noprompt\n")
    f.write("EOF\n")
    f.write("magic_retcode=$?\n")
    f.write('echo "$(date): Finished ($magic_retcode) DRC using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write("exit $magic_retcode\n")

    f.close()
    os.system("chmod u+x {}".format(run_file))


def run_drc(cell_name, gds_name, sp_name=None, extract=True, final_verification=False):
    """Run DRC check on a cell which is implemented in gds_name."""

    global num_drc_runs
    num_drc_runs += 1

    write_drc_script(cell_name, gds_name, extract, final_verification, OPTS.openram_temp, sp_name=sp_name)
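
    # run_script (imported from run_script) is assumed here to execute the
    # generated run_<suffix>.sh in the temp directory and to return the paths
    # of its stdout, stderr and results files.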

    (outfile, errfile, resultsfile) = run_script(cell_name, "ext")

    (outfile, errfile, resultsfile) = run_script(cell_name, "drc")

    # Check the result for these lines in the summary:
    # Total DRC errors found: 0
    # The count is shown in this format:
    # Cell replica_cell_6t has 3 error tiles.
    # Cell tri_gate_array has 8 error tiles.
    # etc.
    try:
        f = open(outfile, "r")
    except FileNotFoundError:
        debug.error("Unable to load DRC results file from {}. Is magic set up?".format(outfile), 1)

    results = f.readlines()
    f.close()
    errors = 1
    # Those lines should be the last three.
    for line in results:
        if "Total DRC errors found:" in line:
            errors = int(re.split(": ", line)[1])
            break
    else:
        debug.error("Unable to find the total error line in Magic output.", 1)

    # Always display this summary.
    result_str = "DRC Errors {0}\t{1}".format(cell_name, errors)
    if errors > 0:
        for line in results:
            if "error tiles" in line:
                debug.info(1, line.rstrip("\n"))
        debug.warning(result_str)
    else:
        debug.info(1, result_str)

    return errors


def write_lvs_script(cell_name, gds_name, sp_name, final_verification=False, output_path=None):
    """ Write a netgen script to perform LVS. """

    global OPTS
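
    # Descriptive note: the generated run_lvs.sh drives netgen's "lvs" command,
    # comparing the layout netlist <cell_name>.spice (typically produced by the
    # extraction script written in write_drc_script) against the reference
    # schematic netlist sp_name, using the technology setup.tcl (or "nosetup"
    # if none is found).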

    if not output_path:
        output_path = OPTS.openram_temp

    # Copy setup.tcl file into the output directory
    full_setup_file = os.environ.get('OPENRAM_NETGENRC', None)
    if not full_setup_file:
        full_setup_file = OPTS.openram_tech + "tech/setup.tcl"
    setup_file = os.path.basename(full_setup_file)

    if os.path.exists(full_setup_file):
        # Copy setup.tcl file into temp dir
        shutil.copy(full_setup_file, output_path)

        setup_file_object = open(output_path + "/setup.tcl", 'a')
        setup_file_object.write("# Increase the column sizes for ease of reading long names\n")
        setup_file_object.write("::netgen::format 120\n")
        setup_file_object.close()
    else:
        setup_file = 'nosetup'

    run_file = output_path + "/run_lvs.sh"
    f = open(run_file, "w")
    f.write("#!/bin/sh\n")
    f.write('export OPENRAM_TECH="{}"\n'.format(os.environ['OPENRAM_TECH']))
    f.write('echo "$(date): Starting LVS using Netgen {}"\n'.format(OPTS.lvs_exe[1]))
    f.write("{} -noconsole << EOF\n".format(OPTS.lvs_exe[1]))
    # f.write("readnet spice {0}.spice\n".format(cell_name))
    # f.write("readnet spice {0}\n".format(sp_name))
    f.write("lvs {{{0}.spice {0}}} {{{1} {0}}} {2} {0}.lvs.report -full -json\n".format(cell_name, sp_name, setup_file))
    f.write("quit\n")
    f.write("EOF\n")
    f.write("magic_retcode=$?\n")
    f.write('echo "$(date): Finished ($magic_retcode) LVS using Netgen {}"\n'.format(OPTS.lvs_exe[1]))
    f.write("exit $magic_retcode\n")
    f.close()
    os.system("chmod u+x {}".format(run_file))


def run_lvs(cell_name, gds_name, sp_name, final_verification=False, output_path=None):
    """Run LVS check on a given top-level name which is
    implemented in gds_name and sp_name. Final verification will
    ensure that there are no remaining virtual connections. """

    global num_lvs_runs
    num_lvs_runs += 1

    if not output_path:
        output_path = OPTS.openram_temp

    write_lvs_script(cell_name, gds_name, sp_name, final_verification)

    (outfile, errfile, resultsfile) = run_script(cell_name, "lvs")

    total_errors = 0

    # Check the result for these lines in the summary:
    try:
        f = open(resultsfile, "r")
    except FileNotFoundError:
        debug.error("Unable to load LVS results from {}".format(resultsfile), 1)

    results = f.readlines()
    f.close()
    # Look for the results after the final "Subcircuit summary:"
    # which will be the top-level netlist.
    final_results = []
    for line in reversed(results):
        if "Subcircuit summary:" in line:
            break
        else:
            final_results.insert(0, line)

    # Property errors in any module.
    test = re.compile("Property errors were found.")
    propertyerrors = list(filter(test.search, results))
    total_errors += len(propertyerrors)

    # Require pins to match?
    # Cell pin lists for pnand2_1.spice and pnand2_1 altered to match.
    # test = re.compile(".*altered to match.")
    # pinerrors = list(filter(test.search, results))
    # if len(pinerrors) > 0:
    #     debug.warning("Pins altered to match in {}.".format(cell_name))

    # if len(propertyerrors) > 0:
    #     debug.warning("Property errors found, but not checking them.")

    # Netlists do not match.
    test = re.compile("Netlists do not match.")
    incorrect = list(filter(test.search, final_results))
    total_errors += len(incorrect)

    # Netlists match uniquely.
    test = re.compile("match uniquely.")
    uniquely = list(filter(test.search, final_results))

    # Netlists match correctly.
    test = re.compile("match correctly.")
    correctly = list(filter(test.search, final_results))

    # Top level pins mismatch.
    test = re.compile("The top level cell failed pin matching.")
    pins_incorrectly = list(filter(test.search, final_results))

    # Fail if the pins mismatched.
    if len(pins_incorrectly) > 0:
        total_errors += 1

    # Fail if they don't match. Something went wrong!
    if len(uniquely) == 0 and len(correctly) == 0:
        total_errors += 1

    if len(uniquely) == 0 and len(correctly) > 0:
        debug.warning("{0}\tLVS matches but not uniquely".format(cell_name))

    if total_errors > 0:
        # Just print out the whole file, it is short.
        for e in results:
            debug.info(1, e.strip("\n"))
        debug.error("{0}\tLVS mismatch (results in {1})".format(cell_name,
                                                                resultsfile))
    else:
        debug.info(1, "{0}\tLVS matches".format(cell_name))

    return total_errors


def run_pex(name, gds_name, sp_name, output=None, final_verification=False, output_path=None):
    """Run pex on a given top-level name which is
    implemented in gds_name and sp_name. """
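
    # Flow summary (descriptive note): write run_pex.sh, run it, rename the
    # device models in the resulting netlist, scan the log for ERROR lines,
    # and rewrite the top-level .subckt header via correct_port().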

    global num_pex_runs
    num_pex_runs += 1

    if not output_path:
        output_path = OPTS.openram_temp

    os.chdir(output_path)

    if output is None:
        output = name + ".pex.netlist"

    # Check if the LVS report has been generated;
    # if not, run DRC and LVS first.
    if not os.path.isfile(name + ".lvs.report"):
        run_drc(name, gds_name)
        run_lvs(name, gds_name, sp_name)

    # pex_fix ran PEX using a script, while the original dev method used
    # batch mode. The old dev batch-mode code does not run and is split
    # into functions (see write_batch_pex_rule below).
    pex_runset = write_script_pex_rule(gds_name, name, sp_name, output)

    errfile = "{0}{1}.pex.err".format(output_path, name)
    outfile = "{0}{1}.pex.out".format(output_path, name)

    script_cmd = "{0} 2> {1} 1> {2}".format(pex_runset,
                                            errfile,
                                            outfile)
    cmd = script_cmd
    debug.info(2, cmd)
    os.system(cmd)

    # Rename technology models
    pex_netlist = open(output, 'r')
    s = pex_netlist.read()
    pex_netlist.close()
    s = s.replace('pfet', 'p')
    s = s.replace('nfet', 'n')
    f = open(output, 'w')
    f.write(s)
    f.close()

    # Also check the output file
    f = open(outfile, "r")
    results = f.readlines()
    f.close()
    out_errors = find_error(results)
    debug.check(os.path.isfile(output), "Couldn't find PEX extracted output.")

    correct_port(name, output, sp_name)
    return out_errors


def write_batch_pex_rule(gds_name, name, sp_name, output):
    """
    The old dev-branch batch-mode runset.
    Magic can perform extraction with the following:

    #!/bin/sh
    rm -f $1.ext
    rm -f $1.spice
    magic -dnull -noconsole << EOF
    tech load SCN3ME_SUBM.30
    #scalegrid 1 2
    gds rescale no
    gds polygon subcell true
    gds warning default
    gds read $1
    extract
    ext2spice scale off
    ext2spice
    quit -noprompt
    EOF
    """
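
    # Note (assumption): "drc" here is expected to be the design-rule dict
    # provided by the technology module (e.g. "from tech import drc"); it is
    # not imported in this file, which is consistent with the note in run_pex
    # that this batch-mode path does not currently run.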
    pex_rules = drc["xrc_rules"]
    pex_runset = {
        'pexRulesFile': pex_rules,
        'pexRunDir': OPTS.openram_temp,
        'pexLayoutPaths': gds_name,
        'pexLayoutPrimary': name,
        # 'pexSourcePath': OPTS.openram_temp + "extracted.sp",
        'pexSourcePath': sp_name,
        'pexSourcePrimary': name,
        'pexReportFile': name + ".lvs.report",
        'pexPexNetlistFile': output,
        'pexPexReportFile': name + ".pex.report",
        'pexMaskDBFile': name + ".maskdb",
        'cmnFDIDEFLayoutPath': name + ".def",
    }

    # Write the runset file
    file = OPTS.openram_temp + "pex_runset"
    f = open(file, "w")
    for k in sorted(pex_runset.keys()):
        f.write("*{0}: {1}\n".format(k, pex_runset[k]))
    f.close()
    return file


def write_script_pex_rule(gds_name, cell_name, sp_name, output):
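    """
    Write run_pex.sh, a script that drives Magic to extract a parasitic
    netlist (including resistance via extresist) for cell_name from gds_name
    and move it to the given output file. Returns the path of the script.
    """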
    global OPTS
    run_file = OPTS.openram_temp + "run_pex.sh"
    f = open(run_file, "w")
    f.write("#!/bin/sh\n")
    f.write('export OPENRAM_TECH="{}"\n'.format(os.environ['OPENRAM_TECH']))
    f.write('echo "$(date): Starting PEX using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write("{} -dnull -noconsole << EOF\n".format(OPTS.drc_exe[1]))
    f.write("gds polygon subcell true\n")
    f.write("gds warning default\n")
    f.write("gds read {}\n".format(gds_name))
    f.write("load {}\n".format(cell_name))
    f.write("select top cell\n")
    f.write("expand\n")
    if not sp_name:
        f.write("port makeall\n")
    else:
        f.write("readspice {}\n".format(sp_name))
    f.write("extract\n")
    f.write("ext2sim labels on\n")
    f.write("ext2sim\n")
    f.write("extresist simplify off\n")
    f.write("extresist all\n")
    f.write("ext2spice hierarchy off\n")
    f.write("ext2spice format ngspice\n")
    f.write("ext2spice renumber off\n")
    f.write("ext2spice scale off\n")
    f.write("ext2spice blackbox on\n")
    f.write("ext2spice subcircuit top on\n")
    f.write("ext2spice global off\n")
    f.write("ext2spice extresist on\n")
    f.write("ext2spice {}\n".format(cell_name))
    f.write("quit -noprompt\n")
    f.write("EOF\n")
    f.write("magic_retcode=$?\n")
    f.write("mv {0}.spice {1}\n".format(cell_name, output))
    f.write('echo "$(date): Finished PEX using Magic {}"\n'.format(OPTS.drc_exe[1]))
    f.write("exit $magic_retcode\n")

    f.close()
    os.system("chmod u+x {}".format(run_file))
    return run_file


def find_error(results):
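    """ Report lines containing "ERROR:" via debug.error and return their count. """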
    # Errors are reported on lines containing "ERROR:"
    test = re.compile("ERROR:")
    stdouterrors = list(filter(test.search, results))
    for e in stdouterrors:
        debug.error(e.strip("\n"))
    out_errors = len(stdouterrors)
    return out_errors


def correct_port(name, output_file_name, ref_file_name):
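    """
    Replace the .subckt header of the extracted PEX netlist with the .SUBCKT
    line from the reference spice file so the port order matches, and append
    extra ports for internal bitcell storage nodes (bitcell_Q...) and bank
    control nets (s_en0).
    """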
    pex_file = open(output_file_name, "r")
    contents = pex_file.read()
    # Locate the start of the circuit definition line
    match = re.search(r'^\.subckt+[^M]*', contents, re.MULTILINE)
    match_index_start = match.start()
    match_index_end = match.end()
    # Store the unchanged parts of the pex file in memory
    pex_file.seek(0)
    part1 = pex_file.read(match_index_start)
    pex_file.seek(match_index_end)
    part2 = pex_file.read()

    bitcell_list = "+ "
    if OPTS.words_per_row:
        for bank in range(OPTS.num_banks):
            row = int(OPTS.num_words / OPTS.words_per_row) - 1
            col = int(OPTS.word_size * OPTS.words_per_row) - 1
            bitcell_list += "bitcell_Q_b{0}_r{1}_c{2} ".format(bank, row, col)
            bitcell_list += "bitcell_Q_bar_b{0}_r{1}_c{2} ".format(bank, row, col)
            for col in range(OPTS.word_size * OPTS.words_per_row):
                for port in range(OPTS.num_r_ports + OPTS.num_w_ports + OPTS.num_rw_ports):
                    bitcell_list += "bl{0}_{1} ".format(bank, col)
                    bitcell_list += "br{0}_{1} ".format(bank, col)
    bitcell_list += "\n"
    control_list = "+ "
    if OPTS.words_per_row:
        for bank in range(OPTS.num_banks):
            control_list += "bank_{}/s_en0".format(bank)
    control_list += '\n'

    part2 = bitcell_list + control_list + part2

    pex_file.close()

    # Obtain the correct definition line from the original spice file
    sp_file = open(ref_file_name, "r")
    contents = sp_file.read()
    circuit_title = re.search(".SUBCKT " + str(name) + ".*", contents)
    circuit_title = circuit_title.group()
    sp_file.close()

    # Write the new pex file with the info in memory
    output_file = open(output_file_name, "w")
    output_file.write(part1)
    output_file.write(circuit_title + '\n')
    output_file.write(part2)
    output_file.close()


def print_drc_stats():
    debug.info(1, "DRC runs: {0}".format(num_drc_runs))


def print_lvs_stats():
    debug.info(1, "LVS runs: {0}".format(num_lvs_runs))


def print_pex_stats():
    debug.info(1, "PEX runs: {0}".format(num_pex_runs))