scripts: use open safe file class

Signed-off-by: Alessandro Comodi <acomodi@antmicro.com>
Alessandro Comodi 2022-03-15 10:47:58 +01:00
parent 6e026bf30e
commit 1bd8142625
34 changed files with 200 additions and 181 deletions

View File

@ -8,6 +8,9 @@
# https://opensource.org/licenses/ISC
#
# SPDX-License-Identifier: ISC
from prjxray.util import OpenSafeFile
class Bitfilter(object):
def __init__(
self, frames_to_include=None, frames_to_exclude=[],

View File

@ -10,7 +10,7 @@
# SPDX-License-Identifier: ISC
import json
import os
from prjxray import util
from prjxray.util import block_type_s2i
# Break frames into WORD_SIZE bit words.
WORD_SIZE_BITS = 32
@ -119,7 +119,7 @@ def addr_bits2word(block_type, top_bottom, cfg_row, cfg_col, minor_addr):
"""Convert a deconstructed address to a 32 bit word"""
# https://www.xilinx.com/support/documentation/user_guides/ug470_7Series_Config.pdf
ret = 0
ret |= util.block_type_s2i[block_type] << 23
ret |= block_type_s2i[block_type] << 23
ret |= {"top": 0, "bottom": 1}[top_bottom] << 22
ret |= cfg_row << 17
ret |= cfg_col << 7
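# A worked example (hypothetical values; assumes block_type_s2i maps
# "CLB_IO_CLK" to 0, per the UG470 7-series FAR layout, and that
# minor_addr fills the low 7 bits):
#   addr_bits2word("CLB_IO_CLK", "bottom", 1, 2, 3)
#   == (0 << 23) | (1 << 22) | (1 << 17) | (2 << 7) | 3 == 0x00420103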

View File

@ -14,6 +14,8 @@ import pickle
import re
from collections import namedtuple
from prjxray.util import OpenSafeFile
def read_root_csv(root_dir):
""" Reads root.csv from raw db directory.
@ -24,7 +26,7 @@ def read_root_csv(root_dir):
tiles = {}
nodes = []
with open(os.path.join(root_dir, 'root.csv')) as f:
with OpenSafeFile(os.path.join(root_dir, 'root.csv')) as f:
for d in csv.DictReader(f):
if d['filetype'] == 'tile':
if d['subtype'] not in tiles:
@ -123,17 +125,17 @@ class NodeLookup(object):
import pyjson5 as json5
import progressbar
for node in progressbar.progressbar(nodes):
with open(node) as f:
with OpenSafeFile(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in self.nodes
self.nodes[node_wires['node']] = node_wires['wires']
def load_from_file(self, fname):
with open(fname, 'rb') as f:
with OpenSafeFile(fname, 'rb') as f:
self.nodes = pickle.load(f)
def save_to_file(self, fname):
with open(fname, 'wb') as f:
with OpenSafeFile(fname, 'wb') as f:
pickle.dump(self.nodes, f)
def site_pin_node_to_wires(self, tile, node):

View File

@ -54,6 +54,8 @@ import json
import numpy as np
import numpy.linalg as linalg
from prjxray.util import OpenSafeFile
# =============================================================================
@ -83,7 +85,7 @@ def load_data(file_name, tagfilter=lambda tag: True, address_map=None):
segdata = None
all_segdata = []
with open(file_name, "r") as fp:
with OpenSafeFile(file_name, "r") as fp:
for line in fp.readlines():
line = line.strip()
@ -174,7 +176,7 @@ def write_segbits(file_name, all_tags, all_bits, W):
lines.append(all_tags[r] + " " + " ".join(bits) + "\n")
with open(file_name, "w") as fp:
with OpenSafeFile(file_name, "w") as fp:
for line in lines:
fp.write(line)
@ -702,7 +704,7 @@ def build_address_map(tilegrid_file):
address_map = {}
# Load tilegrid
with open(tilegrid_file, "r") as fp:
with OpenSafeFile(tilegrid_file, "r") as fp:
tilegrid = json.load(fp)
# Loop over tiles
@ -982,7 +984,7 @@ def main():
# Dump to CSV
if args.x is not None:
with open(args.x, "w") as fp:
with OpenSafeFile(args.x, "w") as fp:
dump_solution_to_csv(fp, tags_to_solve, bits_to_solve, X)
# Dump results

View File

@ -13,6 +13,7 @@ import progressbar
import pyjson5 as json5
import os.path
from prjxray.util import OpenSafeFile
def create_tables(conn):
c = conn.cursor()
@ -63,7 +64,7 @@ class NodeLookup(object):
nodes_processed = set()
for node in progressbar.progressbar(nodes):
with open(node) as f:
with OpenSafeFile(node) as f:
node_wires = json5.load(f)
assert node_wires['node'] not in nodes_processed
nodes_processed.add(node_wires['node'])

View File

@ -24,7 +24,7 @@ tilegrid.json provides tile addresses
'''
import os, json, re
from prjxray import util
from prjxray.util import OpenSafeFile, get_db_root, get_fabric
BLOCK_TYPES = set(('CLB_IO_CLK', 'BLOCK_RAM', 'CFG_CLB'))
@ -85,12 +85,12 @@ class Segmaker:
def __init__(self, bitsfile, verbose=None, db_root=None, fabric=None):
self.db_root = db_root
if self.db_root is None:
self.db_root = util.get_db_root()
self.db_root = get_db_root()
assert self.db_root, "No db root specified."
self.fabric = fabric
if self.fabric is None:
self.fabric = util.get_fabric()
self.fabric = get_fabric()
assert self.fabric, "No fabric specified."
self.verbose = verbose if verbose is not None else os.getenv(
@ -129,7 +129,7 @@ class Segmaker:
def load_grid(self):
'''Load self.grid holding tile addresses'''
with open(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
with OpenSafeFile(os.path.join(self.db_root, self.fabric, "tilegrid.json"),
"r") as f:
self.grid = json.load(f)
assert "segments" not in self.grid, "Old format tilegrid.json"
@ -152,7 +152,7 @@ class Segmaker:
'''
self.bits = dict()
print("Loading bits from %s." % bitsfile)
with open(bitsfile, "r") as f:
with OpenSafeFile(bitsfile, "r") as f:
for line in f:
# ex: bit_00020500_000_17
line = line.split("_")
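# After the split: ["bit", "00020500", "000", "17"], i.e. (presumably)
# the frame address, the word offset within the frame and the bit index.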
@ -446,7 +446,7 @@ class Segmaker:
segments = self.segments_by_type[segtype]
if segments:
print("Writing %s." % filename)
with open(filename, "w") as f:
with OpenSafeFile(filename, "w") as f:
for segname, segdata in sorted(segments.items()):
# seg 00020300_010
print("seg %s" % segname, file=f)

View File

@ -13,6 +13,7 @@ from collections import namedtuple
import json
from prjxray import lib
from prjxray.timing import fast_slow_tuple_to_corners, RcElement
from prjxray.util import OpenSafeFile
TileDbs = namedtuple(
'TileDbs', 'segbits block_ram_segbits ppips mask tile_type')
@ -313,7 +314,7 @@ class Tile(object):
backward_timing=get_pip_timing(pip.get('dst_to_src')),
)
with open(self.tile_dbs.tile_type) as f:
with OpenSafeFile(self.tile_dbs.tile_type) as f:
tile_type = json.load(f)
assert self.tilename_upper == tile_type['tile_type']
self.wires = get_wires(tile_type['wires'])

View File

@ -10,8 +10,8 @@
# SPDX-License-Identifier: ISC
from collections import namedtuple
from prjxray import bitstream
from prjxray import util
from prjxray.grid_types import BlockType
from prjxray.util import OpenSafeFile
import enum
@ -84,22 +84,16 @@ class TileSegbits(object):
self.feature_addresses = {}
if tile_db.ppips is not None:
with open(tile_db.ppips) as f:
util.lock_file(f, 10)
with OpenSafeFile(tile_db.ppips) as f:
self.ppips = read_ppips(f)
util.unlock_file(f)
if tile_db.segbits is not None:
with open(tile_db.segbits) as f:
util.lock_file(f, 10)
with OpenSafeFile(tile_db.segbits) as f:
self.segbits[BlockType.CLB_IO_CLK] = read_segbits(f)
util.unlock_file(f)
if tile_db.block_ram_segbits is not None:
with open(tile_db.block_ram_segbits) as f:
util.lock_file(f, 10)
with OpenSafeFile(tile_db.block_ram_segbits) as f:
self.segbits[BlockType.BLOCK_RAM] = read_segbits(f)
util.unlock_file(f)
for block_type in self.segbits:
for feature in self.segbits[block_type]:

View File

@ -21,6 +21,7 @@ TileSegbitsAlias performs several functions to achieve the alias:
from prjxray import bitstream
from prjxray.grid_types import Bits
from prjxray.tile_segbits import read_ppips
from prjxray.util import OpenSafeFile
class TileSegbitsAlias(object):
@ -67,7 +68,7 @@ class TileSegbitsAlias(object):
self.ppips = {}
if tile_db.ppips is not None:
with open(tile_db.ppips) as f:
with OpenSafeFile(tile_db.ppips) as f:
self.ppips = read_ppips(f)
self.tile_segbits = db.get_tile_segbits(self.alias_tile_type)

View File

@ -18,6 +18,49 @@ import yaml
from .roi import Roi
def timeout_handler(signum, frame):
raise Exception("ERROR TIMEOUT: could not lock file")
class OpenSafeFile:
"""
Opens a file under an exclusive advisory lock (flock), allowing for safe
reads and writes to a file that can potentially be modified by multiple
processes at the same time.
"""
def __init__(self, name, mode="r", timeout=10):
self.name = name
self.mode = mode
self.timeout = timeout
self.fd = None
def __enter__(self):
self.fd = open(self.name, self.mode)
self.lock_file()
return self.fd
def __exit__(self, exc_type, exc_value, traceback):
self.unlock_file()
self.fd.close()
def lock_file(self):
assert self.fd is not None
try:
signal.signal(signal.SIGALRM, timeout_handler)
signal.alarm(self.timeout)
fcntl.flock(self.fd.fileno(), fcntl.LOCK_EX)
signal.alarm(0)
except Exception as e:
print(f"{e}: {self.name}")
exit(1)
def unlock_file(self):
assert self.fd is not None
fcntl.flock(self.fd.fileno(), fcntl.LOCK_UN)
def get_db_root():
# Used during tilegrid db bootstrap
ret = os.getenv("XRAY_DATABASE_ROOT", None)
@ -44,7 +87,7 @@ def get_part_information(db_root, part):
filename = os.path.join(db_root, "mapping", "parts.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
with open(filename, 'r') as stream:
with OpenSafeFile(filename, 'r') as stream:
part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
part = part_mapping.get(part, None)
assert part, "Part {} not found in {}".format(part, part_mapping)
@ -53,7 +96,7 @@ def get_part_information(db_root, part):
def set_part_information(db_root, information):
filename = os.path.join(db_root, "mapping", "parts.yaml")
with open(filename, 'w+') as stream:
with OpenSafeFile(filename, 'w+') as stream:
yaml.dump(information, stream)
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
@ -63,7 +106,7 @@ def get_part_resources(file_path, part):
filename = os.path.join(file_path, "resources.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
with open(filename, 'r') as stream:
with OpenSafeFile(filename, 'r') as stream:
res_mapping = yaml.load(stream, Loader=yaml.FullLoader)
res = res_mapping.get(part, None)
assert res, "Part {} not found in {}".format(part, part_mapping)
@ -72,7 +115,7 @@ def get_part_resources(file_path, part):
def set_part_resources(file_path, information):
filename = os.path.join(file_path, "resources.yaml")
with open(filename, 'w+') as stream:
with OpenSafeFile(filename, 'w+') as stream:
yaml.dump(information, stream)
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
@ -83,7 +126,7 @@ def get_fabric_for_part(db_root, part):
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
part = get_part_information(db_root, part)
with open(filename, 'r') as stream:
with OpenSafeFile(filename, 'r') as stream:
device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
device = device_mapping.get(part['device'], None)
assert device, "Device {} not found in {}".format(
@ -95,7 +138,7 @@ def get_devices(db_root):
filename = os.path.join(db_root, "mapping", "devices.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
with open(filename, 'r') as stream:
with OpenSafeFile(filename, 'r') as stream:
device_mapping = yaml.load(stream, Loader=yaml.FullLoader)
return device_mapping
@ -104,7 +147,7 @@ def get_parts(db_root):
filename = os.path.join(db_root, "mapping", "parts.yaml")
assert os.path.isfile(filename), \
"Mapping file {} does not exists".format(filename)
with open(filename, 'r') as stream:
with OpenSafeFile(filename, 'r') as stream:
part_mapping = yaml.load(stream, Loader=yaml.FullLoader)
return part_mapping
@ -255,11 +298,9 @@ def parse_db_line(line):
def parse_db_lines(fn):
with open(fn, "r") as f:
lock_file(f, 10)
with OpenSafeFile(fn, "r") as f:
for line in f:
yield line, parse_db_line(line)
unlock_file(f)
def write_db_lines(fn, entries, track_origin=False):
@ -272,11 +313,9 @@ def write_db_lines(fn, entries, track_origin=False):
new_line = " ".join([tag] + sorted(bits))
new_lines.append(new_line)
with open(fn, "w") as f:
lock_file(f, 10)
with OpenSafeFile(fn, "w") as f:
for line in sorted(new_lines):
print(line, file=f)
unlock_file(f)
def parse_tagbit(x):
@ -409,44 +448,3 @@ def add_bool_arg(parser, yes_arg, default=False, **kwargs):
parser.add_argument(
'--no-' + dashed, dest=dest, action='store_false', **kwargs)
def timeout_handler(signum, frame):
raise Exception("ERROR: could not lock file!")
class OpenSafeFile:
"""
Opens a file in a thread-safe mode, allowing for safe read and writes
to a file that can potentially be modified by multiple processes at
the same time.
"""
def __init__(self, name, mode, timeout=10):
self.name = name
self.mode = mode
self.timeout = timeout
self.fd = None
def __enter__(self):
self.fd = open(self.name, self.mode)
self.lock_file()
return self.fd
def __exit__(self):
self.unlock_file()
self.fd.close()
def lock_file(self):
assert self.fd is not None
try:
signal.signal(signal.SIGALRM, timeout_handler)
signal.alarm(timeout)
fcntl.flock(self.fd.fileno(), fcntl.LOCK_EX)
signal.alarm(0)
except Exception as e:
print(e)
exit(1)
def unlock_file(self):
assert self.fd is not None
fcntl.flock(self.fd.fileno(), fcntl.LOCK_UN)
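For reference, a minimal usage sketch of the new context manager (the file name is hypothetical): on entry it opens the file and takes an exclusive flock, on exit it releases the lock and closes the descriptor, and the process exits with an error if the lock cannot be acquired within timeout seconds. Note that the SIGALRM-based timeout only works in the main thread.

    from prjxray.util import OpenSafeFile

    # Concurrent readers and writers serialize on the same exclusive lock.
    with OpenSafeFile("tilegrid.json") as f:  # defaults: mode="r", timeout=10
        data = f.read()

    with OpenSafeFile("tilegrid.json", "w", timeout=30) as f:
        f.write(data)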

View File

@ -16,9 +16,10 @@ import contextlib
import os
import fasm
import fasm.output
from prjxray.db import Database
from prjxray import fasm_disassembler
from prjxray import bitstream
from prjxray.db import Database
from prjxray.util import OpenSafeFile
import subprocess
import tempfile
@ -41,7 +42,7 @@ def bits_to_fasm(db_root, part, bits_file, verbose, canonical):
grid = db.grid()
disassembler = fasm_disassembler.FasmDisassembler(db)
with open(bits_file) as f:
with OpenSafeFile(bits_file) as f:
bitdata = bitstream.load_bitdata(f)
model = fasm.output.merge_and_sort(

View File

@ -21,6 +21,7 @@ from prjxray.timing import Outpin, Inpin, Wire, Buffer, \
PassTransistor, IntristicDelay, RcElement, PvtCorner
from prjxray.math_models import ExcelMathModel
from prjxray.db import Database
from prjxray.util import OpenSafeFile
from prjxray import util
FAST = PvtCorner.FAST
@ -481,7 +482,7 @@ def add_net(wb, net, timing_lookup):
def build_wire_filter(wire_filter):
wires_to_include = set()
with open(wire_filter) as f:
with OpenSafeFile(wire_filter) as f:
for l in f:
wire = l.strip()
if not wire:
@ -514,7 +515,7 @@ def main():
args = parser.parse_args()
with open(args.timing_json) as f:
with OpenSafeFile(args.timing_json) as f:
timing = json.load(f)
db = Database(args.db_root, args.part)

View File

@ -218,7 +218,7 @@ def read_segbits(fn_in):
lines = []
llast = None
with open(fn_in, "r") as f:
with util.OpenSafeFile(fn_in, "r") as f:
for line in f:
# Hack: skip duplicate lines
# This happens while merging a new multibit entry
@ -327,7 +327,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
mask_db_file = "%s/mask_%s.db" % (db_root, mask_db)
if os.path.exists(mask_db_file):
with open(mask_db_file, "r") as f:
with util.OpenSafeFile(mask_db_file, "r") as f:
for line in f:
line = line.split()
assert len(line) == 2
@ -340,7 +340,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
if not os.path.exists(seg_db_file):
continue
with open(seg_db_file, "r") as f:
with util.OpenSafeFile(seg_db_file, "r") as f:
for line in f:
line = line.split()
for bit in line[1:]:
@ -353,7 +353,7 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
bits.add(bit)
if len(bits) > 0:
with open(mask_db_file, "w") as f:
with util.OpenSafeFile(mask_db_file, "w") as f:
for bit in sorted(bits):
print("bit %s" % bit, file=f)
@ -361,14 +361,15 @@ def update_mask(db_root, mask_db, src_dbs, offset=0):
def load_zero_db(fn):
# Remove comments and convert to list of lines
ret = []
for l in open(fn, "r"):
pos = l.find("#")
if pos >= 0:
l = l[0:pos]
l = l.strip()
if not l:
continue
ret.append(l)
with util.OpenSafeFile(fn, "r") as f:
for l in f:
pos = l.find("#")
if pos >= 0:
l = l[0:pos]
l = l.strip()
if not l:
continue
ret.append(l)
return ret
@ -535,11 +536,9 @@ def update_seg_fns(
)
changes += new_changes
with open(fn_out, "w") as f:
util.lock_file(f, 10)
with util.OpenSafeFile(fn_out, "w") as f:
for line in sorted(lines):
print(line, file=f)
util.unlock_file(f)
if changes is not None:
seg_files += 1
@ -654,7 +653,7 @@ def load_tag_groups(file_name):
tag_groups = []
# Load tag group specifications
with open(file_name, "r") as fp:
with util.OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()

View File

@ -23,6 +23,7 @@ from collections import defaultdict
from prjxray import fasm_assembler, util
from prjxray.db import Database
from prjxray.roi import Roi
from prjxray.util import OpenSafeFile
import sys
@ -133,11 +134,11 @@ def run(
bank_to_tile = defaultdict(lambda: set())
if part is not None:
with open(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
with OpenSafeFile(os.path.join(db_root, part, "package_pins.csv"), "r") as fp:
reader = csv.DictReader(fp)
package_pins = [l for l in reader]
with open(os.path.join(db_root, part, "part.json"), "r") as fp:
with OpenSafeFile(os.path.join(db_root, part, "part.json"), "r") as fp:
part_data = json.load(fp)
for bank, loc in part_data["iobanks"].items():
@ -167,7 +168,7 @@ def run(
extra_features = []
if roi:
with open(roi) as f:
with OpenSafeFile(roi) as f:
roi_j = json.load(f)
x1 = roi_j['info']['GRID_X_MIN']
x2 = roi_j['info']['GRID_X_MAX']

View File

@ -24,6 +24,8 @@ import argparse
import os
import re
from prjxray.util import OpenSafeFile
# =============================================================================
@ -33,7 +35,7 @@ def read_pips_from_tile(tile_file):
PIP name strings. Names are formatted as <dst_wire>.<src_wire>
"""
with open(tile_file, "r") as fp:
with OpenSafeFile(tile_file, "r") as fp:
root = json.load(fp)
pips = root["pips"]
@ -53,7 +55,7 @@ def read_ppips(ppips_file):
"""
ppips = {}
with open(ppips_file, "r") as fp:
with OpenSafeFile(ppips_file, "r") as fp:
for line in fp.readlines():
line = line.split()
if len(line) == 2:
@ -70,7 +72,7 @@ def read_segbits(segbits_file):
"""
segbits = []
with open(segbits_file, "r") as fp:
with OpenSafeFile(segbits_file, "r") as fp:
for line in fp.readlines():
line = line.split()
if len(line) > 1:

View File

@ -21,6 +21,8 @@ import argparse
import re
import itertools
from prjxray.util import OpenSafeFile
# =============================================================================
@ -34,7 +36,7 @@ def load_tag_groups(file_name):
tag_groups = []
# Load tag group specifications
with open(file_name, "r") as fp:
with OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()
@ -89,7 +91,7 @@ def load_segbits(file_name):
segbits = {}
with open(file_name, "r") as fp:
with OpenSafeFile(file_name, "r") as fp:
for line in fp:
line = line.strip()
fields = line.split()
@ -114,7 +116,7 @@ def save_segbits(file_name, segbits):
Save segbits to a .db or .rdb file
"""
with open(file_name, "w") as fp:
with OpenSafeFile(file_name, "w") as fp:
for tag, bits in segbits.items():
if isinstance(bits, str):

View File

@ -10,7 +10,7 @@
# SPDX-License-Identifier: ISC
import sys, os, re
from prjxray import util
from prjxray.util import OpenSafeFile, parse_db_lines, write_db_lines
def index_masks(fn_in, groups_in):
@ -21,7 +21,7 @@ def index_masks(fn_in, groups_in):
groups[group] = set()
# Index bits
for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
for line, (tag, bits, mode) in parse_db_lines(fn_in):
assert not mode, "Unresolved tag: %s" % (line, )
prefix = tag[0:tag.rfind(".")]
group = groups.get(prefix, None)
@ -42,7 +42,7 @@ def index_masks(fn_in, groups_in):
def apply_masks(fn_in, groups):
"""Add 0 entries ("!") to .db entries based on groups definition"""
new_db = {}
for line, (tag, bits, mode) in util.parse_db_lines(fn_in):
for line, (tag, bits, mode) in parse_db_lines(fn_in):
assert not mode, "Unresolved tag: %s" % (line, )
prefix = tag[0:tag.rfind(".")]
group = groups.get(prefix, None)
@ -58,8 +58,9 @@ def apply_masks(fn_in, groups):
def load_groups(fn):
ret = []
for l in open(fn, "r"):
ret.append(l.strip())
with OpenSafeFile(fn, "r") as f:
for l in f:
ret.append(l.strip())
return ret
@ -67,7 +68,7 @@ def run(fn_in, fn_out, groups_fn, verbose=False):
groups_in = load_groups(groups_fn)
groups = index_masks(fn_in, groups_in)
new_db = apply_masks(fn_in, groups)
util.write_db_lines(fn_out, new_db)
write_db_lines(fn_out, new_db)
def main():

View File

@ -15,6 +15,7 @@ import os
import parse as format_parser
import subprocess
import sys
"""Module for generating the Info.md file found in the database directory."""
info_md_header = """

View File

@ -23,6 +23,8 @@ import re
from collections import defaultdict
from prjxray.util import OpenSafeFile
def main():
@ -49,7 +51,7 @@ def main():
args = parser.parse_args()
# Load pin dump
with open(args.csv, "r") as fp:
with OpenSafeFile(args.csv, "r") as fp:
pin_dump = list(csv.DictReader(fp))
# Group pins into ports
@ -103,7 +105,7 @@ def main():
port["width"] += 1
# Write pin ports to a JSON file
with open(args.json, "w") as fp:
with OpenSafeFile(args.json, "w") as fp:
json.dump(ports, fp, indent=1, sort_keys=True)

View File

@ -12,6 +12,7 @@
import json
import argparse
from prjxray.util import OpenSafeFile
def get_elems_count(timings, slice, site, bel_type):
combinational = 0
@ -113,7 +114,7 @@ def produce_sdf(timings, outdir):
"""
)"""
with open(outdir + '/' + slice + '.sdf', "w") as fp:
with OpenSafeFile(outdir + '/' + slice + '.sdf', "w") as fp:
fp.write(sdf)
@ -125,7 +126,7 @@ def main():
args = parser.parse_args()
with open(args.json, 'r') as fp:
with OpenSafeFile(args.json, 'r') as fp:
timings = json.load(fp)
produce_sdf(timings, args.sdf)

View File

@ -24,6 +24,8 @@ import sys
import argparse
import re
from prjxray.util import OpenSafeFile
# =============================================================================
@ -32,7 +34,7 @@ def load_just_bits(file_name):
Read bits from a .db or .rdb file. Ignores tags and bit values.
"""
with open(file_name, "r") as fp:
with OpenSafeFile(file_name, "r") as fp:
lines = fp.readlines()
bits = set()

View File

@ -10,11 +10,12 @@
# SPDX-License-Identifier: ISC
import sys, re
from prjxray import util
from prjxray.util import OpenSafeFile, db_root_arg, parse_db_line
def run(fnin, fnout=None, strict=False, verbose=False):
lines = open(fnin, 'r').read().split('\n')
with OpenSafeFile(fnin) as f:
lines = f.read().split('\n')
tags = dict()
bitss = dict()
for line in lines:
@ -24,7 +25,7 @@ def run(fnin, fnout=None, strict=False, verbose=False):
# TODO: figure out what to do with masks
if line.startswith("bit "):
continue
tag, bits, mode, _ = util.parse_db_line(line)
tag, bits, mode, _ = parse_db_line(line)
if strict:
if mode != "always":
assert not mode, "strict: got ill defined line: %s" % (line, )
@ -39,7 +40,7 @@ def run(fnin, fnout=None, strict=False, verbose=False):
bitss[bits] = tag
if fnout:
with open(fnout, "w") as fout:
with OpenSafeFile(fnout, "w") as fout:
for line in sorted(lines):
line = line.strip()
if line == '':
@ -53,7 +54,7 @@ def main():
parser = argparse.ArgumentParser(
description="Parse a db file, checking for consistency")
util.db_root_arg(parser)
db_root_arg(parser)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument(
'--strict',

View File

@ -11,7 +11,9 @@
import argparse
import json
from sdf_timing import sdfparse
from prjxray.util import OpenSafeFile
def merge(timings_list, site):
@ -59,15 +61,16 @@ def main():
timings_list = list()
for sdf in args.sdfs:
with open(sdf, 'r') as fp:
with OpenSafeFile(sdf, 'r') as fp:
timing = sdfparse.parse(fp.read())
timings_list.append(timing)
merged_sdf = merge(timings_list, args.site)
open(args.out, 'w').write(sdfparse.emit(merged_sdf, timescale='1ns'))
with OpenSafeFile(args.out, 'w') as fp:
fp.write(sdfparse.emit(merged_sdf, timescale='1ns'))
if args.json is not None:
with open(args.json, 'w') as fp:
with OpenSafeFile(args.json, 'w') as fp:
json.dump(merged_sdf, fp, indent=4, sort_keys=True)

View File

@ -19,7 +19,7 @@ import sys, os, json, re
import copy
from prjxray import bitstream
from prjxray import db as prjxraydb
from prjxray import util
from prjxray.util import OpenSafeFile, parse_tagbit, db_root_arg, part_arg
class NoDB(Exception):
@ -39,7 +39,7 @@ def process_db(db, tile_type, process, verbose):
verbose and print("process_db(%s): %s" % (tile_type, fns))
for fn in fns:
if fn:
with open(fn, "r") as f:
with OpenSafeFile(fn, "r") as f:
for line in f:
process(line)
@ -61,7 +61,7 @@ def get_database(db, tile_type, bit_only=False, verbose=False):
return
tagbits = []
else:
tagbits = [util.parse_tagbit(x) for x in parts[1:]]
tagbits = [parse_tagbit(x) for x in parts[1:]]
tags.append(list([name] + tagbits))
@ -430,7 +430,7 @@ def tile_segnames(tiles):
def load_tiles(db_root, part):
# TODO: Migrate to new tilegrid format via library.
with open("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
with OpenSafeFile("%s/%s/tilegrid.json" % (db_root, part), "r") as f:
tiles = json.load(f)
return tiles
@ -449,7 +449,8 @@ def run(
db = prjxraydb.Database(db_root, part)
tiles = load_tiles(db_root, part)
segments = mk_segments(tiles)
bitdata = bitstream.load_bitdata2(open(bits_file, "r"))
with OpenSafeFile(bits_file) as f:
bitdata = bitstream.load_bitdata2(f)
if flag_unknown_bits:
print_unknown_bits(tiles, bitdata)
@ -486,8 +487,8 @@ def main():
parser = argparse.ArgumentParser(
description="Decode bits within a tile's address space")
util.db_root_arg(parser)
util.part_arg(parser)
db_root_arg(parser)
part_arg(parser)
parser.add_argument('--verbose', action='store_true', help='')
parser.add_argument(
'-z',

View File

@ -21,6 +21,8 @@ import sys
import itertools
from prjxray.util import OpenSafeFile
# =============================================================================
@ -69,7 +71,7 @@ def load_and_sort_segbits(file_name, tagmap=lambda tag: tag):
# Load segbits
segbits = {}
with open(file_name, "r") as fp:
with OpenSafeFile(file_name, "r") as fp:
lines = fp.readlines()
# Parse lines

View File

@ -12,6 +12,7 @@
import sys, os, json
import pickle
from prjxray.util import OpenSafeFile
class MergeFind:
def __init__(self):
@ -34,11 +35,11 @@ class MergeFind:
def db_gen():
print("Reading database..")
with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tilegrid = json.load(f)
with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)
@ -81,7 +82,7 @@ def db_gen():
reverse_node_node = dict()
for tile_type in ["int_l", "int_r"]:
with open("%s/%s/segbits_%s.db" %
with OpenSafeFile("%s/%s/segbits_%s.db" %
(os.getenv("XRAY_DATABASE_DIR"), os.getenv("XRAY_DATABASE"),
tile_type), "r") as f:
for line in f:

View File

@ -65,8 +65,8 @@ import sys
import json
import utils.xjson as xjson
import utils.cmp as cmp
from prjxray.util import lock_file, unlock_file
from prjxray.util import OpenSafeFile
def split_all(s, chars):
"""Split on multiple character values.
@ -323,7 +323,8 @@ def sort_db(pathname):
else:
return False
lines = open(pathname).readlines()
with OpenSafeFile(pathname) as f:
lines = f.readlines()
tosort = []
for l in lines:
@ -334,22 +335,10 @@ def sort_db(pathname):
tosort.sort(key=cmp.cmp_key)
# Make sure the sort is stable
#for i in range(0, 4):
# copy = tosort.copy()
# random.shuffle(copy)
# copy.sort(key=cmp.cmp_key)
# assert len(copy) == len(tosort)
# for i in range(0, len(copy)):
# assert copy[i] == tosort[i], "\n%r\n != \n%r\n" % (
# copy[i], tosort[i])
with open(pathname, 'w') as f:
lock_file(f, 10)
with OpenSafeFile(pathname, 'w') as f:
for _, l in tosort:
f.write(l)
f.write('\n')
unlock_file(f)
return True
@ -387,13 +376,15 @@ def sort_csv(pathname):
def sort_json(filename):
"""Sort a XXX.json file."""
try:
d = json.load(open(filename))
with OpenSafeFile(filename) as f:
d = json.load(f)
except json.JSONDecodeError as e:
print(e)
return False
with open(filename, 'w') as f:
with OpenSafeFile(filename, 'w') as f:
xjson.pprint(f, d)
return True
@ -401,17 +392,15 @@ def sort_json(filename):
def sort_db_text(n):
rows = []
with open(n) as f:
with OpenSafeFile(n) as f:
for l in f:
rows.append(([extract_num(s) for s in l.split()], l))
rows.sort(key=lambda i: i[0])
with open(n, 'w') as f:
lock_file(f, 10)
with OpenSafeFile(n, 'w') as f:
for l in rows:
f.write(l[-1])
unlock_file(f)
return True

View File

@ -25,6 +25,8 @@ a valid bitstream.
import argparse
from io import StringIO
from prjxray.util import OpenSafeFile
conf_regs = {
0: "CRC",
1: "FAR_MAJ",
@ -124,7 +126,7 @@ class Bitstream:
self.curr_fdri_write_len = 0
self.curr_crc_check = 0
self.fdri_in_progress = False
with open(file_name, "rb") as f:
with OpenSafeFile(file_name, "rb") as f:
self.bytes = f.read()
pos, self.header = self.get_header()
self.body = [
@ -395,7 +397,7 @@ class Bitstream:
else:
frame_stream.write(
"#{:3}:{:6},".format(i % 65, hex(self.frame_data[i])))
with open(file_name, "w") as f:
with OpenSafeFile(file_name, "w") as f:
print(frame_stream.getvalue(), file=f)
def write_frames(self, file_name):
@ -409,7 +411,7 @@ class Bitstream:
frame_stream.write("\n")
elif i < len(self.frame_data) - 1:
frame_stream.write(",")
with open(file_name, "w") as f:
with OpenSafeFile(file_name, "w") as f:
print(frame_stream.getvalue(), file=f)

View File

@ -19,6 +19,7 @@ import tempfile
import prjxray
import utils.fasm2frames as fasm2frames
from prjxray.util import OpenSafeFile
from textx.exceptions import TextXSyntaxError
@ -69,7 +70,7 @@ class TestStringMethods(unittest.TestCase):
return os.path.join(os.path.dirname(__file__), 'test_data', fname)
def get_test_data(self, fname):
with open(self.filename_test_data(fname)) as f:
with OpenSafeFile(self.filename_test_data(fname)) as f:
return f.read()
def fasm2frames(self, fin_data, **kw):

View File

@ -14,13 +14,14 @@
import os, sys, json
from prjxray.util import OpenSafeFile
def main():
with open("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
with OpenSafeFile("%s/%s/tilegrid.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tilegrid = json.load(f)
with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)

View File

@ -11,13 +11,14 @@
import os, sys, json
from prjxray.util import OpenSafeFile
def main(argv):
if len(argv) != 3:
print("Usage example: python3 %s HCLK_R HCLK_SW6E3" % sys.argv[0])
sys.exit(1)
with open("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
with OpenSafeFile("%s/%s/tileconn.json" % (os.getenv("XRAY_DATABASE_DIR"),
os.getenv("XRAY_DATABASE")), "r") as f:
tileconn = json.load(f)

View File

@ -15,7 +15,7 @@ import os
import re
import tempfile
import json
from prjxray import util
from prjxray.util import OpenSafeFile, db_root_arg, get_parts, set_part_resources
def main():
@ -31,7 +31,7 @@ def main():
'family',
help="Name of the device family.",
choices=['artix7', 'kintex7', 'zynq7', 'spartan7'])
util.db_root_arg(parser)
db_root_arg(parser)
args = parser.parse_args()
env = os.environ.copy()
@ -40,7 +40,7 @@ def main():
os.getenv('XRAY_DIR'), 'settings', args.family)
information = {}
parts = util.get_parts(args.db_root)
parts = get_parts(args.db_root)
processed_parts = dict()
for part in parts.keys():
# Skip parts which differ only in the speedgrade, as they have the same pins
@ -64,7 +64,7 @@ def main():
cwd=cwd,
stdout=subprocess.PIPE)
with open(tmp_file, "r") as fp:
with OpenSafeFile(tmp_file, "r") as fp:
pins_json = json.load(fp)
os.remove(tmp_file)
@ -81,7 +81,7 @@ def main():
processed_parts[common_part] = {'pins': pins}
# Overwrites the <family>/resources.yaml file completely with new data
util.set_part_resources(resource_path, information)
set_part_resources(resource_path, information)
if __name__ == '__main__':

View File

@ -18,7 +18,7 @@ import multiprocessing
import pyjson5 as json5
import json
import sys
from prjxray import util
from prjxray.util import OpenSafeFile, db_root_arg, part_arg
def full_wire_name(wire_in_grid):
@ -64,7 +64,7 @@ def make_connections(db_root, part):
def read_json5(fname):
with open(fname, 'r') as f:
with OpenSafeFile(fname, 'r') as f:
return json5.load(f)
@ -72,8 +72,8 @@ def main():
parser = argparse.ArgumentParser(
description="Tests database against raw node list.")
util.db_root_arg(parser)
util.part_arg(parser)
db_root_arg(parser)
part_arg(parser)
parser.add_argument('--raw_node_root', required=True)
parser.add_argument('--error_nodes', default="error_nodes.json")
parser.add_argument('--ignored_wires')
@ -109,7 +109,7 @@ def main():
if len(error_nodes) > 0:
if args.ignored_wires:
with open(args.ignored_wires, 'r') as f:
with OpenSafeFile(args.ignored_wires, 'r') as f:
ignored_wires = [l.strip() for l in f.readlines()]
print(
@ -119,7 +119,7 @@ def main():
args.error_nodes,
))
with open(args.error_nodes, 'w') as f:
with OpenSafeFile(args.error_nodes, 'w') as f:
json.dump(error_nodes, f, indent=2)
if not args.ignored_wires:

View File

@ -11,6 +11,7 @@
import sys
import json
from prjxray.xjson import pprint
from prjxray.util import OpenSafeFile
if __name__ == "__main__":
if len(sys.argv) == 1:
@ -18,5 +19,6 @@ if __name__ == "__main__":
doctest.testmod()
else:
assert len(sys.argv) == 2
d = json.load(open(sys.argv[1]))
with OpenSafeFile(sys.argv[1]) as f:
d = json.load(f)
pprint(sys.stdout, d)