2020-12-03 00:20:50 +01:00
|
|
|
# See LICENSE for licensing information.
|
|
|
|
|
#
|
|
|
|
|
# Copyright (c) 2016-2019 Regents of the University of California and The Board
|
|
|
|
|
# of Regents for the Oklahoma Agricultural and Mechanical College
|
|
|
|
|
# (acting for and on behalf of Oklahoma State University)
|
|
|
|
|
# All rights reserved.
|
|
|
|
|
#
|
|
|
|
|
|
|
|
|
|
from .analytical_util import *
|
2020-12-14 22:59:31 +01:00
|
|
|
from .simulation import simulation
|
2020-12-03 00:20:50 +01:00
|
|
|
from globals import OPTS
|
|
|
|
|
import debug
|
|
|
|
|
|
2020-12-14 22:59:31 +01:00
|
|
|
import os
|
|
|
|
|
from sklearn.linear_model import LinearRegression
|
|
|
|
|
import math
|
|
|
|
|
|
2020-12-03 00:20:50 +01:00
|
|
|
# Location (relative to the technology directory) of the pre-simulated
# characterization data used to train the regression models.
relative_data_path = "/sim_data"

# One CSV per modeled quantity. NOTE: the order here fixes the index
# order of the predictions list (delay, power, leakage, slew) consumed
# by linear_regression.get_lib_values below.
data_fnames = ["delay_data.csv",
               "power_data.csv",
               "leakage_data.csv",
               "slew_data.csv"]

# OPENRAM_TECH must point at the technology root directory; if it is
# unset this returns None and the concatenation below raises TypeError.
tech_path = os.environ.get('OPENRAM_TECH')
data_dir = tech_path+'/'+OPTS.tech_name+relative_data_path

# Absolute path of each training dataset for the current technology.
data_paths = [data_dir +'/'+fname for fname in data_fnames]
|
2020-12-03 00:20:50 +01:00
|
|
|
|
2020-12-14 22:59:31 +01:00
|
|
|
class linear_regression(simulation):
    """
    Characterizer that estimates SRAM delay, power, leakage, and slew for
    LIB generation by fitting a linear regression to pre-simulated
    technology data (the CSVs in data_paths) instead of running SPICE.
    """

    def __init__(self, sram, spfile, corner):
        super().__init__(sram, spfile, corner)
        self.set_corner(corner)
        self.create_signal_names()
        self.add_graph_exclusions()
        # Placeholders for trained models. NOTE(review): these are never
        # assigned elsewhere in this file — models are rebuilt on every
        # get_predictions() call; confirm whether caching was intended.
        self.delay_model = None
        self.slew_model = None
        self.power_model = None
        self.leakage_model = None

    def get_lib_values(self, slews, loads):
        """
        A model and prediction is created for each output needed for the LIB.

        Returns a tuple (sram_data, port_data):
          - sram_data: chip-level values ("min_period", "leakage_power")
          - port_data: per-port dict of measurement-name -> list of values,
            one entry per (slew, load) characterization point.
        """
        log_num_words = math.log(OPTS.num_words, 2)
        debug.info(1, "OPTS.words_per_row={}".format(OPTS.words_per_row))
        model_inputs = [log_num_words,
                        OPTS.word_size,
                        OPTS.words_per_row,
                        self.sram.width * self.sram.height]

        # List returned with value order being delay, power, leakage, slew
        # FIXME: make order less hard coded
        sram_vals = self.get_predictions(model_inputs)

        self.create_measurement_names()

        # Set delay/power for slews and loads
        port_data = self.get_empty_measure_data_dict()
        debug.info(1, 'Slew, Load, Delay(ns), Slew(ns)')
        for slew in slews:
            for load in loads:
                # The regression inputs do not depend on slew/load, so the
                # same predicted values are reported for every point.
                # BUGFIX: this message previously formatted
                # total_delay.delay / total_delay.slew, but total_delay is
                # undefined in this scope and raised NameError on every
                # call; use the model predictions instead.
                debug.info(1,
                           '{}, {}, {}, {}'.format(slew,
                                                   load,
                                                   sram_vals[0] / 1e3,
                                                   sram_vals[3] / 1e3))

                # Delay is only calculated on a single port and replicated for now.
                for port in self.all_ports:
                    for mname in self.delay_meas_names + self.power_meas_names:
                        # FIXME: fix magic for indexing the data
                        if "power" in mname:
                            port_data[port][mname].append(sram_vals[1])
                        elif "delay" in mname and port in self.read_ports:
                            port_data[port][mname].append(sram_vals[0])
                        elif "slew" in mname and port in self.read_ports:
                            port_data[port][mname].append(sram_vals[3])
                        else:
                            debug.error("Measurement name not recognized: {}".format(mname), 1)

        # Estimate the period as double the delay with margin.
        # NOTE(review): no margin is actually applied (a period_margin
        # local existed but was unused) — confirm intended period formula.
        sram_data = {"min_period": sram_vals[0] * 2,
                     "leakage_power": sram_vals[2]}

        debug.info(2, "SRAM Data:\n{}".format(sram_data))
        debug.info(2, "Port Data:\n{}".format(port_data))

        return (sram_data, port_data)

    def get_predictions(self, model_inputs):
        """
        Generate a model and prediction for LIB output.

        Trains one regression per dataset in data_paths and returns the
        unscaled predictions in dataset order (delay, power, leakage, slew).
        """
        # Scale the query point with the statistics of the first dataset.
        # NOTE(review): assumes every dataset uses the same input scaling —
        # confirm against analytical_util's scaling helpers.
        scaled_inputs = np.asarray([scale_input_datapoint(model_inputs, data_paths[0])])

        predictions = []
        for path in data_paths:
            features, labels = get_scaled_data(path)
            model = self.generate_model(features, labels)
            scaled_pred = self.model_prediction(model, scaled_inputs)
            # Labels are trained scaled, so predictions must be unscaled.
            pred = unscale_data(scaled_pred.tolist(), path)
            debug.info(1,"Unscaled Prediction = {}".format(pred))
            predictions.append(pred)
        return predictions

    def generate_model(self, features, labels):
        """
        Supervised training of model.

        Fits an ordinary least-squares linear regression on the scaled
        features/labels and returns the fitted model.
        """
        model = LinearRegression()
        model.fit(features, labels)
        return model

    def model_prediction(self, model, features):
        """
        Have the model perform a prediction and unscale the prediction
        as the model is trained with scaled values.

        (Unscaling itself is done by the caller via unscale_data.)
        """
        pred = model.predict(features)
        return pred
|
|
|
|
|
|