#-----------------------------------------------------------------------------
# A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
#
# Contributors
#
# David Mapstone (d.a.mapstone@soton.ac.uk)
#
# Copyright 2022, SoC Labs (www.soclabs.org)
#-----------------------------------------------------------------------------

import csv
import math
import os
import platform
import random
import subprocess


def chunkstring(string, length):
    # Split a string into length-sized chunks; the final chunk may be shorter
    array_len = math.ceil(len(string) / length)
    array = []
    for i in range(array_len):
        array.append(string[i * length:i * length + length])
    return array

# Check that the required environment variable is set
if "SHA_2_ACC_DIR" not in os.environ:
    print("Sourceme file at root of repository has not been sourced. Please source this file and try again.")
    quit()
# Read in descriptor file
# - contains the random seed and the number of data packets to generate
stim_file = os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/model/" + "model_builder_stim.csv"
with open(stim_file, "r") as stim:
    csvreader = csv.reader(stim, delimiter=",")
    stim_list = list(csvreader)
seed = int(stim_list[0][0])
packets = int(stim_list[0][1])
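# Example descriptor contents (illustrative values, not taken from the
# repository): a single row "10,4" would request 4 packets with seed 10.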
random.seed(seed)
print(f"Generating {packets} packets using seed: {seed}")
cfg_words_list = []
in_data_words_list = []
in_data_words_last_list = []
out_data_words_list = []
out_data_words_last_list = []
hash_list = []
for i in range(packets):
    # Pick a random message size up to 2^14 bits, rounded up to a whole number
    # of bytes (lower bound of 1 avoids generating an empty packet)
    cfg_size = math.ceil(random.randint(1, pow(2, 14)) / 8) * 8
    cfg_size_bin = "{0:b}".format(cfg_size)
    # Pad the size to a 64-bit binary string
    cfg_size_str = "0" * (64 - len(cfg_size_bin)) + cfg_size_bin
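    # e.g. cfg_size = 24 gives cfg_size_bin = "11000", so cfg_size_str is
    # 59 zeros followed by "11000" (64 binary digits in total)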
    # Generate random data of the configured size
    data = "{0:b}".format(random.getrandbits(cfg_size))
    # Re-pad to the configured width: Python drops leading zeros when
    # formatting an integer in binary
    data = "0" * (cfg_size - len(data)) + data
    # Split the message into 512-bit words
    chunked_data_words = chunkstring(data, 512)
    in_data_words = chunked_data_words.copy()
    # Zero-pad the final input word up to the full 512-bit width
    in_data_words[-1] = in_data_words[-1] + "0" * (512 - len(in_data_words[-1]))
    in_data_words_last = []
    out_data_words = chunked_data_words.copy()
    out_data_words_last = []
    last_len = len(chunked_data_words[-1])
    # print(f"{chunked_data_words[-1]} {last_len}")
    # Apply SHA-256 message padding (FIPS 180-4): append a '1' bit, zero-fill,
    # and place the 64-bit message length at the end of the final 512-bit block
    if last_len == 512:
        # Message exactly fills the block, so padding needs a whole extra block
        out_data_words.append("1" + "0" * 447 + cfg_size_str)
    else:
        out_data_words[-1] = out_data_words[-1] + "1"
        if last_len > 447:  # Size field can't fit in the last word
            # Pad the last word to 512 bits
            out_data_words[-1] = out_data_words[-1] + "0" * (512 - len(out_data_words[-1]))
            # Create a new word with the size at the end
            out_data_words.append("0" * 448 + cfg_size_str)
        else:
            out_data_words[-1] = out_data_words[-1] + "0" * (512 - 64 - len(out_data_words[-1])) + cfg_size_str
    # Per-word last flags: '1' marks the final word of each packet
    for j in range(len(in_data_words) - 1):
        in_data_words_last.append("0")
    in_data_words_last.append("1")
    for j in range(len(out_data_words) - 1):
        out_data_words_last.append("0")
    out_data_words_last.append("1")
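    # Sanity check (added illustration, not part of the original flow): every
    # word driven into or expected out of the accelerator is exactly 512 bits
    for word in in_data_words + out_data_words:
        assert len(word) == 512, "block is not 512 bits wide"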
    # Accumulate this packet's words into the whole-run stimulus lists
    cfg_words_list.append(cfg_size_str)
    in_data_words_list += in_data_words
    in_data_words_last_list += in_data_words_last
    out_data_words_list += out_data_words
    out_data_words_last_list += out_data_words_last
    # Compute the reference hash with the host's shasum; '-0' makes it read
    # the input as ASCII '0'/'1' bits
    hash_val = 0
    if platform.system() == "Darwin":
        cmd = f"echo -n {data} | shasum -a 256 -0"
        hash_process = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
        hash_val = hash_process.communicate()[0].decode("utf-8").split()[0]
    else:
        print("Platform not supported yet")
        exit()
    # print(hash_val)
    hash_list.append(hash_val)
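
# Portability note: the shasum(1) pipeline above is macOS-only. A hedged,
# portable sketch of the same digest (an illustration, not part of the
# original flow): pack the '0'/'1' string into raw bytes and hash with
# hashlib. cfg_size is always rounded up to a multiple of 8 bits, so the
# string packs cleanly; shasum's '-0' BITS mode hashes the same bytes.
def sha256_of_bitstring(bits):
    import hashlib
    return hashlib.sha256(int(bits, 2).to_bytes(len(bits) // 8, "big")).hexdigest()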

# Write out input data stimulus to a CSV file
input_header = ["input_data", "input_data_last"]  # header kept for reference; not written out
with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/testbench/" + "input_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
    writer = csv.writer(f)
    for idx, word in enumerate(in_data_words_list):
        writer.writerow(["{0:x}".format(int(word, 2)), in_data_words_last_list[idx]])
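# Note: each row pairs a hex-encoded word with a 0/1 last flag. "{0:x}" does
# not zero-pad, so a 512-bit word may write as fewer than 128 hex digits; the
# testbench should parse values rather than fixed-width text.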

# Write out cfg stimulus to a CSV file
input_header = ["input_cfg_size", "input_cfg_scheme", "input_cfg_last"]
with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/testbench/" + "input_cfg_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
    writer = csv.writer(f)
    for word in cfg_words_list:
        writer.writerow(["{0:x}".format(int(word, 2)), "0", "1"])

# Write out expected output data to a CSV file
output_header = ["output_data", "output_data_last"]
with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/testbench/" + "output_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
    writer = csv.writer(f)
    for idx, word in enumerate(out_data_words_list):
        writer.writerow(["{0:x}".format(int(word, 2)), out_data_words_last_list[idx]])

# Write out expected hash values to a CSV file
output_header = ["output_data", "output_data_last"]
with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/testbench/" + "output_data_hash_stim.csv", "w", encoding="UTF8", newline='') as f:
    writer = csv.writer(f)
    for word in hash_list:
        writer.writerow([word, "1"])
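
# Typical invocation (assuming this file is the model_builder script named by
# the stimulus filenames above, run after sourcing the repository sourceme so
# SHA_2_ACC_DIR is set):
#   python3 model_builder.py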