diff --git a/hdl/src/message_build.sv b/hdl/src/message_build.sv
index 631e167002d312b8e894f88d77520282ccd110cf..6df6b48b712ba5fa6eef4833912a70e3a21b997f 100644
--- a/hdl/src/message_build.sv
+++ b/hdl/src/message_build.sv
@@ -15,6 +15,7 @@ module message_build (
     // Config data and Handshaking
     input  logic [63:0] cfg_size,
     input  logic  [1:0] cfg_scheme,
+    input  logic        cfg_last,
     input  logic        cfg_valid,
     output logic        cfg_ready,
 
diff --git a/hdl/verif/tb_engine.sv b/hdl/verif/tb_engine.sv
index 78b51759ffd996e342f5b80cc6437fc9527b385e..92425b7c7e5255452fc17f298f70c62547a49dc7 100644
--- a/hdl/verif/tb_engine.sv
+++ b/hdl/verif/tb_engine.sv
@@ -24,6 +24,7 @@ module tb_engine;
     // Config data and Handshaking
     logic [63:0] cfg_size;
     logic  [1:0] cfg_scheme;
+    logic        cfg_last;
     logic        cfg_valid;
     logic        cfg_ready;
 
@@ -41,6 +42,7 @@ module tb_engine;
         .data_in_last(data_in_last),
         .cfg_size(cfg_size),
         .cfg_scheme(cfg_scheme),
+        .cfg_last(cfg_last),
         .cfg_valid(cfg_valid),
         .cfg_ready(cfg_ready),
         .data_out(data_out),
@@ -48,10 +50,17 @@ module tb_engine;
         .data_out_ready(data_out_ready));
 
     logic data_in_drive_en;
+    logic cfg_drive_en;
+
     logic [511:0] data_in_queue [$];
-    logic data_in_last_queue [$]; 
+    logic data_in_last_queue [$];
     logic data_in_wait_queue;
 
+    logic [63:0] cfg_size_queue [$];
+    logic  [1:0] cfg_scheme_queue [$];
+    logic        cfg_last_queue [$];
+    logic        cfg_wait_queue;
+
     // Handle Valid and Data for data_in
     always_ff @(posedge clk, negedge nrst) begin: data_in_valid_drive
         if (!nrst) begin
@@ -77,14 +86,48 @@ module tb_engine;
             end
         end
     end
 
-    int fd; // File descriptor handle
-    logic [511:0] input_data;
-    logic input_data_last;
+    // Handle Valid and Data for cfg
+    always_ff @(posedge clk, negedge nrst) begin: cfg_valid_drive
+        if (!nrst) begin
+            cfg_size       <= 64'd0;
+            cfg_scheme     <= 2'd0;
+            cfg_valid      <= 1'b0;
+            cfg_last       <= 1'b0;
+            cfg_wait_queue <= 1'b1;
+        end else if (cfg_drive_en) begin
+            if (((cfg_valid == 1'b1) && (cfg_ready == 1'b1)) ||
+                 (cfg_wait_queue == 1'b1)) begin
+                // cfg transfer just completed or transfers already up to date
+                if ((cfg_size_queue.size() > 0) && (cfg_scheme_queue.size() > 0) && (cfg_last_queue.size() > 0)) begin
+                    cfg_size       <= cfg_size_queue.pop_front();
+                    cfg_scheme     <= cfg_scheme_queue.pop_front();
+                    cfg_last       <= cfg_last_queue.pop_front();
+                    cfg_valid      <= 1'b1;
+                    cfg_wait_queue <= 1'b0;
+                end else begin
+                    // No data currently available in queue to write but transfers up to date
+                    cfg_wait_queue <= 1'b1;
+                    cfg_valid      <= 1'b0;
+                end
+            end
+        end
+    end
+
+    // File Reading Variables
+    int fd; // File descriptor Handle
+
+    logic [511:0] input_data;      // Temporary Input Data Storage
+    logic         input_data_last; // Temporary Input Data Last
+
+    logic [63:0] input_cfg_size;   // Temporary cfg size
+    logic  [1:0] input_cfg_scheme; // Temporary cfg scheme
+    logic        input_cfg_last;   // Temporary cfg last
 
     initial begin
         $dumpfile("engine_sim.vcd");
         $dumpvars(0, tb_engine);
 
         data_in_drive_en = 0;
+        cfg_drive_en = 0;
 
         // Read input data into Queue
         fd = $fopen("../stimulus/input_data_builder_stim.csv", "r");
@@ -94,6 +137,15 @@
         end
         $fclose(fd);
 
+        // Read input cfg into Queue
+        fd = $fopen("../stimulus/input_cfg_builder_stim.csv", "r");
+        while ($fscanf (fd, "%x,%x,%b", input_cfg_size, input_cfg_scheme, input_cfg_last) == 3) begin
+            cfg_size_queue.push_back(input_cfg_size);
+            cfg_scheme_queue.push_back(input_cfg_scheme);
+            cfg_last_queue.push_back(input_cfg_last);
+        end
+        $fclose(fd);
+
         cfg_size = 0;
         cfg_scheme = 0;
         cfg_valid = 0;
@@ -107,9 +159,7 @@
 
         // Write some data into the config register
         # 30
-        cfg_size = 448;
-        cfg_scheme = 2;
-        cfg_valid = 1;
+        cfg_drive_en = 1;
 
         #1200
         $display("Test Complete");
diff --git a/model/py/builder.py b/model/py/builder.py
index ffe3dec1f9085a8b21883f556ac05a2d3491a681..0edc6520dec03d75669c74b8fa690053b3621b89 100644
--- a/model/py/builder.py
+++ b/model/py/builder.py
@@ -24,12 +24,19 @@ def main(argv):
     random.seed(seed)
     print(f"Generating {packets} packets using seed: {seed}")
 
+    cfg_words_list = []
+    in_data_words_list = []
+    in_data_words_last_list = []
+    out_data_words_list = []
+    out_data_words_last_list = []
+
     for i in range(packets):
         # Generate expected output in 512 bit chunks
         cfg_size = random.randint(0,pow(2,14))
         cfg_size_bin = "{0:b}".format(cfg_size)
         # Pad Size to 64 bits
         cfg_size_str = "0"*(64-len(cfg_size_bin)) + str(cfg_size_bin)
+
         # Generate Random Data using Size
         data = "{0:b}".format(random.getrandbits(cfg_size))
 
@@ -59,27 +66,33 @@ def main(argv):
         for i in range(len(out_data_words) - 1):
             out_data_words_last.append("0")
         out_data_words_last.append("1")
+
+        cfg_words_list.append(cfg_size_str)
+        in_data_words_list += in_data_words
+        in_data_words_last_list += in_data_words_last
+        out_data_words_list += out_data_words
+        out_data_words_last_list += out_data_words_last
 
-    # Ouptut Input Data Stimulus to Text File
-    input_header = ["input_data", "input_data_last"]
-    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "input_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
-        writer = csv.writer(f)
-        for idx, word in enumerate(in_data_words):
-            writer.writerow(["{0:x}".format(int(word, 2)), in_data_words_last[idx]])
-
-    # Ouptut Input Data Stimulus to Text File
-    input_header = ["input_cfg_size", "input_cfg_scheme", "input_cfg_last"]
-    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "input_cfg_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
-        writer = csv.writer(f)
-        for idx, word in enumerate(in_data_words):
-            writer.writerow(["{0:x}".format(int(cfg_size_str, 2)), "00", "1"])
-
-    # Output Expected output to text file
-    output_header = ["output_data", "output_data_last"]
-    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "output_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
-        writer = csv.writer(f)
-        for idx, word in enumerate(out_data_words):
-            writer.writerow(["{0:x}".format(int(word, 2)), out_data_words_last[idx]])
+    # Write out Input Data Stimulus to Text File
+    input_header = ["input_data", "input_data_last"]
+    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "input_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
+        writer = csv.writer(f)
+        for idx, word in enumerate(in_data_words_list):
+            writer.writerow(["{0:x}".format(int(word, 2)), in_data_words_last_list[idx]])
+
+    # Write out Cfg Stimulus to Text File
+    input_header = ["input_cfg_size", "input_cfg_scheme", "input_cfg_last"]
+    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "input_cfg_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
+        writer = csv.writer(f)
+        for idx, word in enumerate(cfg_words_list):
+            writer.writerow(["{0:x}".format(int(word, 2)), "0", "1"])
+
+    # Write out Expected output to text file
+    output_header = ["output_data", "output_data_last"]
+    with open(os.environ["SHA_2_ACC_DIR"] + "/simulate/stimulus/" + "output_data_builder_stim.csv", "w", encoding="UTF8", newline='') as f:
+        writer = csv.writer(f)
+        for idx, word in enumerate(out_data_words_list):
+            writer.writerow(["{0:x}".format(int(word, 2)), out_data_words_last_list[idx]])
 
 def chunkstring(string, length):
     array_len = math.ceil(len(string)/length)
diff --git a/simulate/stimulus/input_cfg_builder_stim.csv b/simulate/stimulus/input_cfg_builder_stim.csv
index e7996044484fd6007917073bb65af9d56833376d..a99cf6fe3dbe1c47f50051ac05174461892373b0 100644
--- a/simulate/stimulus/input_cfg_builder_stim.csv
+++ b/simulate/stimulus/input_cfg_builder_stim.csv
@@ -1,9 +1 @@
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
-1132,00,1
+1132,0,1