Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found
Select Git revision

Target

Select target project
  • soclabs/aes-128-project
  • soclabs/accelerator-project
2 results
Select Git revision
Show changes
Commits on Source (286)
Showing
with 492 additions and 245 deletions
# Simulation Related File Removal
*.vcd
*.vvp
simulate/sim/*
lint/*
.socinit
system/bootrom
\ No newline at end of file
.dma350_configured
accelerator/html/*
wrapper/html/*
system/src/bootrom/*
system/src/defines/*
system/html/*
flist/tmp
memories/*
# Compile Test Code Removal
/system/testcodes/*/*.elf
/system/testcodes/*/*.ELF
/system/testcodes/*/*.lst
/system/testcodes/*/*.o
/system/testcodes/*/*.bin
# Remove Implementation Files
imp/fpga/*
imp/ASIC/*
memories/*
html
\ No newline at end of file
# This file is a template, and might need editing before it works on your project.
# This is a sample GitLab CI/CD configuration file that should run without any modifications.
# It demonstrates a basic 3 stage CI/CD pipeline. Instead of real tests or scripts,
# it uses echo commands to simulate the pipeline execution.
#
# A pipeline is composed of independent jobs that run scripts, grouped into stages.
# Stages run in sequential order, but jobs within stages run in parallel.
#
# For more information, see: https://docs.gitlab.com/ee/ci/yaml/index.html#stages
#
# You can copy and paste this template into a new `.gitlab-ci.yml` file.
# You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword.
#
# To contribute improvements to CI/CD templates, please follow the Development guide at:
# https://docs.gitlab.com/ee/development/cicd/templates.html
# This specific template is located at:
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml

stages: # List of stages for jobs, and their order of execution
  - compile
  - simulate
  - simulate_qs
  - build
  - deploy

variables:
  # Check out all submodules recursively on every job.
  GIT_SUBMODULE_STRATEGY: recursive
  # Quoted so the value stays a string, as GitLab expects for this variable.
  GIT_SUBMODULE_FORCE_HTTPS: "true"

# Build the test software; the resulting hex images are consumed by later stages.
compile-software:
  stage: compile
  script:
    - echo "Compilation phase started"
    - git submodule foreach --recursive git submodule init
    - git fetch --recurse-submodules
    - source set_env.sh
    - socpull
    - cd $SOCLABS_NANOSOC_TECH_DIR
    - make compile_all_code
  artifacts:
    paths:
      - ./nanosoc_tech/testcodes/*/*.hex
  tags:
    - ds5

simulate-hello:
  stage: simulate
  script:
    - echo "Simulation phase started"
    - source set_env.sh
    - socsim test_nanosoc TESTNAME=hello
  tags:
    - VLAB-ZCU

simulate_regression:
  stage: simulate
  script:
    - echo "Simulation phase started"
    - source set_env.sh
    - socpull
    - socsim clean all
    - socsim regression_nanosoc
  tags:
    - VLAB-ZCU

simulate_quickstart-hello:
  stage: simulate_qs
  script:
    - echo "Simulation phase started"
    - source set_env.sh
    - socpull
    - socsim clean all
    - socsim test_nanosoc TESTNAME=hello QUICKSTART=yes
  tags:
    - VLAB-ZCU

simulate_quickstart-regression:
  stage: simulate_qs
  script:
    - echo "Simulation phase started"
    - source set_env.sh
    - socsim clean all
    - socsim regression_nanosoc QUICKSTART=yes
  tags:
    - VLAB-ZCU

build-job-Z2: # This job runs in the build stage, which runs first.
  stage: build
  script:
    # move to fpga_imp directory and run the fpga build script for pynq z2
    - git submodule foreach --recursive git submodule init
    - git fetch --recurse-submodules
    - source set_env.sh
    - socpull
    - cd $SOCLABS_NANOSOC_TECH_DIR
    - make build_fpga FPGA=z2 ACCELERATOR=no
    # GitLab joins script entries with newlines, so this multi-entry
    # if/else/fi forms one valid shell conditional in the generated script.
    - FILE=$SOCLABS_DESIGN_ROOT/imp/fpga/output/pynq_z2/overlays/nanosoc_design.bit
    - if test -f "$FILE"; then
    - echo "Build successful"
    - else
    - echo "Build failed"
    - exit 1
    - fi
  artifacts:
    paths:
      # Keep the generated bit and hwh file from fpga build script
      - ./imp/fpga/output/pynq_z2/overlays/nanosoc_design.bit
      - ./imp/fpga/output/pynq_z2/overlays/nanosoc_design.hwh
  tags:
    - Vivado2021.1

build-job-ZCU104: # This job runs in the build stage, which runs first.
  stage: build
  script:
    # move to fpga_imp directory and run the fpga build script for pynq zcu104
    - git submodule foreach --recursive git submodule init
    - git fetch --recurse-submodules
    - source set_env.sh
    - socpull
    - cd $SOCLABS_NANOSOC_TECH_DIR
    - make build_fpga FPGA=zcu104 ACCELERATOR=no
    - FILE=$SOCLABS_DESIGN_ROOT/imp/fpga/output/pynq_zcu104/overlays/nanosoc_design.bit
    - if test -f "$FILE"; then
    - echo "Build successful"
    - else
    - echo "Build failed"
    - exit 1
    - fi
  artifacts:
    paths:
      # Keep the generated bit and hwh file from fpga build script
      - ./imp/fpga/output/pynq_zcu104/overlays/nanosoc_design.bit
      - ./imp/fpga/output/pynq_zcu104/overlays/nanosoc_design.hwh
  tags:
    - Vivado2021.1

#deploy-job-Z2: # This job runs in the deploy stage.
#  stage: deploy # It only runs when *both* jobs in the test stage complete successfully.
#  environment: production
#  script:
#    - echo "Deploying application to Z2"
#    # use smbclient to transfer accross the bit, hwh and python script files to the z2 xilinx board
#    # could probably set this up as scp with RSA keys in future
#    - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'cd ./pynq/overlays/soclabs/ ; put ./nanosoc/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.bit ./design_1.bit' -E 2>errorlog
#    - if (grep -r "Connection to 192.168.2.99 failed" ./errorlog)
#    - then
#    - echo "Connection to Z2 Board Failed"
#    - exit 1
#    - else
#    - echo "Connection to Z2 Board successful"
#    - fi
#    - rm errorlog
#    - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'cd ./pynq/overlays/soclabs/ ; put ./nanosoc/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.hwh ./design_1.hwh' -E 2>errorlog
#    - if (grep -r "Connection to 192.168.2.99 failed" ./errorlog)
#    - then
#    - echo "Connection to Z2 Board Failed"
#    - exit 1
#    - else
#    - echo "Connection to Z2 Board successful"
#    - fi
#    - rm errorlog
#    - cd ./nanosoc/fpga_imp/CI_verification
#    - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'put ./load_bitfile.py ./load_bitfile.py' -E 2>errorlog
#    - if (grep -r "Connection to 192.168.2.99 failed" ./errorlog)
#    - then
#    - echo "Connection to Z2 Board Failed"
#    - exit 1
#    - else
#    - echo "Connection to Z2 Board successful"
#    - fi
#    - rm errorlog
#    # get root access on host machine, this was found to be needed because other screen would not work
#    # however a more elegant solution would be better
#    - echo gitrunner1 | sudo -S su
#    # open detatched terminal with connection to the Xilinx Z2 board
#    - sudo screen -S zynq -dm /dev/ttyUSB1 115200
#    # get root access on xilinx board, this is needed because the python script won't run without
#    # being root.
#    - sudo screen -r zynq -X stuff "sudo -S su \n"
#    # setup pynq environment
#    - sudo screen -r zynq -X stuff "source /etc/profile.d/pynq_venv.sh \n"
#    - sudo screen -r zynq -X stuff "source /etc/profile.d/xrt_setup.sh \n"
#    - sudo screen -r zynq -X stuff "source /etc/profile.d/boardname.sh \n"
#    # run load_bitfile: this loads the overlay and checks that it has been loaded
#    # script will output "Overlay Loaded" if successful
#    - sudo screen -r zynq -X stuff "python3 load_bitfile.py > tmp \n"
#    # sleep 1 minute: this is needed as currently the terminal running the CI/CD script will not wait
#    # for the python script to finish. A more elegant solution should be implemented in future
#    - sleep 60
#    # copy over the tmp file back to host machine and check if "Overlay Loaded" has been outputed f
#    - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'get tmp'
#    - if (grep -r "Overlay Loaded" ./tmp)
#    - then
#    - echo "Bit file loaded successfully"
#    - else
#    - echo "Bit file load failed"
#    - exit 1
#    - fi
#  after_script:
#    # cleanup: remove detached terminal screen
#    - echo gitrunner1 | sudo -S su
#    - sudo screen -X -S zynq quit
#  tags:
#    - Z2

deploy-job-ZCU104: # This job runs in the deploy stage.
  stage: deploy # It only runs when *both* jobs in the test stage complete successfully.
  environment: production
  script:
    - echo "Deploying application to ZCU104"
    - source set_env.sh
    - socpull
    # list all tests and write list to fpga_tests file
    - find nanosoc_tech/testcodes/*/*.hex > fpga_tests
    # start a detached terminal so that the xilinx environment can be opened without interferring w
    - screen -dmS zynq -L -Logfile screenlog
    - sleep 5
    # copy over vlab.py and vkey and then connect to ZCU104 board
    # FUTURE Work: need to add error handling for if the board does not connect,
    # could grep from screenlog to see if successfully connected
    - screen -r zynq -X stuff "cp -r /home/dwn1c21/FPGA/. ./ \n"
    - screen -r zynq -X stuff "./ZCU104_connect.sh\n"
    - sleep 10
    # use scp to copy over bit files and python script
    - screen -r zynq -X stuff "scp -rp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/accelerator-project/nanosoc_tech/fpga/ci_tools/* ./ \n"
    - sleep 2
    - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/accelerator-project/imp/fpga/output/pynq_zcu104/overlays/nanosoc_design.* ./pynq/overlays/soclabs/ \n"
    - sleep 2
    - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/accelerator-project/fpga_tests ./ \n"
    - sleep 2
    - screen -r zynq -X stuff "mkdir binaries\n"
    - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/accelerator-project/nanosoc_tech/testcodes/*/*.hex ./binaries/ \n"
    - sleep 10
    # Need root access to load the overlay onto the FPGA
    - screen -r zynq -X stuff "sudo su\n"
    - sleep 1
    - screen -r zynq -X stuff "xilinx\n"
    - screen -r zynq -X stuff "source /etc/profile.d/pynq_venv.sh \n"
    - screen -r zynq -X stuff "source /etc/profile.d/xrt_setup.sh \n"
    - screen -r zynq -X stuff "source /etc/profile.d/boardname.sh \n"
    - sleep 5
    # run run_full_verification: this loads the overlay and checks that it has been loaded
    # script will output "Overlay Loaded" if successful
    - screen -r zynq -X stuff "python run_full_verification.py \n"
    - while ! grep -q "ALL TESTS FINISHED" screenlog; do sleep 10; done
    - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa verification_log dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/accelerator-project/ \n"
    # deactivate the pynq virtual environment and exit root access
    - screen -r zynq -X stuff "deactivate \n"
    - screen -r zynq -X stuff "exit \n"
    - sleep 20
    # Display test results
    - cat verification_log
  after_script:
    # cleanup xilinx directories and quit screen
    - screen -r zynq -X stuff "rm load_bitfile.py \n"
    - screen -X -S zynq quit
  tags:
    - ZCU104
# .gitmodules — submodule registry for the accelerator project.
# Fixes: removed the duplicated `url` key under accelerator_wrapper_tech and
# the stray `url = .../nanosoc_tech.git` under chipkit_flow — gitconfig takes
# the LAST duplicate key, so that stray line silently pointed chipkit_flow at
# the wrong repository.
[submodule "accelerator_wrapper_tech"]
	path = accelerator_wrapper_tech
	url = https://git.soton.ac.uk/soclabs/accelerator_wrapper_tech.git
	branch = main
[submodule "nanosoc_tech"]
	path = nanosoc_tech
	url = https://git.soton.ac.uk/soclabs/nanosoc_tech.git
[submodule "socsim_flow"]
	path = socsim_flow
	url = https://git.soton.ac.uk/soclabs/socsim_flow.git
[submodule "chipkit_flow"]
	path = chipkit_flow
	url = https://git.soton.ac.uk/soclabs/chipkit_flow.git
	branch = main
[submodule "soctools_flow"]
	path = soctools_flow
	url = https://git.soton.ac.uk/soclabs/soctools_flow.git
	branch = main
[submodule "generic_lib_tech"]
	path = generic_lib_tech
	url = https://git.soton.ac.uk/soclabs/generic_lib_tech.git
	branch = main
[submodule "fpga_lib_tech"]
	path = fpga_lib_tech
	url = https://git.soton.ac.uk/soclabs/fpga_lib_tech.git
	branch = main
[submodule "rtl_primitives_tech"]
	path = rtl_primitives_tech
	url = https://git.soton.ac.uk/soclabs/rtl_primitives_tech.git
	branch = main
[submodule "asic_lib_tech"]
	path = asic_lib_tech
	url = https://git.soton.ac.uk/soclabs/asic_library_tech.git
	branch = main
#!/usr/bin/env bash
#-----------------------------------------------------------------------------
# SoC Labs icarus verilog simulation script for engine testbench
# A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
#
# Contributors
#
# David Mapstone (d.a.mapstone@soton.ac.uk)
#
# Copyright 2022, SoC Labs (www.soclabs.org)
#-----------------------------------------------------------------------------
# Usage: <script> TESTBENCH_NAME [EXTRA_VVP_ARG]
#   $1 — testbench name; compiles $WRAPPER_TECH_DIR/hdl/verif/tb_$1.sv
#   $2 — optional extra argument forwarded to vvp
# Requires $PROJECT_DIR and $WRAPPER_TECH_DIR to be set (see set_env.sh).
# NOTE(review): shebang moved to line 1 — it is only honoured there; variable
# expansions quoted so paths containing spaces do not word-split.
mkdir -p "$PROJECT_DIR/simulate/sim/"
iverilog -g2012 -o "$PROJECT_DIR/simulate/sim/$1.vvp" "$WRAPPER_TECH_DIR/hdl/verif/tb_$1.sv"
cd "$PROJECT_DIR/simulate/sim/" && vvp "$1.vvp" ${2:+"$2"}
# Copyright 2023, SoC Labs (www.soclabs.org)
#-----------------------------------------------------------------------------
\ No newline at end of file
# Accelerator System Top-Level
# Accelerator System Project
This repo is the top-level repository which contains accelerator and SoC Labs provided design IP in forms of git subrepositories.
This repo is the top-level repository which contains all the relevant IP for integrating your custom hardware accelerator with the SoC Labs provided nanosoc chip design IP in forms of git subrepositories.
The SoC wiring is handled in this repository too, along with design and verification for accelerator wrappers.
### Fork this repository
## Creating own top-level
In order to use this repository for your own project, we recommend that you fork a version of this repository first. In your forked version of the project you can add your accelerator as a subrepo if you are already using repositories, or alternatively add the source for your accelerator directly to your forked repository.
The first stage of putting your accelerator into a SoC is to build the accelerator in your own repository.
### Cloning this repository
---
Once you have a custom design repository, you are able to fork the template System Top-level repository and make some changes.
This Repository contains multiple sub-repositories. In order to clone them with this repository, please use the following command:
After forking, you need to add your own repository as a submodule. The first thing to do is to clone your new forked top-level.
`git clone --recurse-submodules https://git.soton.ac.uk/soclabs/accelerator-project.git`
## Cloning
At this stage you can also add your submodule with:
`git submodule add`
To clone this repository and its subrepository, use the following command:
After doing this you should update the projbranch file to include your repository name (as it appears in .gitmodules) and the branch. This will allow the set_env.sh script to pull in your repository when updates are made
`git clone --recursive $REPO_NAME`
At this point you may also like to edit the /env/dependency_env.sh to include your accelerator directory for example:
Once the repository and the subrepository have been cloned, the next stage is to initialise the environment variables and check out the sub repositories to a branch.
`export ACCELERATOR_DIR="$SOCLABS_PROJECT_DIR/accelerator"`
First navigate to the top of this cloned repository and run:
## Setting up the Project Environment
---
In order to checkout all the sub-repositories in your project to their branches and set up your local environment variables, from the top-level of this project run:
`source set_env.sh`
### Setting up the Project Environment
---
Every time you wish to run commands in this project, you will need to make sure the set environment script has been run for your current terminal session. This is done by moving to the top-level of the project and running:
`source set_env.sh`
This sets the environment variables related to this project and creates visibility to the scripts in the flow directory. Because of this, you can now run:
This sets the environment variables related to this project and creates visibility to the scripts in the flow directory.
### Updating Subrepositories
---
Once you have run a `source set_env.sh` in your current terminal, you are then able to update all your repositories to their latest version by running:
`socpull`
This runs a git pull on all repositories in your project.
## Project Structure
---
The core of the SoC is NanoSoC. This is an example, configurable system that is the main framework. It has many different memory-mapped regions, one of which is designed for the connection of accelerator subsystems called the expansion region.
The expansion region is able to instantiate an accelerator_subsystem by default. This means that anyone using NanoSoC as a platform for accelerator experimentation will need to build an `accelerator_subsystem` rtl module.
There is an example file in `/system/src/accelerator_subsystem.v`. You will need to add an instantiation of your top level to this file. The connections at this level are an AHB subordinate, DMA data request signals, and CPU interrupt signals.
### Using the makefiles and FLISTS
Simulation, FPGA implementation, and ASIC synthesis can all be performed from the main makefile in the nanosoc_tech repository. In order for these to work correctly you must make sure the flist files include your accelerator source code.
You can add these to `/flist/project/accelerator`. It is recommended here to use environment variables, the top level of your project will be `$SOCLABS_PROJECT_DIR`. You can include files directly, or include other flists with the `-f` command at the start of the line.
### htmlgen design visualisation
---
A tool is provided to generate an html documentation tree to traverse and explore the design hierarchy:
`htmlgen -f $SOCLABS_PROJECT_DIR/flist/project/top.flist`
or
`htmlgen`
populates the html/top/build/ directory. Open `nanosoc_tb.html` to explore from the testbench down into the design.
### Accelerator Subsystem
---
`accelerator_subsystem` can either directly contain an accelerator (or multiple) or can instantiate accelerator wrappers which in turn instantiate accelerators.
This module is expected to be found in `system/src/accelerator_subsystem.v`.
### Accelerator Wrapper
---
Accelerator wrappers are located in `wrapper/src`. These should instantiate accelerators and can use wrapper components within the `accelerator_wrapper_tech` repository to allow a conversion of valid/ready interfaces to a memory-mapped AHB interface.
## Running the simulation
---
This design instantiates a custom (AMBA-AHB) wrapper around the AES core to implement a memory-mapped 128-bit AES encrypt/decrypt accelerator that can be used as a software-driven peripheral or a semi-autonomous DMA subsystem when 128-bit keys and variable length data payloads can be set up as scatter/gather descriptor chains for background processing.
To run the simulation the 'socsim' command executes the makefile in the 'nanosoc_tech' microcontroller framework. (Edit the simulator target in nanosoc_tech/system/makefile for the simulator EDA tool used). Then use the:
`socsim test_nanosoc TESTNAME=hello`
This runs the integration test program on the Arm Cortex-M0 processor using the 'test_nanosoc.sh' script provided in the simulate/socsim directory and the logs are produced in the simulate/sim/test_nanosoc/logs directory.
This will only run simulations on nanosoc, without the accelerator instantiated, you can also run simulations with your accelerator using
`socsim test_accelerator TESTNAME=xx`
Replacing the xx with the chosen test you want to run
## Adding testcodes
To add your own testcodes to be run from the Arm Cortex-M0 processor, it is recommended these are added in the `/system/testcodes/$TESTNAME` directory.
To enable the makefiles to find your testcode, you should also add the name of your test to the `/system/testcodes/software_list.txt` file.
It is recommended that you copy and edit one of the makefiles for compiling your software into .hex files. (for example in `/system/testcodes/adp_v4_cmd_tests/makefile`) You will have to edit the TESTNAME (line 46) and may also need to change some of the compiler options depending on your code.
You can then either simulate using the `socsim test_accelerator TESTNAME=x` or alternatively from the `/nanosoc_tech` directory you can run `make run_%SIM% TESTNAME=x ACCELERATOR=yes`
`soc-init`
The currently supported simulators are VCS, Xcelium and Questa Sim. Currently we mostly use Questa Sim (run_mti)
This checks out all the repositories to the `main` branch. You are then able to check out the sub repos to the desired branches.
You can also run `sim_%SIMULATOR%` and this will run the simulation from the GUI.
## Adding Submodule
When using the makefile, you must include the `ACCELERATOR=yes` directive to include your accelerator
After setting up your workarea, you now need to add your accelerator design repository as a subrepo.
## FPGA Builds
From `$DESIGN_ROOT`, you are able to run:
We currently have build files for the ARM MPS3, PYNQ ZCU104, PYNQ Z2, KRIA KR260 and KRIA KV260. To build the bitfiles you can run the `make build_fpga FPGA=%target% ACCELERATOR=yes` from the `/nanosoc_tech/` directory.
The acceptable targets are defined in the `/nanosoc_tech/fpga/makefile.targets` and are `mps3, zcu104, z2, kr260, kv260`
`git submodule status`
This script will run the vivado build scripts. The output from this will be in the `/imp/FPGA` directory.
This lists the sub repositories and their branches. Make sure these are all you are expecting other than your design repo and you can then use the
## ASIC Synthesis
`git submodule add -b $BRANCH $REPOSITORY_URL`
To run the ASIC synthesis you will first need to define a `$PHYS_IP` environment variable. This should point to the uncompressed Arm bundles for the TSMC 65nm LP node.
to add the repo into this work area.
For a Cadence synthesis flow use:
1. Run `make gen_memories` this will generate the bootrom, and SRAMS using the artisan memory compilers
2. Run `make flist_genus_nanosoc ACCELERATOR=yes ASIC=yes` which will generate the top flist for the genus synthesiser tool
3. Run `make syn_genus ACCELERATOR=yes ASIC=yes` which will run the synthesis
You then need to push the .gitmodules file back to remote to save this configuration.
\ No newline at end of file
The output from the synthesis will be in the `/imp/ASIC` directory.
Subproject commit 33b0a1afab0d8d3279e9f6156a18d7b4954f9f5f
Subproject commit 0748aa7d6c1186bf8195557eef230a95d2a7c53c
Subproject commit 0977f3e957d303064bb5f23a11bc7f552ecf6a26
Subproject commit 672416929813708f48e5376c8de00a4f7a69aa9a
......@@ -15,29 +15,42 @@
#-----------------------------------------------------------------------------
# Accelerator Engine -- Add Your Accelerator Environment Variable HERE!
# export YOUR_ACCELERATOR_DIR="$PROJECT_DIR/your_accelerator"
export ACCELERATOR_DIR="$SOCLABS_PROJECT_DIR/secworks-aes"
# Accelerator Wrapper
export WRAPPER_TECH_DIR="$PROJECT_DIR/accelerator_wrapper_tech"
export SOCLABS_WRAPPER_TECH_DIR="$SOCLABS_PROJECT_DIR/accelerator_wrapper_tech"
# NanoSoC
export NANOSOC_TECH_DIR="$PROJECT_DIR/nanosoc_tech"
export SOCLABS_NANOSOC_TECH_DIR="$SOCLABS_PROJECT_DIR/nanosoc_tech"
# SoCDebug
export SOCLABS_SOCDEBUG_TECH_DIR="$SOCLABS_PROJECT_DIR/nanosoc_tech/nanosoc/socdebug_tech"
# SLCore-M0
export SOCLABS_SLCOREM0_TECH_DIR="$SOCLABS_PROJECT_DIR/nanosoc_tech/nanosoc/slcorem0_tech"
# SLDMA-230
export SOCLABS_SLDMA230_TECH_DIR="$SOCLABS_PROJECT_DIR/nanosoc_tech/nanosoc/sldma230_tech"
export SOCLABS_SLDMA350_TECH_DIR="$SOCLABS_PROJECT_DIR/nanosoc_tech/nanosoc/sldma350_tech"
# Primitives
export PRIMITIVES_TECH_DIR="$PROJECT_DIR/rtl_primitives_tech"
export SOCLABS_PRIMITIVES_TECH_DIR="$SOCLABS_PROJECT_DIR/rtl_primitives_tech"
# FPGA Libraries
export FPGA_LIB_TECH_DIR="$PROJECT_DIR/fpga_lib_tech"
export SOCLABS_FPGA_LIB_TECH_DIR="$SOCLABS_PROJECT_DIR/fpga_lib_tech"
# ASIC Libraries
export SOCLABS_ASIC_LIB_TECH_DIR="$SOCLABS_PROJECT_DIR/asic_lib_tech"
# Generic Libraries
export GENERIC_LIB_TECH_DIR="$PROJECT_DIR/generic_lib_tech"
export SOCLABS_GENERIC_LIB_TECH_DIR="$SOCLABS_PROJECT_DIR/generic_lib_tech"
#-----------------------------------------------------------------------------
# Flows
#-----------------------------------------------------------------------------
# CHIPKIT - Register Generation
export CHIPKIT_FLOW_DIR="$PROJECT_DIR/chipkit_flow"
# SoCTools - Toolkit of scripts related to SoCLabs projects
export SOCLABS_SOCTOOLS_FLOW_DIR="$SOCLABS_PROJECT_DIR/soctools_flow"
# SoCSim - Basic Simulation Flow Wrapper
export SOCSIM_FLOW_DIR="$PROJECT_DIR/socsim_flow"
\ No newline at end of file
# CHIPKIT - Register Generation
export SOCLABS_CHIPKIT_FLOW_DIR="$SOCLABS_SOCTOOLS_FLOW_DIR/tools/chipkit_flow"
......@@ -16,8 +16,8 @@
+libext+.v+.vlib
// ============= Accelerator Module search path =============
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_slave_mux/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_default_slave/verilog
// CMSDK AHB Slave Mux IP
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_slave_mux/verilog/cmsdk_ahb_slave_mux.v
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_slave_mux/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_default_slave/verilog
\ No newline at end of file
// CMSDK AHB Default Slave IP
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_default_slave/verilog/cmsdk_ahb_default_slave.v
\ No newline at end of file
//-----------------------------------------------------------------------------
// Accelerator Wrapper Filelist
// Accelerator Wrapper CMSDK Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
......@@ -9,18 +9,15 @@
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Top-level Accelerator Wrapper
// Abstract : Verilog Command File for Accelerator CMSDK AHB IP
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Wrapper IP Filelist ========================
-f $(PROJECT_DIR)/flist/wrapper/wrapper_ip.flist
// ============= Accelerator Module search path =============
-y $(PROJECT_DIR)/wrapper/src/
+incdir+$(PROJECT_DIR)/wrapper/src/
// CMSDK AHB Slave Mux IP
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_slave_mux/verilog/cmsdk_ahb_slave_mux.v
// Add the source files related to your custom wrapper
// $(PROJECT_DIR)/wrapper/src/your_wrapper.v
// CMSDK AHB Default Slave IP
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_default_slave/verilog/cmsdk_ahb_default_slave.v
\ No newline at end of file
......@@ -16,6 +16,7 @@
+libext+.v+.vlib
// ============= Accelerator Module search path =============
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_filereadcore.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_funnel.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_master32.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_master64.v
//-----------------------------------------------------------------------------
// Generic Library Filelist
// Accelerator Wrapper CMSDK Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
......@@ -9,17 +9,14 @@
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Generic Library
// Abstract : Verilog Command File for Accelerator CMSDK AHB VIP
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Accelerator Module search path =============
$(GENERIC_LIB_TECH_DIR)/pads/verilog/PAD_INOUT8MA_NOE.v
$(GENERIC_LIB_TECH_DIR)/pads/verilog/PAD_VDDIO.v
$(GENERIC_LIB_TECH_DIR)/pads/verilog/PAD_VSSIO.v
$(GENERIC_LIB_TECH_DIR)/pads/verilog/PAD_VDDSOC.v
$(GENERIC_LIB_TECH_DIR)/pads/verilog/PAD_VSS.v
$(GENERIC_LIB_TECH_DIR)/mem/verilog/SROM_Ax32.v
$(GENERIC_LIB_TECH_DIR)/sync/verilog/SYNCHRONIZER_EDGES.v
\ No newline at end of file
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_filereadcore.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_funnel.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_master32.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_fileread_masters/verilog/cmsdk_ahb_fileread_master64.v
//-----------------------------------------------------------------------------
// Accelerator Wrapper CMSDK Filelist
// CMSDK APB IP Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
......@@ -15,13 +15,8 @@
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Accelerator Module search path =============
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_apb/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_apb/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog
$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_apb/verilog/cmsdk_ahb_to_apb.v
$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_interface.v
$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_reg.v
$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave.v
// ============= CMSDK APB IP search path =============
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_apb/verilog/cmsdk_ahb_to_apb.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_interface.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_reg.v
//$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave.v
//-----------------------------------------------------------------------------
// NanoSoC DMA-230 Filelist
// CMSDK APB IP Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
......@@ -9,18 +9,14 @@
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Arm DMA-230
// Abstract : Verilog Command File for Accelerator CMSDK AHB IP
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= DMA-230 search path =============
+incdir+$(PROJECT_DIR)/system/defines/pl230
$(ARM_IP_LIBRARY_PATH)/latest/DMA-230/logical/pl230_ahb_ctrl.v
$(ARM_IP_LIBRARY_PATH)/latest/DMA-230/logical/pl230_apb_regs.v
$(ARM_IP_LIBRARY_PATH)/latest/DMA-230/logical/pl230_dma_data.v
$(ARM_IP_LIBRARY_PATH)/latest/DMA-230/logical/pl230_udma.v
$(ARM_IP_LIBRARY_PATH)/latest/DMA-230/logical/pl230_undefs.v
// ============= CMSDK APB IP search path =============
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_ahb_to_apb/verilog/cmsdk_ahb_to_apb.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_interface.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave_reg.v
//$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0-QS/Corstone-101-logical/cmsdk_apb3_eg_slave/verilog/cmsdk_apb3_eg_slave.v
//-----------------------------------------------------------------------------
// NanoSoC Corstone-101 Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
//
// David Mapstone (d.a.mapstone@soton.ac.uk)
//
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Arm Corstone-101
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Corstone-101 search path =============
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_dualtimers/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_watchdog/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/memories/
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_timer/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_dualtimers/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_uart/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_watchdog/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_slave_mux/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_apb_subsystem/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_slave_mux/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_default_slave/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_gpio/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_apb/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_iop_gpio/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/clkgate
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/memories/
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_ahb_to_sram/verilog
//-----------------------------------------------------------------------------
// NanoSoC Corstone-101 VIP Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
//
// David Mapstone (d.a.mapstone@soton.ac.uk)
//
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Arm Corstone-101 VIP
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Corstone-101 VIP search path =============
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_debug_tester/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/protocol_checkers/AhbLitePC/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/protocol_checkers/ApbPC/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/cmsdk_debug_tester/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/protocol_checkers/AhbLitePC/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Corstone-101/logical/models/protocol_checkers/ApbPC/verilog
//-----------------------------------------------------------------------------
// NanoSoC Cortex-M0 Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
//
// David Mapstone (d.a.mapstone@soton.ac.uk)
//
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for Arm Cortex-M0
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= Cortex-M0 search path =============
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0_dap/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0_integration/verilog
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/models/cells
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/models/wrappers
-y $(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/ualdis/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0_dap/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/cortexm0_integration/verilog
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/models/cells
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/models/wrappers
+incdir+$(ARM_IP_LIBRARY_PATH)/latest/Cortex-M0/logical/ualdis/verilog
//-----------------------------------------------------------------------------
// NanoSoC Chip Related IP Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
//
// David Mapstone (d.a.mapstone@soton.ac.uk)
//
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for NanoSoC Bus Matrix IP
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= NanoSoC Bus Matrix IP search path =============
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_chip.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_chip_pads.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_cpu.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_sysio.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_sys_ahb_decode.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_apb_subsystem.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_ahb_cs_rom_table.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_mcu_pin_mux.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_mcu_stclkctrl.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_mcu_clkctrl.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_mcu_sysctrl.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_apb_usrt.v
$(NANOSOC_TECH_DIR)/system/src/verilog/nanosoc_ahb_bootrom.v
$(NANOSOC_TECH_DIR)/system/src/bootrom/verilog/bootrom.v
$(NANOSOC_TECH_DIR)/system/aes/src/nanosoc_acc_wrapper.v
+incdir+$(PROJECT_DIR)/secworks-aes/src/rtl
$(NANOSOC_TECH_DIR)/system/aes/src/soclabs_ahb_aes128_ctrl.v
//-----------------------------------------------------------------------------
// NanoSoC Testbench Filelist
// A joint work commissioned on behalf of SoC Labs, under Arm Academic Access license.
//
// Contributors
//
// David Mapstone (d.a.mapstone@soton.ac.uk)
//
// Copyright © 2021-3, SoC Labs (www.soclabs.org)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Abstract : Verilog Command File for NanoSoC Testbench
//-----------------------------------------------------------------------------
// ============= Verilog library extensions ===========
+libext+.v+.vlib
// ============= NanoSoC Testbench search path =============
+incdir+$(NANOSOC_TECH_DIR)/system/verif/verilog/
// - Top-level testbench
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_tb.v
// - Testbench components
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_clkreset.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_uart_capture.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_axi_stream_io_8_txd_from_file.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_ft1248x1_to_axi_streamio_v1_0.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_axi_stream_io_8_rxd_to_file.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_track_tb_iostream.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_ft1248x1_track.v
$(NANOSOC_TECH_DIR)/system/verif/verilog/nanosoc_dma_log_to_file.v
$(NANOSOC_TECH_DIR)/system/aes/verif/aes128_log_to_file.v