diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d4f2711629936a6e401f5ab07f38b8f2d5609236..7e3c059636052a46bd3611107a3f56fda0f772ef 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -23,7 +23,9 @@ stages: # List of stages for jobs, and their order of execution build-job-Z2: # This job runs in the build stage, which runs first. stage: build script: + # setup vivado environment for Vivado 2021.1 - source /tools/Xilinx/Vivado/2021.1/.settings64-Vivado.sh + # move and unpack the arm ip into the arm-AAA-ip folder, below the working directory - cp -r /home/gitlab-runner/arm-AAA-ip ../arm-AAA-ip - cd ../arm-AAA-ip/Corstone-101_Foundation_IP/ - tar -xf BP210-r1p1-00rel0.tar.gz @@ -31,6 +33,7 @@ build-job-Z2: # This job runs in the build stage, which runs first. - tar -xf AT510-r0p0-03rel2.tar.gz - cd ../DMA-230_MicroDMA_Controller/ - tar -xf PL230-r0p0-02rel2-1.tar.gz + # move to fpga_imp directory and run the fpga build script for pynq z2 - cd ../../nanosoc/Cortex-M0/nanosoc/systems/mcu/fpga_imp/ - if source ./build_fpga_pynq_z2.scr; then - FILE = ./pynq_export/pz2/pynq/overlays/soclabs/design_1.bit @@ -40,10 +43,12 @@ build-job-Z2: # This job runs in the build stage, which runs first. - echo "Build failed" - fi - fi + # cleanup arm-AAA-ip directory - cd ../../../../../../ - rm -r arm-AAA-ip artifacts: paths: + # Keep the generated bit and hwh file from fpga build script - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.bit - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.hwh - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/CI_verification/load_bitfile.py @@ -53,6 +58,7 @@ build-job-Z2: # This job runs in the build stage, which runs first. build-job-ZCU104: # This job runs in the build stage, which runs first. 
stage: build script: + # move and unpack the arm ip into the arm-AAA-ip folder, below the working directory - cp -r /home/dwn1c21/arm-AAA-ip ../arm-AAA-ip - cd ../arm-AAA-ip/Corstone-101_Foundation_IP/ - tar -xf BP210-r1p1-00rel0.tar.gz @@ -60,6 +66,7 @@ build-job-ZCU104: # This job runs in the build stage, which runs first. - tar -xf AT510-r0p0-03rel2.tar.gz - cd ../DMA-230_MicroDMA_Controller/ - tar -xf PL230-r0p0-02rel2-1.tar.gz + # move to fpga_imp directory and run the fpga build script for zcu104 - cd ../../nanosoc/Cortex-M0/nanosoc/systems/mcu/fpga_imp/ - if source ./build_fpga_pynq_zcu104.scr; then - FILE = ./pynq_export/pz104/pynq/overlays/soclabs/design_1.bit @@ -69,10 +76,12 @@ build-job-ZCU104: # This job runs in the build stage, which runs first. - echo "Build failed" - fi - fi + # cleanup arm-AAA-ip directory - cd ../../../../../../ - rm -r arm-AAA-ip artifacts: paths: + # Keep the generated bit and hwh file from fpga build script - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz104/pynq/overlays/soclabs/design_1.bit - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz104/pynq/overlays/soclabs/design_1.hwh - ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/CI_verification/load_bitfile.py @@ -84,18 +93,31 @@ deploy-job-Z2: # This job runs in the deploy stage. 
environment: production script: - echo "Deploying application to Z2" + # use smbclient to transfer across the bit, hwh and python script files to the z2 xilinx board + # could probably set this up as scp with RSA keys in future - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'cd ./pynq/overlays/soclabs/ ; put ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.bit ./design_1.bit' - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'cd ./pynq/overlays/soclabs/ ; put ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz2/pynq/overlays/soclabs/design_1.hwh ./design_1.hwh' - cd ./Cortex-M0/nanosoc/systems/mcu/fpga_imp/CI_verification - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'put ./load_bitfile.py ./load_bitfile.py' + # get root access on host machine, this was found to be needed because otherwise screen would not work + # however a more elegant solution would be better - echo gitrunner1 | sudo -S su + # open detached terminal with connection to the Xilinx Z2 board - sudo screen -S zynq -dm /dev/ttyUSB1 115200 + # get root access on xilinx board, this is needed because the python script won't run without + # being root. - sudo screen -r zynq -X stuff "sudo -S su \n" + # setup pynq environment - sudo screen -r zynq -X stuff "source /etc/profile.d/pynq_venv.sh \n" - sudo screen -r zynq -X stuff "source /etc/profile.d/xrt_setup.sh \n" - sudo screen -r zynq -X stuff "source /etc/profile.d/boardname.sh \n" + # run load_bitfile: this loads the overlay and checks that it has been loaded + # script will output "Overlay Loaded" if successful - sudo screen -r zynq -X stuff "python3 load_bitfile.py > tmp \n" + # sleep 1 minute: this is needed as currently the terminal running the CI/CD script will not wait + # for the python script to finish. 
A more elegant solution should be implemented in future - sleep 60 + # copy over the tmp file back to host machine and check if "Overlay Loaded" has been output by the script - smbclient //192.168.2.99/xilinx -m SMB3 -U xilinx%xilinx -c 'get tmp' - if (grep -r "Overlay Loaded" ./tmp) - then @@ -105,6 +127,7 @@ deploy-job-Z2: # This job runs in the deploy stage. - exit 1 - fi after_script: + # cleanup: remove detached terminal screen - echo gitrunner1 | sudo -S su - sudo screen -X -S zynq quit tags: @@ -115,17 +138,23 @@ deploy-job-ZCU104: # This job runs in the deploy stage. environment: production script: - echo "Deploying application to ZCU104" + # start a detached terminal so that the xilinx environment can be opened without interfering with the host environment - screen -dmS zynq -L -Logfile screenlog - sleep 5 + # copy over vlab.py and vkey and then connect to ZCU104 board + # FUTURE Work: need to add error handling for if the board does not connect, + # could grep from screenlog to see if successfully connected - screen -r zynq -X stuff "cp -r /home/dwn1c21/FPGA/. 
./ \n" - screen -r zynq -X stuff "./ZCU104_connect.sh \n" - sleep 10 + # use scp to copy over bit files and python script - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/nanosoc/Cortex-M0/nanosoc/systems/mcu/fpga_imp/CI_verification/load_bitfile.py ./ \n" - sleep 2 - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/nanosoc/Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz104/pynq/overlays/soclabs/design_1.bit ./pynq/overlays/soclabs/design_1.bit \n" - sleep 2 - screen -r zynq -X stuff "scp -i ~/.ssh/id_rsa dwn1c21@soclabs.soton.ac.uk:~/builds/wzndG1mA/0/soclabs/nanosoc/Cortex-M0/nanosoc/systems/mcu/fpga_imp/pynq_export/pz104/pynq/overlays/soclabs/design_1.hwh ./pynq/overlays/soclabs/design_1.hwh \n" - sleep 2 + # Need root access to load the overlay onto the FPGA - screen -r zynq -X stuff "sudo su\n" - sleep 1 - screen -r zynq -X stuff "xilinx\n" @@ -133,10 +162,14 @@ deploy-job-ZCU104: # This job runs in the deploy stage. - screen -r zynq -X stuff "source /etc/profile.d/xrt_setup.sh \n" - screen -r zynq -X stuff "source /etc/profile.d/boardname.sh \n" - sleep 5 + # run load_bitfile: this loads the overlay and checks that it has been loaded + # script will output "Overlay Loaded" if successful - screen -r zynq -X stuff "python3 load_bitfile.py \n" - sleep 40 + # deactivate the pynq virtual environment and exit root access - screen -r zynq -X stuff "deactivate \n" - screen -r zynq -X stuff "exit \n" + # test the screenlog for "Overlay Loaded" - if (grep -r "Overlay Loaded" ./screenlog) - then - echo "Bit file loaded successfully" @@ -145,6 +178,7 @@ deploy-job-ZCU104: # This job runs in the deploy stage. - exit 1 - fi after_script: + # cleanup xilinx directories and quit screen - screen -r zynq -X stuff "rm load_bitfile.py \n" - screen -r zynq -X stuff "rm artifacts.zip \n" - screen -r zynq -X stuff "rm -r ./Cortex-M0 \n"