variables:
  windows_vm: windows-2019
  ubuntu_vm: ubuntu-22.04
  macos_vm: macOS-12
  ci_runner_image: trini/u-boot-gitlab-ci-runner:jammy-20240125-12Feb2024
  # Add the '-u 0' option for Azure pipelines, otherwise we get a "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: -u 0
  work_dir: /u

stages:
- stage: testsuites
  jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    steps:
      - powershell: |
          (New-Object Net.WebClient).DownloadFile("https://github.com/msys2/msys2-installer/releases/download/2021-06-04/msys2-base-x86_64-20210604.sfx.exe", "sfx.exe")
        displayName: 'Install MSYS2'
      - script: |
          sfx.exe -y -o%CD:~0,2%\
          %CD:~0,2%\msys64\usr\bin\bash -lc " "
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm -Syuu"
        displayName: 'Update MSYS2'
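      # Running 'pacman -Syuu' twice is the usual MSYS2 upgrade sequence: the first
      # pass updates the core runtime, the second brings the remaining packages up
      # to date.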
      - script: |
          %CD:~0,2%\msys64\usr\bin\bash -lc "pacman --noconfirm --needed -Sy make gcc bison flex diffutils openssl-devel libgnutls-devel libutil-linux-devel"
        displayName: 'Install Toolchain'
      - script: |
          echo make tools-only_defconfig tools-only > build-tools.sh
          %CD:~0,2%\msys64\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME
          CHERE_INVOKING: yes

  - job: tools_only_macOS
    displayName: 'Ensure host tools build for macOS'
    pool:
      vmImage: $(macos_vm)
    steps:
      - script: brew install make ossp-uuid
        displayName: Brew install dependencies
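      # The OpenSSL paths below assume Homebrew's Intel prefix (/usr/local); on an
      # Apple Silicon image the openssl@1.1 keg would live under /opt/homebrew instead.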
      - script: |
          gmake tools-only_config tools-only \
            HOSTCFLAGS="-I/usr/local/opt/openssl@1.1/include" \
            HOSTLDFLAGS="-L/usr/local/opt/openssl@1.1/lib" \
            -j$(sysctl -n hw.logicalcpu)
        displayName: 'Perform tools-only build'

  - job: check_for_new_CONFIG_symbols_outside_Kconfig
    displayName: 'Check for new CONFIG symbols outside Kconfig'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
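      # The ':^<path>' pathspecs exclude locations where such defines are still
      # expected (documentation, device trees, the kconfig machinery and tools/).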
      - script: git grep -E '^#[[:blank:]]*(define|undef)[[:blank:]]*CONFIG_'
                  :^doc/ :^arch/arm/dts/ :^scripts/kconfig/lkc.h
                  :^include/linux/kconfig.h :^tools/ && exit 1 || exit 0

  - job: docs
    displayName: 'Build documentation'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          virtualenv -p /usr/bin/python3 /tmp/venvhtml
          . /tmp/venvhtml/bin/activate
          pip install -r doc/sphinx/requirements.txt
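          # KDOC_WERROR=1 is expected to promote kernel-doc warnings to hard errors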
          make htmldocs KDOC_WERROR=1
          make infodocs

  - job: maintainers
    displayName: 'Ensure all configs have MAINTAINERS entries'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          ./tools/buildman/buildman --maintainer-check

  - job: tools_only
    displayName: 'Ensure host tools and env tools build'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          make tools-only_config tools-only -j$(nproc)
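          # mrproper wipes the configuration so the envtools build starts from a clean tree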
          make mrproper
          make tools-only_config envtools -j$(nproc)

  - job: utils
    displayName: 'Run binman, buildman, dtoc, Kconfig and patman testsuites'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - script: |
          cat << "EOF" > build.sh
          cd $(work_dir)
          git config --global user.name "Azure Pipelines"
          git config --global user.email bmeng.cn@gmail.com
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
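          # From here on, echo each command and abort on the first failing testsuite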
          set -ex
          ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
          ./tools/buildman/buildman -t
          ./tools/dtoc/dtoc -t
          ./tools/patman/patman test
          make O=${UBOOT_TRAVIS_BUILD_DIR} testconfig
          EOF
          cat build.sh
          # We cannot use "container" like the other jobs above, as buildman
          # seems to hang forever in the pre-configured "container" environment
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh

  - job: pylint
    displayName: Check for any pylint regressions
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      - script: |
          git config --global --add safe.directory $(work_dir)
          export USER=azure
          pip install -r test/py/requirements.txt
          pip install -r tools/buildman/requirements.txt
          pip install asteval pylint==2.12.2 pyopenssl
          export PATH=${PATH}:~/.local/bin
          echo "[MASTER]" >> .pylintrc
          echo "load-plugins=pylint.extensions.docparams" >> .pylintrc
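          # the docparams extension checks that docstrings document their parameters and return values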
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/tools-only
          ./tools/buildman/buildman -T0 -o ${UBOOT_TRAVIS_BUILD_DIR} -w --board tools-only
          set -ex
          pylint --version
          export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
          make pylint_err

  - job: check_for_pre_schema_tags
    displayName: 'Check for pre-schema driver model tags'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
      # If grep succeeds and finds a match the test fails as we should
      # have no matches.
      - script: git grep u-boot,dm- -- '*.dts*' && exit 1 || exit 0

  - job: check_packing_of_python_tools
    displayName: 'Check we can package the Python tools'
    pool:
      vmImage: $(ubuntu_vm)
    container:
      image: $(ci_runner_image)
      options: $(container_option)
    steps:
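      # 'make pip' presumably builds the pip-installable packages for the Python
      # tools, so a packaging regression shows up here rather than at release time.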
      - script: make pip

  - job: create_test_py_wrapper_script
    displayName: 'Create and stage a wrapper for test.py runs'
    pool:
      vmImage: $(ubuntu_vm)
    steps:
      - checkout: none
      - script: |
          cat << EOF > test.sh
          #!/bin/bash
          set -ex
          # the below corresponds to .gitlab-ci.yml "before_script"
          cd \${WORK_DIR}
          git config --global --add safe.directory \${WORK_DIR}
          git clone --depth=1 https://source.denx.de/u-boot/u-boot-test-hooks /tmp/uboot-test-hooks
          ln -s travis-ci /tmp/uboot-test-hooks/bin/\`hostname\`
          ln -s travis-ci /tmp/uboot-test-hooks/py/\`hostname\`
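          # u-boot-test-hooks looks up its config by hostname; the symlinks above point it at the generic travis-ci files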
          grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal echo lsefimmap lsefi lsefisystab efinet tftp minicmd
          if [[ "\${TEST_PY_BD}" == "qemu-riscv32_spl" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/ilp32/generic/firmware/fw_dynamic.bin;
          fi
          if [[ "\${TEST_PY_BD}" == "qemu-riscv64_spl" ]] || [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              wget -O - https://github.com/riscv-software-src/opensbi/releases/download/v1.3.1/opensbi-1.3.1-rv-bin.tar.xz | tar -C /tmp -xJ;
              export OPENSBI=/tmp/opensbi-1.3.1-rv-bin/share/opensbi/lp64/generic/firmware/fw_dynamic.bin;
          fi
          # the below corresponds to .gitlab-ci.yml "script"
          cd \${WORK_DIR}
          export UBOOT_TRAVIS_BUILD_DIR=/tmp/\${TEST_PY_BD}
          if [ -n "\${BUILD_ENV}" ]; then
              export \${BUILD_ENV};
          fi
          pip install -r tools/buildman/requirements.txt
          tools/buildman/buildman -o \${UBOOT_TRAVIS_BUILD_DIR} -w -E -W -e --board \${TEST_PY_BD} \${OVERRIDE}
          cp ~/grub_x86.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp ~/grub_x64.efi \${UBOOT_TRAVIS_BUILD_DIR}/
          cp /opt/grub/grubriscv64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_riscv64.efi
          cp /opt/grub/grubaa64.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm64.efi
          cp /opt/grub/grubarm.efi \${UBOOT_TRAVIS_BUILD_DIR}/grub_arm.efi
          # create sdcard / spi-nor images for sifive unleashed using genimage
          if [[ "\${TEST_PY_BD}" == "sifive_unleashed" ]]; then
              mkdir -p root;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/spl/u-boot-spl.bin .;
              cp \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.itb .;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_sdcard.cfg;
              cp images/sdcard.img \${UBOOT_TRAVIS_BUILD_DIR}/;
              rm -rf tmp;
              genimage --inputpath . --config board/sifive/unleashed/genimage_spi-nor.cfg;
              cp images/spi-nor.img \${UBOOT_TRAVIS_BUILD_DIR}/;
          fi
          if [[ "\${TEST_PY_BD}" == "coreboot" ]]; then
              cp /opt/coreboot/coreboot.rom \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom remove -n fallback/payload;
              /opt/coreboot/cbfstool \${UBOOT_TRAVIS_BUILD_DIR}/coreboot.rom add-flat-binary -f \${UBOOT_TRAVIS_BUILD_DIR}/u-boot.bin -n fallback/payload -c LZMA -l 0x1110000 -e 0x1110000;
          fi
          virtualenv -p /usr/bin/python3 /tmp/venv
          . /tmp/venv/bin/activate
          pip install -r test/py/requirements.txt
          pip install pytest-azurepipelines
          export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:\${PATH}
          export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci
          # "\${var:+"-k \$var"}" expands to "" if \$var is empty, "-k \$var" if not
          ./test/py/test.py -ra -o cache_dir="\$UBOOT_TRAVIS_BUILD_DIR"/.pytest_cache --bd \${TEST_PY_BD} \${TEST_PY_ID} \${TEST_PY_TEST_SPEC:+"-k \${TEST_PY_TEST_SPEC}"} --build-dir "\$UBOOT_TRAVIS_BUILD_DIR" --report-dir "\$UBOOT_TRAVIS_BUILD_DIR"
          # the below corresponds to .gitlab-ci.yml "after_script"
          rm -rf /tmp/uboot-test-hooks /tmp/venv
          EOF
      - task: CopyFiles@2
        displayName: 'Copy test.sh for later usage'
        inputs:
          contents: 'test.sh'
          targetFolder: '$(Build.ArtifactStagingDirectory)'
      - publish: '$(Build.ArtifactStagingDirectory)/test.sh'
        displayName: 'Publish test.sh'
        artifact: testsh

- stage: test_py_sandbox
  jobs:
  - job: test_py_sandbox
    displayName: 'test.py for sandbox'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        sandbox:
          TEST_PY_BD: "sandbox"
        sandbox_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox_clang:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-16"
        sandbox_clang_asan:
          TEST_PY_BD: "sandbox"
          OVERRIDE: "-O clang-16 -a ASAN"
          TEST_PY_TEST_SPEC: "version"
        sandbox64:
          TEST_PY_BD: "sandbox64"
        sandbox64_clang:
          TEST_PY_BD: "sandbox64"
          OVERRIDE: "-O clang-16"
        sandbox_spl:
          TEST_PY_BD: "sandbox_spl"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_vpl:
          TEST_PY_BD: "sandbox_vpl"
          TEST_PY_TEST_SPEC: "vpl or test_spl"
        sandbox_noinst:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
        sandbox_noinst_load_fit_full:
          TEST_PY_BD: "sandbox_noinst"
          TEST_PY_TEST_SPEC: "test_ofplatdata or test_handoff or test_spl"
          OVERRIDE: "-a CONFIG_SPL_LOAD_FIT_FULL=y"
        sandbox_flattree:
          TEST_PY_BD: "sandbox_flattree"
        sandbox_trace:
          TEST_PY_BD: "sandbox"
          BUILD_ENV: "FTRACE=1 NO_LTO=1"
          TEST_PY_TEST_SPEC: "trace"
          OVERRIDE: "-a CONFIG_TRACE=y -a CONFIG_TRACE_EARLY=y -a CONFIG_TRACE_EARLY_SIZE=0x01000000 -a CONFIG_TRACE_BUFFER_SIZE=0x02000000"
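    # OVERRIDE is appended to the buildman command line in test.sh ('-a' adjusts
    # Kconfig options, '-O' selects an alternative toolchain), while BUILD_ENV is
    # exported into the build environment before building.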
    steps:
      - download: current
        artifact: testsh
      - script: |
          # make the current directory writable by the uboot user inside the container,
          # as sandbox testing needs to create files like SPI flash images, etc.
          # (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
          # Filesystem tests need extra docker args to run
          set --
          # mount -o loop needs the loop devices
          if modprobe loop; then
              for d in $(find /dev -maxdepth 1 -name 'loop*'); do
                  set -- "$@" --device $d:$d
              done
          fi
          # Needed for the mount syscall (for guestmount as well)
          set -- "$@" --cap-add SYS_ADMIN
          # Default apparmor profile denies mounts
          set -- "$@" --security-opt apparmor=unconfined
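          # The work_dir pipeline variable is exported to the script environment as
          # WORK_DIR, which is what test.sh expects inside the container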
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh

- stage: test_py_qemu
  jobs:
  - job: test_py_qemu
    displayName: 'test.py for QEMU platforms'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      matrix:
        coreboot:
          TEST_PY_BD: "coreboot"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        evb_ast2500:
          TEST_PY_BD: "evb-ast2500"
          TEST_PY_ID: "--id qemu"
        evb_ast2600:
          TEST_PY_BD: "evb-ast2600"
          TEST_PY_ID: "--id qemu"
        vexpress_ca9x4:
          TEST_PY_BD: "vexpress_ca9x4"
          TEST_PY_ID: "--id qemu"
        integratorcp_cm926ejs:
          TEST_PY_BD: "integratorcp_cm926ejs"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm:
          TEST_PY_BD: "qemu_arm"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_arm64:
          TEST_PY_BD: "qemu_arm64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_m68k:
          TEST_PY_BD: "M5208EVBE"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
          OVERRIDE: "-a CONFIG_M68K_QEMU=y -a ~CONFIG_MCFTMR"
        qemu_malta:
          TEST_PY_BD: "malta"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_maltael:
          TEST_PY_BD: "maltael"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64:
          TEST_PY_BD: "malta64"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_malta64el:
          TEST_PY_BD: "malta64el"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep and not efi"
        qemu_ppce500:
          TEST_PY_BD: "qemu-ppce500"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32:
          TEST_PY_BD: "qemu-riscv32"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64:
          TEST_PY_BD: "qemu-riscv64"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv32_spl:
          TEST_PY_BD: "qemu-riscv32_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_riscv64_spl:
          TEST_PY_BD: "qemu-riscv64_spl"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86:
          TEST_PY_BD: "qemu-x86"
          TEST_PY_TEST_SPEC: "not sleep"
        qemu_x86_64:
          TEST_PY_BD: "qemu-x86_64"
          TEST_PY_TEST_SPEC: "not sleep"
        r2dplus_i82557c:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id i82557c_qemu"
        r2dplus_pcnet:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id pcnet_qemu"
        r2dplus_rtl8139:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id rtl8139_qemu"
        r2dplus_tulip:
          TEST_PY_BD: "r2dplus"
          TEST_PY_ID: "--id tulip_qemu"
        sifive_unleashed_sdcard:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id sdcard_qemu"
        sifive_unleashed_spi-nor:
          TEST_PY_BD: "sifive_unleashed"
          TEST_PY_ID: "--id spi-nor_qemu"
        xilinx_zynq_virt:
          TEST_PY_BD: "xilinx_zynq_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xilinx_versal_virt:
          TEST_PY_BD: "xilinx_versal_virt"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
        xtfpga:
          TEST_PY_BD: "xtfpga"
          TEST_PY_ID: "--id qemu"
          TEST_PY_TEST_SPEC: "not sleep"
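    # TEST_PY_ID is handed to test.py as '--id' and selects the matching
    # u-boot-test-hooks configuration (here the QEMU variant); TEST_PY_TEST_SPEC
    # becomes a pytest '-k' expression that filters which tests run.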
    steps:
      - download: current
        artifact: testsh
      - script: |
          # make the current directory writable by the uboot user inside the container,
          # as the tests need to create files like SPI flash images, etc.
          # (TODO: clean this up in the future)
          chmod 777 .
          chmod 755 $(Pipeline.Workspace)/testsh/test.sh
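          # Unlike the sandbox jobs, no extra docker arguments are collected here, so
          # the "$@" below expands to nothing; only the fuse device is passed through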
          # Some tests using libguestfs-tools need the fuse device to run
          docker run "$@" --device /dev/fuse:/dev/fuse \
                         -v $PWD:$(work_dir) \
                         -v $(Pipeline.Workspace):$(Pipeline.Workspace) \
                         -e WORK_DIR="${WORK_DIR}" \
                         -e TEST_PY_BD="${TEST_PY_BD}" \
                         -e TEST_PY_ID="${TEST_PY_ID}" \
                         -e TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}" \
                         -e OVERRIDE="${OVERRIDE}" \
                         -e BUILD_ENV="${BUILD_ENV}" $(ci_runner_image) \
                         $(Pipeline.Workspace)/testsh/test.sh
        retryCountOnTaskFailure: 2 # QEMU may be too slow, etc.

- stage: world_build
  jobs:
  - job: build_the_world
    timeoutInMinutes: 0 # Use the maximum allowed
    displayName: 'Build the World'
    pool:
      vmImage: $(ubuntu_vm)
    strategy:
      # Use almost the same target division as in .travis.yml, only merging the
      # three small build jobs (arc/microblaze/xtensa) into one.
      matrix:
        am33xx_at91_kirkwood_mvebu_omap:
          BUILDMAN: "am33xx at91 kirkwood mvebu omap -x siemens"
        amlogic_bcm_boundary_engicam_siemens_technexion_toradex:
          BUILDMAN: "amlogic bcm boundary engicam siemens technexion toradex -x mips"
        arm_nxp_minus_imx:
          BUILDMAN: "freescale -x powerpc,m68k,imx,mx"
        imx:
          BUILDMAN: "mx imx -x boundary,engicam,technexion,toradex"
        rk:
          BUILDMAN: "rk"
        sunxi:
          BUILDMAN: "sunxi"
        powerpc:
          BUILDMAN: "powerpc"
        arm_catch_all:
          BUILDMAN: "arm -x aarch64,am33xx,at91,bcm,ls1,kirkwood,mvebu,omap,rk,siemens,mx,sunxi,technexion,toradex"
        aarch64_catch_all:
          BUILDMAN: "aarch64 -x amlogic,bcm,engicam,imx,ls1,ls2,lx216,mvebu,rk,siemens,sunxi,toradex"
        everything_but_arm_and_powerpc:
          BUILDMAN: "-x arm,powerpc"
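      # Each BUILDMAN value is a buildman target selection; '-x' excludes targets
      # that another matrix entry already covers, so nothing is built twice.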
    steps:
      - script: |
          cat << EOF > build.sh
          set -ex
          cd ${WORK_DIR}
          # make environment variables available as tests are running inside a container
          export BUILDMAN="${BUILDMAN}"
          git config --global --add safe.directory ${WORK_DIR}
          pip install -r tools/buildman/requirements.txt
          EOF
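          # The unquoted heredoc above expands ${WORK_DIR} and ${BUILDMAN} on the
          # agent; the quoted heredoc below is copied into build.sh verbatim and only
          # evaluated inside the container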
          cat << "EOF" >> build.sh
          if [[ "${BUILDMAN}" != "" ]]; then
              ret=0;
              tools/buildman/buildman -o /tmp -PEWM ${BUILDMAN} ${OVERRIDE} || ret=$?;
              if [[ $ret -ne 0 ]]; then
                  tools/buildman/buildman -o /tmp -seP ${BUILDMAN};
                  exit $ret;
              fi;
          fi
          EOF
          cat build.sh
          docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh