# Merge tag 'u-boot-clk-23Oct2019' of https://gitlab.denx.de/u-boot/custodians/u-boot-clk
# File: .azure-pipelines.yml
# Pipeline-wide variables shared by all jobs below.
variables:
  windows_vm: vs2015-win2012r2
  ubuntu_vm: ubuntu-18.04
  # Docker image used for all Linux container jobs.
  ci_runner_image: trini/u-boot-gitlab-ci-runner:bionic-20190912.1-03Oct2019
  # Add '-u 0' options for Azure pipelines, otherwise we get "permission
  # denied" error when it tries to "useradd -m -u 1001 vsts_azpcontainer",
  # since our $(ci_runner_image) user is not root.
  container_option: '-u 0'
  # Mount point for the source tree inside ad-hoc "docker run" containers.
  work_dir: /u

jobs:
  - job: tools_only_windows
    displayName: 'Ensure host tools build for Windows'
    pool:
      vmImage: $(windows_vm)
    strategy:
      matrix:
        i686:
          MSYS_DIR: msys32
          BASE_REPO: msys2-ci-base-i686
        x86_64:
          MSYS_DIR: msys64
          BASE_REPO: msys2-ci-base
    steps:
      # %CD:~0,2% is the drive letter of the current directory (e.g. "C:"),
      # so MSYS2 is installed at the drive root.
      - script: |
          git clone https://github.com/msys2/$(BASE_REPO).git %CD:~0,2%\$(MSYS_DIR)
        displayName: 'Install MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm -Syyuu
        displayName: 'Update MSYS2'
      - script: |
          set PATH=%CD:~0,2%\$(MSYS_DIR)\usr\bin;C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\pacman --noconfirm --needed -S make gcc bison diffutils openssl-devel
        displayName: 'Install Toolchain'
      - script: |
          set PATH=C:\Windows\system32;C:\Windows;C:\Windows\System32\Wbem
          echo make tools-only_defconfig tools-only NO_SDL=1 > build-tools.sh
          %CD:~0,2%\$(MSYS_DIR)\usr\bin\bash -lc "bash build-tools.sh"
        displayName: 'Build Host Tools'
        env:
          # Tell MSYS2 we need a POSIX emulation layer
          MSYSTEM: MSYS
          # Tell MSYS2 not to 'cd' our startup directory to HOME.
          # Quoted: an unquoted "yes" is a YAML 1.1 boolean and the env var
          # must be the literal string "yes" for MSYS2 to honor it.
          CHERE_INVOKING: 'yes'

47   - job: cppcheck
48     displayName: 'Static code analysis with cppcheck'
49     pool:
50       vmImage: $(ubuntu_vm)
51     container:
52       image: $(ci_runner_image)
53       options: $(container_option)
54     steps:
55       - script: cppcheck --force --quiet --inline-suppr .
56
57   - job: todo
58     displayName: 'Search for TODO within source tree'
59     pool:
60       vmImage: $(ubuntu_vm)
61     container:
62       image: $(ci_runner_image)
63       options: $(container_option)
64     steps:
65       - script: grep -r TODO .
66       - script: grep -r FIXME .
67       - script: grep -r HACK . | grep -v HACKKIT
68
69   - job: sloccount
70     displayName: 'Some statistics about the code base'
71     pool:
72       vmImage: $(ubuntu_vm)
73     container:
74       image: $(ci_runner_image)
75       options: $(container_option)
76     steps:
77       - script: sloccount .
78
79   - job: maintainers
80     displayName: 'Ensure all configs have MAINTAINERS entries'
81     pool:
82       vmImage: $(ubuntu_vm)
83     container:
84       image: $(ci_runner_image)
85       options: $(container_option)
86     steps:
87       - script: |
88           if [ `./tools/genboardscfg.py -f 2>&1 | wc -l` -ne 0 ]; then exit 1; fi
89
90   - job: tools_only
91     displayName: 'Ensure host tools build'
92     pool:
93       vmImage: $(ubuntu_vm)
94     container:
95       image: $(ci_runner_image)
96       options: $(container_option)
97     steps:
98       - script: |
99           make tools-only_config tools-only -j$(nproc)
100
101   - job: envtools
102     displayName: 'Ensure env tools build'
103     pool:
104       vmImage: $(ubuntu_vm)
105     container:
106       image: $(ci_runner_image)
107       options: $(container_option)
108     steps:
109       - script: |
110           make tools-only_config envtools -j$(nproc)
111
112   - job: utils
113     displayName: 'Run binman, buildman, dtoc and patman testsuites'
114     pool:
115       vmImage: $(ubuntu_vm)
116     steps:
117       - script: |
118           cat << EOF > build.sh
119           set -ex
120           cd ${WORK_DIR}
121           EOF
122           cat << "EOF" >> build.sh
123           git config --global user.name "Azure Pipelines"
124           git config --global user.email bmeng.cn@gmail.com
125           export USER=azure
126           virtualenv /tmp/venv
127           . /tmp/venv/bin/activate
128           pip install pyelftools
129           export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/sandbox_spl
130           export PYTHONPATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc/pylibfdt
131           export PATH=${UBOOT_TRAVIS_BUILD_DIR}/scripts/dtc:${PATH}
132           ./tools/buildman/buildman -o /tmp -P sandbox_spl
133           ./tools/binman/binman --toolpath ${UBOOT_TRAVIS_BUILD_DIR}/tools test
134           ./tools/buildman/buildman -t
135           ./tools/dtoc/dtoc -t
136           ./tools/patman/patman --test
137           EOF
138           cat build.sh
139           # We cannot use "container" like other jobs above, as buildman
140           # seems to hang forever with pre-configured "container" environment
141           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh
142
143   - job: test_py
144     displayName: 'test.py'
145     pool:
146       vmImage: $(ubuntu_vm)
147     strategy:
148       matrix:
149         sandbox:
150           TEST_PY_BD: "sandbox"
151           BUILDMAN: "^sandbox$"
152         sandbox_spl:
153           TEST_PY_BD: "sandbox_spl"
154           TEST_PY_TEST_SPEC: "test_ofplatdata"
155           BUILDMAN: "^sandbox_spl$"
156         sandbox_flattree:
157           TEST_PY_BD: "sandbox_flattree"
158           BUILDMAN: "^sandbox_flattree$"
159         evb_ast2500:
160           TEST_PY_BD: "evb-ast2500"
161           TEST_PY_ID: "--id qemu"
162           BUILDMAN: "^evb-ast2500$"
163         vexpress_ca15_tc2:
164           TEST_PY_BD: "vexpress_ca15_tc2"
165           TEST_PY_ID: "--id qemu"
166           BUILDMAN: "^vexpress_ca15_tc2$"
167         vexpress_ca9x4:
168           TEST_PY_BD: "vexpress_ca9x4"
169           TEST_PY_ID: "--id qemu"
170           BUILDMAN: "^vexpress_ca9x4$"
171         integratorcp_cm926ejs:
172           TEST_PY_BD: "integratorcp_cm926ejs"
173           TEST_PY_ID: "--id qemu"
174           TEST_PY_TEST_SPEC: "not sleep"
175           BUILDMAN: "^integratorcp_cm926ejs$"
176         qemu_arm:
177           TEST_PY_BD: "qemu_arm"
178           TEST_PY_TEST_SPEC: "not sleep"
179           BUILDMAN: "^qemu_arm$"
180         qemu_arm64:
181           TEST_PY_BD: "qemu_arm64"
182           TEST_PY_TEST_SPEC: "not sleep"
183           BUILDMAN: "^qemu_arm64$"
184         qemu_mips:
185           TEST_PY_BD: "qemu_mips"
186           TEST_PY_TEST_SPEC: "not sleep"
187           BUILDMAN: "^qemu_mips$"
188         qemu_mipsel:
189           TEST_PY_BD: "qemu_mipsel"
190           TEST_PY_TEST_SPEC: "not sleep"
191           BUILDMAN: "^qemu_mipsel$"
192         qemu_mips64:
193           TEST_PY_BD: "qemu_mips64"
194           TEST_PY_TEST_SPEC: "not sleep"
195           BUILDMAN: "^qemu_mips64$"
196         qemu_mips64el:
197           TEST_PY_BD: "qemu_mips64el"
198           TEST_PY_TEST_SPEC: "not sleep"
199           BUILDMAN: "^qemu_mips64el$"
200         qemu_ppce500:
201           TEST_PY_BD: "qemu-ppce500"
202           TEST_PY_TEST_SPEC: "not sleep"
203           BUILDMAN: "^qemu-ppce500$"
204         qemu_riscv64:
205           TEST_PY_BD: "qemu-riscv64"
206           TEST_PY_TEST_SPEC: "not sleep"
207           BUILDMAN: "^qemu-riscv64$"
208         qemu_x86:
209           TEST_PY_BD: "qemu-x86"
210           TEST_PY_TEST_SPEC: "not sleep"
211           BUILDMAN: "^qemu-x86$"
212         qemu_x86_64:
213           TEST_PY_BD: "qemu-x86_64"
214           TEST_PY_TEST_SPEC: "not sleep"
215           BUILDMAN: "^qemu-x86_64$"
216         zynq_zc702:
217           TEST_PY_BD: "zynq_zc702"
218           TEST_PY_ID: "--id qemu"
219           TEST_PY_TEST_SPEC: "not sleep"
220           BUILDMAN: "^zynq_zc702$"
221         xilinx_versal_virt:
222           TEST_PY_BD: "xilinx_versal_virt"
223           TEST_PY_ID: "--id qemu"
224           TEST_PY_TEST_SPEC: "not sleep"
225           BUILDMAN: "^xilinx_versal_virt$"
226         xtfpga:
227           TEST_PY_BD: "xtfpga"
228           TEST_PY_ID: "--id qemu"
229           TEST_PY_TEST_SPEC: "not sleep"
230           BUILDMAN: "^xtfpga$"
231     steps:
232       - script: |
233           cat << EOF > test.sh
234           set -ex
235           # make environment variables available as tests are running inside a container
236           export WORK_DIR="${WORK_DIR}"
237           export TEST_PY_BD="${TEST_PY_BD}"
238           export TEST_PY_ID="${TEST_PY_ID}"
239           export TEST_PY_TEST_SPEC="${TEST_PY_TEST_SPEC}"
240           export BUILDMAN="${BUILDMAN}"
241           EOF
242           cat << "EOF" >> test.sh
243           # the below corresponds to .gitlab-ci.yml "before_script"
244           cd ${WORK_DIR}
245           git clone --depth=1 git://github.com/swarren/uboot-test-hooks.git /tmp/uboot-test-hooks
246           ln -s travis-ci /tmp/uboot-test-hooks/bin/`hostname`
247           ln -s travis-ci /tmp/uboot-test-hooks/py/`hostname`
248           virtualenv /tmp/venv
249           . /tmp/venv/bin/activate
250           pip install pytest==2.8.7
251           pip install python-subunit
252           pip install coverage
253           grub-mkimage --prefix=\"\" -o ~/grub_x86.efi -O i386-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
254           grub-mkimage --prefix=\"\" -o ~/grub_x64.efi -O x86_64-efi normal  echo lsefimmap lsefi lsefisystab efinet tftp minicmd
255           mkdir ~/grub2-arm
256           cd ~/grub2-arm; wget -O - http://download.opensuse.org/ports/armv7hl/distribution/leap/42.2/repo/oss/suse/armv7hl/grub2-arm-efi-2.02~beta2-87.1.armv7hl.rpm | rpm2cpio | cpio -di
257           mkdir ~/grub2-arm64
258           cd ~/grub2-arm64; wget -O - http://download.opensuse.org/ports/aarch64/distribution/leap/42.2/repo/oss/suse/aarch64/grub2-arm64-efi-2.02~beta2-87.1.aarch64.rpm | rpm2cpio | cpio -di
259           # the below corresponds to .gitlab-ci.yml "script"
260           cd ${WORK_DIR}
261           if [[ "${BUILDMAN}" != "" ]]; then
262               ret=0;
263               tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
264               if [[ $ret -ne 0 && $ret -ne 129 ]]; then
265                   tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
266                   exit $ret;
267               fi;
268           fi
269           export UBOOT_TRAVIS_BUILD_DIR=/tmp/.bm-work/${TEST_PY_BD};
270           export PATH=/opt/qemu/bin:/tmp/uboot-test-hooks/bin:/usr/bin:/bin;
271           export PYTHONPATH=/tmp/uboot-test-hooks/py/travis-ci;
272           if [[ "${TEST_PY_BD}" != "" ]]; then
273               ./test/py/test.py --bd ${TEST_PY_BD} ${TEST_PY_ID} -k "${TEST_PY_TEST_SPEC:-not a_test_which_does_not_exist}" --build-dir "$UBOOT_TRAVIS_BUILD_DIR";
274               ret=$?;
275               if [[ $ret -ne 0 ]]; then
276                   exit $ret;
277               fi;
278           fi
279           # the below corresponds to .gitlab-ci.yml "after_script"
280           rm -rf ~/grub2* /tmp/uboot-test-hooks /tmp/venv
281           EOF
282           cat test.sh
283           # make current directory writeable to uboot user inside the container
284           # as sandbox testing need create files like spi flash images, etc.
285           # (TODO: clean up this in the future)
286           chmod 777 .
287           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/test.sh
288
289   - job: build_the_world
290     displayName: 'Build the World'
291     pool:
292       vmImage: $(ubuntu_vm)
293     strategy:
294       # Use almost the same target division in .travis.yml, only merged
295       # 4 small build jobs (arc/microblaze/nds32/xtensa) into one.
296       matrix:
297         arc_microblaze_nds32_xtensa:
298           BUILDMAN: "arc microblaze nds32 xtensa"
299         arm11_arm7_arm920t_arm946es:
300           BUILDMAN: "arm11 arm7 arm920t arm946es"
301         arm926ejs:
302           BUILDMAN: "arm926ejs -x freescale,siemens,at91,kirkwood,spear,omap"
303         at91_non_armv7:
304           BUILDMAN: "at91 -x armv7"
305         at91_non_arm926ejs:
306           BUILDMAN: "at91 -x arm926ejs"
307         boundary_engicam_toradex:
308           BUILDMAN: "boundary engicam toradex"
309         arm_bcm:
310           BUILDMAN: "bcm -x mips"
311         nxp_arm32:
312           BUILDMAN: "freescale -x powerpc,m68k,aarch64"
313         nxp_aarch64_ls101x:
314           BUILDMAN: "freescale&aarch64&ls101"
315         nxp_aarch64_ls102x:
316           BUILDMAN: "freescale&aarch64&ls102"
317         nxp_aarch64_ls104x:
318           BUILDMAN: "freescale&aarch64&ls104"
319         nxp_aarch64_ls108x:
320           BUILDMAN: "freescale&aarch64&ls108"
321         nxp_aarch64_ls20xx:
322           BUILDMAN: "freescale&aarch64&ls20"
323         nxp_aarch64_lx216x:
324           BUILDMAN: "freescale&aarch64&lx216"
325         imx6:
326           BUILDMAN: "mx6 -x boundary,engicam,freescale,technexion,toradex"
327         imx:
328           BUILDMAN: "mx -x mx6,freescale,technexion,toradex"
329         keystone2_keystone3:
330           BUILDMAN: "k2 k3"
331         samsung_socfpga:
332           BUILDMAN: "samsung socfpga"
333         spear:
334           BUILDMAN: "spear"
335         sun4i:
336           BUILDMAN: "sun4i"
337         sun5i:
338           BUILDMAN: "sun5i"
339         sun6i:
340           BUILDMAN: "sun6i"
341         sun7i:
342           BUILDMAN: "sun7i"
343         sun8i_32bit:
344           BUILDMAN: "sun8i&armv7"
345         sun8i_64bit:
346           BUILDMAN: "sun8i&aarch64"
347         sun9i:
348           BUILDMAN: "sun9i"
349         sun50i:
350           BUILDMAN: "sun50i"
351         arm_catch_all:
352           BUILDMAN: "arm -x arm11,arm7,arm9,aarch64,at91,bcm,freescale,kirkwood,mvebu,siemens,tegra,uniphier,mx,samsung,sunxi,am33xx,omap,rockchip,toradex,socfpga,k2,k3,zynq"
353         sandbox_x86:
354           BUILDMAN: "sandbox x86"
355         technexion:
356           BUILDMAN: "technexion"
357         kirkwood:
358           BUILDMAN: "kirkwood"
359         mvebu:
360           BUILDMAN: "mvebu"
361         m68k:
362           BUILDMAN: "m68k"
363         mips:
364           BUILDMAN: "mips"
365         non_fsl_ppc:
366           BUILDMAN: "powerpc -x freescale"
367         mpc85xx_freescale:
368           BUILDMAN: "mpc85xx&freescale -x t208xrdb -x t4qds -x t102* -x p1_p2_rdb_pc -x p1010rdb -x corenet_ds -x b4860qds -x bsc91*"
369         t208xrdb_corenet_ds:
370           BUILDMAN: "t208xrdb corenet_ds"
371         fsl_ppc:
372           BUILDMAN: "t4qds b4860qds mpc83xx&freescale mpc86xx&freescale"
373         t102x:
374           BUILDMAN: "t102*"
375         p1_p2_rdb_pc:
376           BUILDMAN: "p1_p2_rdb_pc"
377         p1010rdb_bsc91:
378           BUILDMAN: "p1010rdb bsc91"
379         siemens:
380           BUILDMAN: "siemens"
381         tegra:
382           BUILDMAN: "tegra -x toradex"
383         am33xx_no_siemens:
384           BUILDMAN: "am33xx -x siemens"
385         omap:
386           BUILDMAN: "omap"
387         uniphier:
388           BUILDMAN: "uniphier"
389         aarch64_catch_all:
390           BUILDMAN: "aarch64 -x bcm,k3,tegra,ls1,ls2,mvebu,uniphier,sunxi,samsung,rockchip,versal,zynq"
391         rockchip:
392           BUILDMAN: "rockchip"
393         sh:
394           BUILDMAN: "sh -x arm"
395         zynq:
396           BUILDMAN: "zynq&armv7"
397         zynqmp_versal:
398           BUILDMAN: "versal|zynqmp&aarch64"
399         riscv:
400           BUILDMAN: "riscv"
401     steps:
402       - script: |
403           cat << EOF > build.sh
404           set -ex
405           cd ${WORK_DIR}
406           # make environment variables available as tests are running inside a container
407           export BUILDMAN="${BUILDMAN}"
408           EOF
409           cat << "EOF" >> build.sh
410           if [[ "${BUILDMAN}" != "" ]]; then
411               ret=0;
412               tools/buildman/buildman -o /tmp -P -E ${BUILDMAN} ${OVERRIDE} || ret=$?;
413               if [[ $ret -ne 0 && $ret -ne 129 ]]; then
414                   tools/buildman/buildman -o /tmp -sdeP ${BUILDMAN};
415                   exit $ret;
416               fi;
417           fi
418           EOF
419           cat build.sh
420           docker run -v $PWD:$(work_dir) $(ci_runner_image) /bin/bash $(work_dir)/build.sh