1 # Licensed to the Apache Software Foundation (ASF) under one
2 # or more contributor license agreements. See the NOTICE file
3 # distributed with this work for additional information
4 # regarding copyright ownership. The ASF licenses this file
5 # to you under the Apache License, Version 2.0 (the
6 # "License"); you may not use this file except in compliance
7 # with the License. You may obtain a copy of the License at
9 # http://www.apache.org/licenses/LICENSE-2.0
11 # Unless required by applicable law or agreed to in writing,
12 # software distributed under the License is distributed on an
13 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 # KIND, either express or implied. See the License for the
15 # specific language governing permissions and limitations
21 # The docker compose file is parametrized using environment variables, the
22 # defaults are set in .env file.
25 # $ ARCH=arm64v8 docker-compose build ubuntu-cpp
26 # $ ARCH=arm64v8 docker-compose run ubuntu-cpp
32 # In order to enable coredumps for the C++ tests run by CTest either with
33 # command `make unittest` or `ctest --output-on-failure` the correct coredump
34 # patterns must be set.
35 # The kernel settings are coming from the host, so while it can be enabled from
36 # a running container using the --privileged option the change will affect all other
37 # containers, so prefer setting it explicitly, directly on the host.
38 # WARNING: setting this will affect the host machine.
41 # $ sudo sysctl -w kernel.core_pattern=core.%e.%p
43 # macOS host running Docker for Mac (won't persist between restarts):
44 # $ screen ~/Library/Containers/com.docker.docker/Data/vms/0/tty
45 # # echo "core.%e.%p" > /proc/sys/kernel/core_pattern
47 # The setup attempts to generate coredumps by default, but the correct paths
48 # above must be set. In order to disable the coredump generation set
49 # ULIMIT_CORE environment variable to 0 before running docker-compose
50 # (or by setting it in .env file):
52 # ULIMIT_CORE=0 docker-compose run --rm conda-cpp
54 # See more in cpp/build-support/run-test.sh::print_coredumps
59 CCACHE_COMPILERCHECK: content
61 CCACHE_COMPRESSLEVEL: 6
65 # CPU/memory limit presets to pass to Docker.
67 # Usage: archery docker run --resource-limit=github <image>
69 # Note that exporting ARCHERY_DOCKER_BIN="sudo docker" is likely required,
70 # unless Docker is configured with cgroups v2 (else Docker will silently
73 # These values emulate GitHub Actions:
74 # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
76 # Note we use cpuset and not cpus since Ninja only detects and limits
77 # parallelism given the former
86 # This section is used by the archery tool to enable building nested images,
87 # so it is enough to call:
88 # archery run debian-ruby
89 # instead of a sequence of docker-compose commands:
90 # docker-compose build debian-cpp
91 # docker-compose build debian-c-glib
92 # docker-compose build debian-ruby
93 # docker-compose run --rm debian-ruby
95 # Each node must be either a string scalar or a list containing the
96 # descendant images, if any. Archery checks that every node has a corresponding
97 # service entry, so any new image/service must be listed here.
100 - conda-cpp-hiveserver2
103 - conda-python-pandas
107 - conda-python-turbodbc
108 - conda-python-kartothek
117 - debian-go-cgo-python
129 - ubuntu-python-sdist-test
136 - ubuntu-cpp-sanitizer
137 - ubuntu-cpp-thread-sanitizer
146 - python-wheel-manylinux-2010
147 - python-wheel-manylinux-2014:
148 - java-jni-manylinux-2014
149 - python-wheel-manylinux-test-imports
150 - python-wheel-manylinux-test-unittests
151 - python-wheel-windows-vs2017
152 - python-wheel-windows-test
156 name: ${ARCH}-conda-ccache
158 name: ${ARCH}-debian-${DEBIAN}-ccache
160 name: ${ARCH}-ubuntu-${UBUNTU}-ccache
162 name: ${ARCH}-fedora-${FEDORA}-ccache
164 name: ${ARCH}-debian-${DEBIAN}-rust
167 python-wheel-manylinux2010-ccache:
168 name: python-wheel-manylinux2010-ccache
169 python-wheel-manylinux2014-ccache:
170 name: python-wheel-manylinux2014-ccache
171 python-wheel-windows-clcache:
172 name: python-wheel-windows-clcache
176 ################################# C++ #######################################
178 # docker-compose run -e ARROW_BUILD_TYPE=release conda-cpp|debian-cpp|...
180 # docker-compose run -e ARROW_BUILD_STATIC=OFF conda-cpp|debian-cpp|...
182 # docker-compose run \
183 # -e ARROW_BUILD_SHARED=OFF \
184 # -e ARROW_TEST_LINKAGE=static \
185 # conda-cpp|debian-cpp|...
188 # Base image for conda builds.
191 # docker-compose build conda
192 # docker-compose run --rm conda
194 # ARCH: amd64, arm32v7
195 image: ${REPO}:${ARCH}-conda
198 dockerfile: ci/docker/conda.dockerfile
200 - ${REPO}:${ARCH}-conda
208 # C++ build in conda environment, including the doxygen docs.
211 # docker-compose build conda
212 # docker-compose build conda-cpp
213 # docker-compose run --rm conda-cpp
215 # ARCH: amd64, arm32v7
216 image: ${REPO}:${ARCH}-conda-cpp
219 dockerfile: ci/docker/conda-cpp.dockerfile
221 - ${REPO}:${ARCH}-conda-cpp
225 shm_size: &shm-size 2G
230 ARROW_BUILD_BENCHMARKS: "ON"
231 ARROW_BUILD_EXAMPLES: "ON"
232 ARROW_ENABLE_TIMING_TESTS: # inherit
234 ARROW_USE_LD_GOLD: "ON"
235 ARROW_USE_PRECOMPILED_HEADERS: "ON"
236 volumes: &conda-volumes
238 - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
239 command: &cpp-conda-command
240 ["/arrow/ci/scripts/cpp_build.sh /arrow /build true &&
241 /arrow/ci/scripts/cpp_test.sh /arrow /build"]
245 # docker-compose build conda
246 # docker-compose build conda-cpp
247 # docker-compose run --rm conda-cpp-valgrind
249 # ARCH: amd64, arm32v7
250 image: ${REPO}:${ARCH}-conda-cpp
253 dockerfile: ci/docker/conda-cpp.dockerfile
255 - ${REPO}:${ARCH}-conda-cpp
263 ARROW_CXXFLAGS: "-Og" # Shrink test runtime by enabling minimal optimizations
264 ARROW_ENABLE_TIMING_TESTS: # inherit
267 ARROW_JEMALLOC: "OFF"
268 ARROW_RUNTIME_SIMD_LEVEL: "AVX2" # AVX512 not supported by Valgrind (ARROW-9851)
270 ARROW_TEST_MEMCHECK: "ON"
271 ARROW_USE_LD_GOLD: "ON"
272 BUILD_WARNING_LEVEL: "PRODUCTION"
273 volumes: *conda-volumes
274 command: *cpp-conda-command
278 # docker-compose build debian-cpp
279 # docker-compose run --rm debian-cpp
281 # ARCH: amd64, arm64v8, ...
283 image: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
286 dockerfile: ci/docker/debian-${DEBIAN}-cpp.dockerfile
288 - ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
296 ARROW_ENABLE_TIMING_TESTS: # inherit
298 volumes: &debian-volumes
300 - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
301 command: &cpp-command >
303 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
304 /arrow/ci/scripts/cpp_test.sh /arrow /build"
308 # docker-compose build ubuntu-cpp
309 # docker-compose run --rm ubuntu-cpp
311 # ARCH: amd64, arm64v8, s390x, ...
312 # UBUNTU: 18.04, 20.04
313 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
316 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
318 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
321 base: "${ARCH}/ubuntu:${UBUNTU}"
322 clang_tools: ${CLANG_TOOLS}
324 gcc_version: ${GCC_VERSION}
329 ARROW_ENABLE_TIMING_TESTS: # inherit
331 volumes: &ubuntu-volumes
333 - ${DOCKER_VOLUME_PREFIX}ubuntu-ccache:/ccache:delegated
334 command: *cpp-command
337 # Arrow build with BUNDLED dependencies
338 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
341 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp-minimal.dockerfile
343 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
346 base: "${ARCH}/ubuntu:${UBUNTU}"
351 ARROW_DEPENDENCY_SOURCE: BUNDLED
352 CMAKE_GENERATOR: "Unix Makefiles"
353 volumes: *ubuntu-volumes
354 command: *cpp-command
358 # docker-compose build cuda-cpp
359 # docker-compose run --rm cuda-cpp
360 # Also need to edit the host docker configuration as follows:
361 # https://github.com/docker/compose/issues/6691#issuecomment-561504928
364 # CUDA: 9.1, 10.0, 10.1
365 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
368 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
370 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
373 base: nvidia/cuda:${CUDA}-devel-ubuntu${UBUNTU}
374 clang_tools: ${CLANG_TOOLS}
381 volumes: *ubuntu-volumes
382 command: *cpp-command
384 ubuntu-cpp-sanitizer:
386 # docker-compose build ubuntu-cpp-sanitizer
387 # docker-compose run --rm ubuntu-cpp-sanitizer
389 # ARCH: amd64, arm64v8, ...
390 # UBUNTU: 18.04, 20.04
391 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
397 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
399 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
402 clang_tools: ${CLANG_TOOLS}
405 volumes: *ubuntu-volumes
408 CC: clang-${CLANG_TOOLS}
409 CXX: clang++-${CLANG_TOOLS}
410 ARROW_BUILD_STATIC: "OFF"
411 ARROW_ENABLE_TIMING_TESTS: # inherit
412 ARROW_FUZZING: "ON" # Check fuzz regressions
413 ARROW_JEMALLOC: "OFF"
417 ARROW_USE_UBSAN: "ON"
418 # utf8proc 2.1.0 in Ubuntu Bionic has test failures
419 utf8proc_SOURCE: "BUNDLED"
420 command: *cpp-command
422 ubuntu-cpp-thread-sanitizer:
424 # docker-compose build ubuntu-cpp-thread-sanitizer
425 # docker-compose run --rm ubuntu-cpp-thread-sanitizer
427 # ARCH: amd64, arm64v8, ...
428 # UBUNTU: 18.04, 20.04
429 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
432 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
434 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
437 clang_tools: ${CLANG_TOOLS}
440 volumes: *ubuntu-volumes
443 CC: clang-${CLANG_TOOLS}
444 CXX: clang++-${CLANG_TOOLS}
445 ARROW_BUILD_STATIC: "OFF"
446 ARROW_ENABLE_TIMING_TESTS: # inherit
448 ARROW_JEMALLOC: "OFF"
452 command: *cpp-command
456 # docker-compose build fedora-cpp
457 # docker-compose run --rm fedora-cpp
459 # ARCH: amd64, arm64v8, ...
461 image: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
464 dockerfile: ci/docker/fedora-${FEDORA}-cpp.dockerfile
466 - ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
474 ARROW_ENABLE_TIMING_TESTS: # inherit
476 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
477 volumes: &fedora-volumes
479 - ${DOCKER_VOLUME_PREFIX}fedora-ccache:/ccache:delegated
480 command: *cpp-command
482 ############################### C GLib ######################################
486 # docker-compose build debian-cpp
487 # docker-compose build debian-c-glib
488 # docker-compose run --rm debian-c-glib
490 # ARCH: amd64, arm64v8, ...
492 image: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
495 dockerfile: ci/docker/linux-apt-c-glib.dockerfile
497 - ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
499 base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
504 ARROW_GLIB_GTK_DOC: "true"
505 volumes: *debian-volumes
506 command: &c-glib-command >
508 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
509 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
510 /arrow/ci/scripts/c_glib_test.sh /arrow /build"
514 # docker-compose build ubuntu-cpp
515 # docker-compose build ubuntu-c-glib
516 # docker-compose run --rm ubuntu-c-glib
518 # ARCH: amd64, arm64v8, ...
519 # UBUNTU: 18.04, 20.04
520 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
523 dockerfile: ci/docker/linux-apt-c-glib.dockerfile
525 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
527 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
532 ARROW_GLIB_GTK_DOC: "true"
533 volumes: *ubuntu-volumes
534 command: *c-glib-command
536 ############################### Ruby ########################################
537 # While Ruby is the only implementation dependent on C GLib we can
538 # test C GLib and Ruby in one pass. This is an optimization to avoid
539 # redundant (one for C GLib and one for Ruby doing the same work twice)
540 # builds on CI services.
544 # docker-compose build debian-cpp
545 # docker-compose build debian-c-glib
546 # docker-compose build debian-ruby
547 # docker-compose run --rm debian-ruby
549 # ARCH: amd64, arm64v8, ...
551 image: ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
554 dockerfile: ci/docker/linux-apt-ruby.dockerfile
556 - ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
558 base: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
563 volumes: *debian-volumes
564 command: &ruby-command >
566 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
567 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
568 /arrow/ci/scripts/c_glib_test.sh /arrow /build &&
569 /arrow/ci/scripts/ruby_test.sh /arrow /build"
573 # docker-compose build ubuntu-cpp
574 # docker-compose build ubuntu-c-glib
575 # docker-compose build ubuntu-ruby
576 # docker-compose run --rm ubuntu-ruby
578 # ARCH: amd64, arm64v8, ...
579 # UBUNTU: 18.04, 20.04
580 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
583 dockerfile: ci/docker/linux-apt-ruby.dockerfile
585 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
587 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
592 volumes: *ubuntu-volumes
593 command: *ruby-command
595 ############################### Python ######################################
599 # docker-compose build conda
600 # docker-compose build conda-cpp
601 # docker-compose build conda-python
602 # docker-compose run --rm conda-python
604 # ARCH: amd64, arm32v7
605 # PYTHON: 3.6, 3.7, 3.8, 3.9
606 image: ${REPO}:${ARCH}-conda-python-${PYTHON}
609 dockerfile: ci/docker/conda-python.dockerfile
611 - ${REPO}:${ARCH}-conda-python-${PYTHON}
619 volumes: *conda-volumes
620 command: &python-conda-command
621 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
622 /arrow/ci/scripts/python_build.sh /arrow /build &&
623 /arrow/ci/scripts/python_test.sh /arrow"]
627 # docker-compose build cuda-cpp
628 # docker-compose build cuda-python
629 # docker-compose run --rm cuda-python
632 # CUDA: 8.0, 10.0, ...
633 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
636 dockerfile: ci/docker/linux-apt-python-3.dockerfile
638 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
640 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
645 volumes: *ubuntu-volumes
646 command: &python-command >
648 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
649 /arrow/ci/scripts/python_build.sh /arrow /build &&
650 /arrow/ci/scripts/python_test.sh /arrow"
654 # docker-compose build debian-cpp
655 # docker-compose build debian-python
656 # docker-compose run --rm debian-python
658 # ARCH: amd64, arm64v8, ...
660 image: ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
663 dockerfile: ci/docker/linux-apt-python-3.dockerfile
665 - ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
667 base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
671 volumes: *debian-volumes
672 command: *python-command
676 # docker-compose build ubuntu-cpp
677 # docker-compose build ubuntu-python
678 # docker-compose run --rm ubuntu-python
680 # ARCH: amd64, arm64v8, ...
681 # UBUNTU: 18.04, 20.04
682 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
685 dockerfile: ci/docker/linux-apt-python-3.dockerfile
687 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
689 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
693 volumes: *ubuntu-volumes
694 command: *python-command
698 # docker-compose build fedora-cpp
699 # docker-compose build fedora-python
700 # docker-compose run --rm fedora-python
702 # ARCH: amd64, arm64v8, ...
704 image: ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
707 dockerfile: ci/docker/linux-dnf-python-3.dockerfile
709 - ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
711 base: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
715 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
716 volumes: *fedora-volumes
717 command: *python-command
719 ############################ Python sdist ###################################
723 # docker-compose build python-sdist
724 # docker-compose run --rm python-sdist
726 # PYARROW_VERSION: The pyarrow version for sdist such as "3.0.0"
727 image: ${REPO}:python-sdist
730 dockerfile: ci/docker/python-sdist.dockerfile
732 - ${REPO}:python-sdist
734 PYARROW_VERSION: ${PYARROW_VERSION:-}
737 command: /arrow/ci/scripts/python_sdist_build.sh /arrow
739 ubuntu-python-sdist-test:
741 # docker-compose build ubuntu-cpp
742 # docker-compose build ubuntu-python-sdist-test
743 # docker-compose run --rm ubuntu-python-sdist-test
745 # ARCH: amd64, arm64v8, ...
746 # PYARROW_VERSION: The test target pyarrow version such as "3.0.0"
747 # UBUNTU: 18.04, 20.04
748 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
751 dockerfile: ci/docker/linux-apt-python-3.dockerfile
753 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
755 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
759 PYARROW_VERSION: ${PYARROW_VERSION:-}
760 volumes: *ubuntu-volumes
764 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
765 /arrow/ci/scripts/python_sdist_test.sh /arrow"
767 ############################ Python wheels ##################################
769 # See available versions at:
770 # https://quay.io/repository/pypa/manylinux2010_x86_64?tab=tags
771 # only amd64 arch is supported
772 python-wheel-manylinux-2010:
773 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
776 arch_alias: ${ARCH_ALIAS}
777 arch_short_alias: ${ARCH_SHORT_ALIAS}
778 base: quay.io/pypa/manylinux2010_${ARCH_ALIAS}:2021-10-11-14ac00e
782 dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
784 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
787 MANYLINUX_VERSION: 2010
790 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2010-ccache:/ccache:delegated
791 command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
793 # See available versions at:
794 # https://quay.io/repository/pypa/manylinux2014_x86_64?tab=tags
795 python-wheel-manylinux-2014:
796 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
799 arch_alias: ${ARCH_ALIAS}
800 arch_short_alias: ${ARCH_SHORT_ALIAS}
801 base: quay.io/pypa/manylinux2014_${ARCH_ALIAS}:2021-10-11-14ac00e
805 dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
807 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
810 MANYLINUX_VERSION: 2014
813 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
814 command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
816 python-wheel-manylinux-test-imports:
817 image: ${ARCH}/python:${PYTHON}
823 CHECK_UNITTESTS: "OFF"
824 command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
826 python-wheel-manylinux-test-unittests:
827 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
833 dockerfile: ci/docker/python-wheel-manylinux-test.dockerfile
835 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
841 CHECK_UNITTESTS: "ON"
842 command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
844 python-wheel-windows-vs2017:
845 # The windows images must be built locally and pushed to a remote registry:
846 # export REPO=ghcr.io/ursacomputing/arrow
847 # PYTHON=3.6 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
848 # PYTHON=3.7 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
849 # PYTHON=3.8 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
850 # PYTHON=3.9 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
851 # PYTHON=3.6 archery docker push python-wheel-windows-vs2017
852 # PYTHON=3.7 archery docker push python-wheel-windows-vs2017
853 # PYTHON=3.8 archery docker push python-wheel-windows-vs2017
854 # PYTHON=3.9 archery docker push python-wheel-windows-vs2017
855 image: ${REPO}:python-${PYTHON}-wheel-windows-vs2017-vcpkg-${VCPKG}
861 dockerfile: ci/docker/python-wheel-windows-vs2017.dockerfile
862 # This should make the pushed images reusable, but the image gets rebuilt.
863 # Uncomment if no local cache is available.
865 # - mcr.microsoft.com/windows/servercore:ltsc2019
866 # - ${REPO}:wheel-windows-vs2017
868 - "${DOCKER_VOLUME_PREFIX}python-wheel-windows-clcache:C:/clcache"
872 command: arrow\\ci\\scripts\\python_wheel_windows_build.bat
874 python-wheel-windows-test:
875 image: python:${PYTHON}-windowsservercore-1809
880 command: arrow\\ci\\scripts\\python_wheel_windows_test.bat
882 java-jni-manylinux-2014:
883 image: ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
886 base: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
889 dockerfile: ci/docker/java-jni-manylinux-201x.dockerfile
891 - ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
896 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
898 ["pip install -e /arrow/dev/archery &&
899 /arrow/ci/scripts/java_cdata_build.sh /arrow /java-native-build /arrow/java-dist &&
900 /arrow/ci/scripts/java_jni_manylinux_build.sh /arrow /build /arrow/java-dist"]
902 ############################## Integration #################################
905 # Possible $PANDAS parameters:
906 # - `latest`: latest release
907 # - `master`: git master branch, use `docker-compose build --no-cache`
908 # - `<version>`: specific version available on conda-forge
910 # docker-compose build conda
911 # docker-compose build conda-cpp
912 # docker-compose build conda-python
913 # docker-compose build conda-python-pandas
914 # docker-compose run --rm conda-python-pandas
915 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
918 dockerfile: ci/docker/conda-python-pandas.dockerfile
920 - ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
930 volumes: *conda-volumes
931 command: *python-conda-command
934 # Possible $DASK parameters:
935 # - `latest`: latest release
936 # - `master`: git master branch, use `docker-compose build --no-cache`
937 # - `<version>`: specific version available on conda-forge
939 # docker-compose build conda
940 # docker-compose build conda-cpp
941 # docker-compose build conda-python
942 # docker-compose build conda-python-dask
943 # docker-compose run --rm conda-python-dask
944 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
947 dockerfile: ci/docker/conda-python-dask.dockerfile
949 - ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
958 volumes: *conda-volumes
960 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
961 /arrow/ci/scripts/python_build.sh /arrow /build &&
962 /arrow/ci/scripts/integration_dask.sh"]
966 # docker-compose build conda
967 # docker-compose build conda-cpp
968 # docker-compose build conda-python
969 # docker-compose build conda-python-jpype
970 # docker-compose run --rm conda-python-jpype
971 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
974 dockerfile: ci/docker/conda-python-jpype.dockerfile
976 - ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
986 volumes: *conda-volumes
988 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
989 /arrow/ci/scripts/python_build.sh /arrow /build &&
990 /arrow/ci/scripts/java_build.sh /arrow /build &&
991 /arrow/ci/scripts/python_test.sh /arrow"]
993 conda-python-turbodbc:
994 # Possible $TURBODBC parameters:
995 # - `latest`: latest release
996 # - `master`: git master branch, use `docker-compose build --no-cache`
997 # - `<version>`: specific version available under github releases
999 # docker-compose build conda
1000 # docker-compose build conda-cpp
1001 # docker-compose build conda-python
1002 # docker-compose build conda-python-turbodbc
1003 # docker-compose run --rm conda-python-turbodbc
1004 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
1007 dockerfile: ci/docker/conda-python-turbodbc.dockerfile
1009 - ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
1014 turbodbc: ${TURBODBC}
1018 volumes: *conda-volumes
1020 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1021 /arrow/ci/scripts/python_build.sh /arrow /build &&
1022 /arrow/ci/scripts/integration_turbodbc.sh /turbodbc /build"]
1024 conda-python-kartothek:
1025 # Possible $KARTOTHEK parameters:
1026 # - `latest`: latest release
1027 # - `master`: git master branch, use `docker-compose build --no-cache`
1028 # - `<version>`: specific version available under github releases
1030 # docker-compose build conda
1031 # docker-compose build conda-cpp
1032 # docker-compose build conda-python
1033 # docker-compose build conda-python-kartothek
1034 # docker-compose run --rm conda-python-kartothek
1035 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
1038 dockerfile: ci/docker/conda-python-kartothek.dockerfile
1040 - ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
1045 kartothek: ${KARTOTHEK}
1049 volumes: *conda-volumes
1051 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1052 /arrow/ci/scripts/python_build.sh /arrow /build &&
1053 /arrow/ci/scripts/integration_kartothek.sh /kartothek /build"]
1055 ################################## R ########################################
1059 # docker-compose build ubuntu-cpp
1060 # docker-compose build ubuntu-r
1061 # docker-compose run ubuntu-r
1062 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
1065 dockerfile: ci/docker/linux-apt-r.dockerfile
1067 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
1071 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
1072 gcc_version: ${GCC_VERSION}
1077 ARROW_R_CXXFLAGS: '-Werror'
1078 LIBARROW_BUILD: 'false'
1080 ARROW_R_DEV: ${ARROW_R_DEV}
1081 volumes: *ubuntu-volumes
1084 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1085 /arrow/ci/scripts/python_build.sh /arrow /build &&
1086 /arrow/ci/scripts/r_test.sh /arrow"
1090 ARROW_DEPENDENCY_SOURCE: ''
1094 /arrow/ci/scripts/r_test.sh /arrow"
1097 # This lets you test building/installing the arrow R package
1098 # (including building the C++ library) on any Docker image that contains R
1101 # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose build r
1102 # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose run r
1103 image: ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
1106 dockerfile: ci/docker/linux-r.dockerfile
1108 - ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
1110 base: ${R_ORG}/${R_IMAGE}:${R_TAG}
1111 r_dev: ${ARROW_R_DEV}
1112 devtoolset_version: ${DEVTOOLSET_VERSION}
1116 LIBARROW_DOWNLOAD: "false"
1117 ARROW_SOURCE_HOME: "/arrow"
1118 ARROW_R_DEV: ${ARROW_R_DEV}
1119 # To test for CRAN release, delete ^^ these two env vars so we download the Apache release
1120 ARROW_USE_PKG_CONFIG: "false"
1121 devtoolset_version: ${DEVTOOLSET_VERSION}
1123 - .:/arrow:delegated
1125 /bin/bash -c "/arrow/ci/scripts/r_test.sh /arrow"
1128 # Only 18.04 and amd64 supported
1130 # docker-compose build ubuntu-r-sanitizer
1131 # docker-compose run ubuntu-r-sanitizer
1132 image: ${REPO}:amd64-ubuntu-18.04-r-sanitizer
1134 # LeakSanitizer and gdb require ptrace(2)
1138 dockerfile: ci/docker/linux-r.dockerfile
1140 - ${REPO}:amd64-ubuntu-18.04-r-sanitizer
1142 base: wch1/r-debug:latest
1147 volumes: *ubuntu-volumes
1150 /arrow/ci/scripts/r_sanitize.sh /arrow"
1153 # Only 18.04 and amd64 supported
1155 # docker-compose build ubuntu-r-valgrind
1156 # docker-compose run ubuntu-r-valgrind
1157 image: ${REPO}:amd64-ubuntu-18.04-r-valgrind
1160 dockerfile: ci/docker/linux-r.dockerfile
1162 - ${REPO}:amd64-ubuntu-18.04-r-valgrind
1164 base: wch1/r-debug:latest
1169 ARROW_R_DEV: ${ARROW_R_DEV}
1170 # AVX512 is not supported by Valgrind (similar to ARROW-9851); some runners support AVX512 and some do not,
1171 # so some builds might pass without this setting, but we want to ensure that we stay on AVX2 regardless of runner.
1172 EXTRA_CMAKE_FLAGS: "-DARROW_RUNTIME_SIMD_LEVEL=AVX2"
1173 volumes: *ubuntu-volumes
1176 /arrow/ci/scripts/r_valgrind.sh /arrow"
1180 # docker-compose build r-revdepcheck
1181 # docker-compose run r-revdepcheck
1182 image: ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
1185 dockerfile: ci/docker/linux-r.dockerfile
1187 - ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
1189 base: rstudio/r-base:4.0-focal
1190 r_dev: ${ARROW_R_DEV}
1194 LIBARROW_DOWNLOAD: "true"
1195 LIBARROW_MINIMAL: "false"
1196 ARROW_SOURCE_HOME: "/arrow"
1198 volumes: *ubuntu-volumes
1200 /bin/bash -c "/arrow/ci/scripts/r_revdepcheck.sh /arrow"
1204 ################################# Go ########################################
1208 # docker-compose build debian-go
1209 # docker-compose run debian-go
1210 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1213 dockerfile: ci/docker/debian-${DEBIAN}-go.dockerfile
1215 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1220 volumes: *debian-volumes
1221 command: &go-command >
1223 /arrow/ci/scripts/go_build.sh /arrow &&
1224 /arrow/ci/scripts/go_test.sh /arrow"
1228 # docker-compose build debian-go-cgo
1229 # docker-compose run debian-go-cgo
1230 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
1233 dockerfile: ci/docker/debian-go-cgo.dockerfile
1235 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
1237 base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1239 volumes: *debian-volumes
1241 ARROW_GO_TESTCGO: "1"
1242 command: *go-command
1244 debian-go-cgo-python:
1246 # docker-compose build debian-go-cgo-python
1247 # docker-compose run debian-go-cgo-python
1248 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
1251 dockerfile: ci/docker/debian-${DEBIAN}-go-cgo-python.dockerfile
1253 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
1255 base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1257 volumes: *debian-volumes
1258 command: &go-cgo-python-command >
1260 /arrow/ci/scripts/go_cgo_python_test.sh /arrow"
1262 ############################# JavaScript ####################################
1266 # docker-compose build debian-js
1267 # docker-compose run debian-js
1268 image: ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
1271 dockerfile: ci/docker/debian-${DEBIAN}-js.dockerfile
1273 - ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
1278 volumes: *debian-volumes
1279 command: &js-command >
1281 /arrow/ci/scripts/js_build.sh /arrow &&
1282 /arrow/ci/scripts/js_test.sh /arrow"
1284 #################################### C# #####################################
1288 # docker-compose build ubuntu-csharp
1289 # docker-compose run ubuntu-csharp
1290 image: ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
1293 dockerfile: ci/docker/ubuntu-18.04-csharp.dockerfile
1295 - ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
1298 platform: bionic # use bionic-arm64v8 for ARM
1300 volumes: *ubuntu-volumes
1301 command: &csharp-command >
1303 /arrow/ci/scripts/csharp_build.sh /arrow &&
1304 /arrow/ci/scripts/csharp_test.sh /arrow &&
1305 /arrow/ci/scripts/csharp_pack.sh /arrow"
1307 ################################ Java #######################################
1311 # docker-compose build debian-java
1312 # docker-compose run debian-java
1313 image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1316 dockerfile: ci/docker/debian-9-java.dockerfile
1318 - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1324 volumes: &java-volumes
1325 - .:/arrow:delegated
1326 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1327 command: &java-command >
1329 /arrow/ci/scripts/java_build.sh /arrow /build &&
1330 /arrow/ci/scripts/java_test.sh /arrow /build"
1333 # Includes plasma test, jni for gandiva and orc, and C data interface.
1335 # docker-compose build debian-java
1336 # docker-compose build debian-java-jni
1337 # docker-compose run debian-java-jni
1338 image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
1341 dockerfile: ci/docker/linux-apt-jni.dockerfile
1343 - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
1345 base: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1351 - .:/arrow:delegated
1352 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1353 - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
1356 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1357 /arrow/ci/scripts/java_cdata_build.sh /arrow /build/java/c/build /build/java/c &&
1358 /arrow/ci/scripts/java_build.sh /arrow /build &&
1359 /arrow/ci/scripts/java_test.sh /arrow /build"
1361 ############################## Integration ##################################
1365 # docker-compose build conda-cpp
1366 # docker-compose build conda-integration
1367 # docker-compose run conda-integration
1368 image: ${REPO}:${ARCH}-conda-integration
1371 dockerfile: ci/docker/conda-integration.dockerfile
1373 - ${REPO}:${ARCH}-conda-integration
1378 # conda-forge doesn't have 3.5.4 so pinning explicitly, but this should
1379 # be set to ${MAVEN}
1383 volumes: *conda-volumes
1386 # tell archery where the arrow binaries are located
1387 ARROW_CPP_EXE_PATH: /build/cpp/debug
1388 ARCHERY_INTEGRATION_WITH_RUST: 0
1390 ["/arrow/ci/scripts/rust_build.sh /arrow /build &&
1391 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1392 /arrow/ci/scripts/csharp_build.sh /arrow /build &&
1393 /arrow/ci/scripts/go_build.sh /arrow &&
1394 /arrow/ci/scripts/java_build.sh /arrow /build &&
1395 /arrow/ci/scripts/js_build.sh /arrow /build &&
1396 /arrow/ci/scripts/integration_arrow.sh /arrow /build"]
1398 ################################ Docs #######################################
1402 # docker-compose build ubuntu-cpp
1403 # docker-compose build ubuntu-python
1404 # docker-compose build ubuntu-docs
1405 # docker-compose run --rm ubuntu-docs
1406 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
1409 dockerfile: ci/docker/linux-apt-docs.dockerfile
1411 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
1416 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
1420 ARROW_GLIB_GTK_DOC: "true"
1421 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
1422 volumes: *ubuntu-volumes
1423 command: &docs-command >
1425 /arrow/ci/scripts/cpp_build.sh /arrow /build true &&
1426 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
1427 /arrow/ci/scripts/python_build.sh /arrow /build &&
1428 /arrow/ci/scripts/java_build.sh /arrow /build true &&
1429 /arrow/ci/scripts/js_build.sh /arrow true &&
1430 /arrow/ci/scripts/r_build.sh /arrow true &&
1431 /arrow/ci/scripts/docs_build.sh /arrow /build"
1433 ################################# Tools #####################################
1437 # docker-compose build ubuntu-cpp
1438 # docker-compose build ubuntu-lint
1439 # docker-compose run ubuntu-lint
1440 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
1443 dockerfile: ci/docker/linux-apt-lint.dockerfile
1445 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
1447 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
1448 clang_tools: ${CLANG_TOOLS}
1451 volumes: *ubuntu-volumes
1452 command: archery lint --all --no-clang-tidy --no-iwyu --no-numpydoc
1454 ######################### Integration Tests #################################
1457 # Required by the Impala service.
1462 POSTGRES_PASSWORD: postgres
1465 # Required by the HiveServer2 and HDFS tests.
1466 image: ibisproject/impala:latest
1471 PGPASSWORD: postgres
1488 conda-cpp-hiveserver2:
1490 # docker-compose build conda-cpp
1491 # docker-compose build conda-cpp-hiveserver2
1492 # docker-compose run conda-cpp-hiveserver2
1493 image: ${REPO}:${ARCH}-conda-cpp
1499 ARROW_GANDIVA: "OFF"
1501 ARROW_HIVESERVER2: "ON"
1502 ARROW_HIVESERVER2_TEST_HOST: impala
1504 volumes: *conda-volumes
1506 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1507 /arrow/ci/scripts/integration_hiveserver2.sh /arrow /build"]
1511 # docker-compose build conda-cpp
1512 # docker-compose build conda-python
1513 # docker-compose build conda-python-hdfs
1514 # docker-compose run conda-python-hdfs
1515 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
1518 dockerfile: ci/docker/conda-python-hdfs.dockerfile
1520 - ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
1526 # conda-forge doesn't have Maven 3.5.4, so we pin it explicitly; this
1527 # should eventually be set to ${MAVEN}
1535 ARROW_HDFS_TEST_HOST: impala
1536 ARROW_HDFS_TEST_PORT: 8020
1537 ARROW_HDFS_TEST_USER: hdfs
1539 CMAKE_UNITY_BUILD: "ON"
1541 volumes: &conda-maven-volumes
1542 - .:/arrow:delegated
1543 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1544 - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
1546 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1547 /arrow/ci/scripts/python_build.sh /arrow /build &&
1548 /arrow/ci/scripts/integration_hdfs.sh /arrow /build"]
1552 # docker-compose build conda-cpp
1553 # docker-compose build conda-python
1554 # docker-compose build conda-python-spark
1555 # docker-compose run conda-python-spark
1556 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
1559 dockerfile: ci/docker/conda-python-spark.dockerfile
1561 - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
1567 # conda-forge doesn't have Maven 3.5.4, so we pin it explicitly; this
1568 # should eventually be set to ${MAVEN}
1574 volumes: *conda-maven-volumes
1576 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1577 /arrow/ci/scripts/python_build.sh /arrow /build &&
1578 /arrow/ci/scripts/java_build.sh /arrow /build &&
1579 /arrow/ci/scripts/integration_spark.sh /arrow /spark ${TEST_PYARROW_ONLY:-false}"]