1 # Licensed to the Apache Software Foundation (ASF) under one
2 # or more contributor license agreements. See the NOTICE file
3 # distributed with this work for additional information
4 # regarding copyright ownership. The ASF licenses this file
5 # to you under the Apache License, Version 2.0 (the
6 # "License"); you may not use this file except in compliance
7 # with the License. You may obtain a copy of the License at
8 #
9 # http://www.apache.org/licenses/LICENSE-2.0
10 #
11 # Unless required by applicable law or agreed to in writing,
12 # software distributed under the License is distributed on an
13 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 # KIND, either express or implied. See the License for the
15 # specific language governing permissions and limitations
16 # under the License.
17
18 # Usage
19 # -----
20 #
21 # The docker-compose file is parametrized using environment variables; the
22 # defaults are set in the .env file.
23 #
24 # Example:
25 # $ ARCH=arm64v8 docker-compose build ubuntu-cpp
26 # $ ARCH=arm64v8 docker-compose run ubuntu-cpp
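#
# Any of the variables referenced below can be overridden the same way; for
# example (a sketch, using one of the UBUNTU values listed in the service
# comments):
# $ UBUNTU=20.04 docker-compose build ubuntu-cpp
# $ UBUNTU=20.04 docker-compose run --rm ubuntu-cpp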
27 #
28 #
29 # Coredumps
30 # ---------
31 #
32 # In order to enable coredumps for the C++ tests run by CTest (either with
33 # `make unittest` or `ctest --output-on-failure`), the correct coredump
34 # pattern must be set.
35 # The kernel setting comes from the host, so while it can be changed from a
36 # running container using the --privileged option, the change will affect all
37 # other containers; prefer setting it explicitly, directly on the host.
38 # WARNING: setting this will affect the host machine.
39 #
40 # Linux host:
41 # $ sudo sysctl -w kernel.core_pattern=core.%e.%p
42 #
43 # macOS host running Docker for Mac (won't persist between restarts):
44 # $ screen ~/Library/Containers/com.docker.docker/Data/vms/0/tty
45 # # echo "core.%e.%p" > /proc/sys/kernel/core_pattern
46 #
47 # The setup attempts to generate coredumps by default, but the correct pattern
48 # above must be set. In order to disable coredump generation, set the
49 # ULIMIT_CORE environment variable to 0 before running docker-compose
50 # (or set it in the .env file):
51 #
52 # ULIMIT_CORE=0 docker-compose run --rm conda-cpp
53 #
54 # See more in cpp/build-support/run-test.sh::print_coredumps
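#
# Example (a host-side sketch on Linux, using the pattern above): set the core
# pattern once, then run any C++ service as usual; coredump collection is
# attempted by default:
#
# $ sudo sysctl -w kernel.core_pattern=core.%e.%p
# $ docker-compose run --rm conda-cpp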
55
56 version: '3.5'
57
58 x-ccache: &ccache
59 CCACHE_COMPILERCHECK: content
60 CCACHE_COMPRESS: 1
61 CCACHE_COMPRESSLEVEL: 6
62 CCACHE_MAXSIZE: 500M
63 CCACHE_DIR: /ccache
64
65 # CPU/memory limit presets to pass to Docker.
66 #
67 # Usage: archery docker run --resource-limit=github <image>
68 #
69 # Note that exporting ARCHERY_DOCKER_BIN="sudo docker" is likely required,
70 # unless Docker is configured with cgroups v2 (else Docker will silently
71 # ignore the limits).
72 x-limit-presets:
73 # These values emulate GitHub Actions:
74 # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners
75 github:
76 # Note we use cpuset and not cpus since Ninja only detects and limits
77     # parallelism given the former.
78 cpuset_cpus: [0, 1]
79 memory: 7g
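  # For example (a sketch; ubuntu-cpp is one of the services defined below):
  #   $ export ARCHERY_DOCKER_BIN="sudo docker"
  #   $ archery docker run --resource-limit=github ubuntu-cpp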
80
81 x-with-gpus:
82 - ubuntu-cuda-cpp
83 - ubuntu-cuda-python
84
85 x-hierarchy:
86 # This section is used by the archery tool to enable building nested images,
87 # so it is enough to call:
88 # archery run debian-ruby
89   # instead of a sequence of docker-compose commands:
90 # docker-compose build debian-cpp
91 # docker-compose build debian-c-glib
92 # docker-compose build debian-ruby
93 # docker-compose run --rm debian-ruby
94 #
95   # Each node must be either a string scalar or a list containing the
96   # descendant images, if any. Archery checks that every node has a corresponding
97   # service entry, so any new image/service must be listed here.
98 - conda:
99 - conda-cpp:
100 - conda-cpp-hiveserver2
101 - conda-cpp-valgrind
102 - conda-python:
103 - conda-python-pandas
104 - conda-python-dask
105 - conda-python-hdfs
106 - conda-python-jpype
107 - conda-python-turbodbc
108 - conda-python-kartothek
109 - conda-python-spark
110 - conda-integration
111 - debian-cpp:
112 - debian-c-glib:
113 - debian-ruby
114 - debian-python
115 - debian-go:
116 - debian-go-cgo
117 - debian-go-cgo-python
118 - debian-java:
119 - debian-java-jni
120 - debian-js
121 - fedora-cpp:
122 - fedora-python
123 - ubuntu-cpp:
124 - ubuntu-c-glib:
125 - ubuntu-ruby
126 - ubuntu-lint
127 - ubuntu-python:
128 - ubuntu-docs
129 - ubuntu-python-sdist-test
130 - ubuntu-r
131 - ubuntu-r-only-r
132 - ubuntu-cpp-bundled
133 - ubuntu-cuda-cpp:
134 - ubuntu-cuda-python
135 - ubuntu-csharp
136 - ubuntu-cpp-sanitizer
137 - ubuntu-cpp-thread-sanitizer
138 - ubuntu-r-sanitizer
139 - ubuntu-r-valgrind
140 - python-sdist
141 - r
142 - r-revdepcheck
143 # helper services
144 - impala
145 - postgres
146 - python-wheel-manylinux-2010
147 - python-wheel-manylinux-2014:
148 - java-jni-manylinux-2014
149 - python-wheel-manylinux-test-imports
150 - python-wheel-manylinux-test-unittests
151 - python-wheel-windows-vs2017
152 - python-wheel-windows-test
153
154 volumes:
155 conda-ccache:
156 name: ${ARCH}-conda-ccache
157 debian-ccache:
158 name: ${ARCH}-debian-${DEBIAN}-ccache
159 ubuntu-ccache:
160 name: ${ARCH}-ubuntu-${UBUNTU}-ccache
161 fedora-ccache:
162 name: ${ARCH}-fedora-${FEDORA}-ccache
163 debian-rust:
164 name: ${ARCH}-debian-${DEBIAN}-rust
165 maven-cache:
166 name: maven-cache
167 python-wheel-manylinux2010-ccache:
168 name: python-wheel-manylinux2010-ccache
169 python-wheel-manylinux2014-ccache:
170 name: python-wheel-manylinux2014-ccache
171 python-wheel-windows-clcache:
172 name: python-wheel-windows-clcache
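  # The ccache volumes are named after the build parameters; for example,
  # assuming ARCH=amd64, the conda ccache volume above is created as
  # amd64-conda-ccache and can be inspected or cleared with the regular
  # Docker CLI (a sketch):
  #   $ docker volume inspect amd64-conda-ccache
  #   $ docker volume rm amd64-conda-ccache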
173
174 services:
175
176 ################################# C++ #######################################
177 # Release build:
178 # docker-compose run -e ARROW_BUILD_TYPE=release conda-cpp|debian-cpp|...
179 # Shared only:
180 # docker-compose run -e ARROW_BUILD_STATIC=OFF conda-cpp|debian-cpp|...
181 # Static only:
182 # docker-compose run \
183 # -e ARROW_BUILD_SHARED=OFF \
184 # -e ARROW_TEST_LINKAGE=static \
185 # conda-cpp|debian-cpp|...
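#
# The flags can be combined; for example, a shared-only release build
# (a sketch, using ubuntu-cpp as the service):
#   docker-compose run --rm \
#     -e ARROW_BUILD_TYPE=release \
#     -e ARROW_BUILD_STATIC=OFF \
#     ubuntu-cpp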
186
187 conda:
188 # Base image for conda builds.
189 #
190 # Usage:
191     #   docker-compose build conda
192 # docker-compose run --rm conda
193 # Parameters:
194 # ARCH: amd64, arm32v7
195 image: ${REPO}:${ARCH}-conda
196 build:
197 context: .
198 dockerfile: ci/docker/conda.dockerfile
199 cache_from:
200 - ${REPO}:${ARCH}-conda
201 args:
202 arch: ${ARCH}
203 prefix: /opt/conda
204 volumes:
205 - .:/arrow:delegated
206
207 conda-cpp:
208     # C++ build in a conda environment, including the Doxygen docs.
209 #
210 # Usage:
211 # docker-compose build conda
212 # docker-compose build conda-cpp
213 # docker-compose run --rm conda-cpp
214 # Parameters:
215 # ARCH: amd64, arm32v7
216 image: ${REPO}:${ARCH}-conda-cpp
217 build:
218 context: .
219 dockerfile: ci/docker/conda-cpp.dockerfile
220 cache_from:
221 - ${REPO}:${ARCH}-conda-cpp
222 args:
223 repo: ${REPO}
224 arch: ${ARCH}
225 shm_size: &shm-size 2G
226 ulimits: &ulimits
227 core: ${ULIMIT_CORE}
228 environment:
229 <<: *ccache
230 ARROW_BUILD_BENCHMARKS: "ON"
231 ARROW_BUILD_EXAMPLES: "ON"
232 ARROW_ENABLE_TIMING_TESTS: # inherit
233 ARROW_MIMALLOC: "ON"
234 ARROW_USE_LD_GOLD: "ON"
235 ARROW_USE_PRECOMPILED_HEADERS: "ON"
236 volumes: &conda-volumes
237 - .:/arrow:delegated
238 - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
239 command: &cpp-conda-command
240 ["/arrow/ci/scripts/cpp_build.sh /arrow /build true &&
241 /arrow/ci/scripts/cpp_test.sh /arrow /build"]
242
243 conda-cpp-valgrind:
244 # Usage:
245 # docker-compose build conda
246 # docker-compose build conda-cpp
247 # docker-compose run --rm conda-cpp-valgrind
248 # Parameters:
249 # ARCH: amd64, arm32v7
250 image: ${REPO}:${ARCH}-conda-cpp
251 build:
252 context: .
253 dockerfile: ci/docker/conda-cpp.dockerfile
254 cache_from:
255 - ${REPO}:${ARCH}-conda-cpp
256 args:
257 repo: ${REPO}
258 arch: ${ARCH}
259 prefix: /opt/conda
260 shm_size: *shm-size
261 environment:
262 <<: *ccache
263 ARROW_CXXFLAGS: "-Og" # Shrink test runtime by enabling minimal optimizations
264 ARROW_ENABLE_TIMING_TESTS: # inherit
265 ARROW_FLIGHT: "OFF"
266 ARROW_GANDIVA: "OFF"
267 ARROW_JEMALLOC: "OFF"
268 ARROW_RUNTIME_SIMD_LEVEL: "AVX2" # AVX512 not supported by Valgrind (ARROW-9851)
269 ARROW_S3: "OFF"
270 ARROW_TEST_MEMCHECK: "ON"
271 ARROW_USE_LD_GOLD: "ON"
272 BUILD_WARNING_LEVEL: "PRODUCTION"
273 volumes: *conda-volumes
274 command: *cpp-conda-command
275
276 debian-cpp:
277 # Usage:
278 # docker-compose build debian-cpp
279 # docker-compose run --rm debian-cpp
280 # Parameters:
281 # ARCH: amd64, arm64v8, ...
282 # DEBIAN: 10, 11
283 image: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
284 build:
285 context: .
286 dockerfile: ci/docker/debian-${DEBIAN}-cpp.dockerfile
287 cache_from:
288 - ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
289 args:
290 arch: ${ARCH}
291 llvm: ${LLVM}
292 shm_size: *shm-size
293 ulimits: *ulimits
294 environment:
295 <<: *ccache
296 ARROW_ENABLE_TIMING_TESTS: # inherit
297 ARROW_MIMALLOC: "ON"
298 volumes: &debian-volumes
299 - .:/arrow:delegated
300 - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
301 command: &cpp-command >
302 /bin/bash -c "
303 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
304 /arrow/ci/scripts/cpp_test.sh /arrow /build"
305
306 ubuntu-cpp:
307 # Usage:
308 # docker-compose build ubuntu-cpp
309 # docker-compose run --rm ubuntu-cpp
310 # Parameters:
311 # ARCH: amd64, arm64v8, s390x, ...
312 # UBUNTU: 18.04, 20.04
313 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
314 build:
315 context: .
316 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
317 cache_from:
318 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
319 args:
320 arch: ${ARCH}
321 base: "${ARCH}/ubuntu:${UBUNTU}"
322 clang_tools: ${CLANG_TOOLS}
323 llvm: ${LLVM}
324 gcc_version: ${GCC_VERSION}
325 shm_size: *shm-size
326 ulimits: *ulimits
327 environment:
328 <<: *ccache
329 ARROW_ENABLE_TIMING_TESTS: # inherit
330 ARROW_MIMALLOC: "ON"
331 volumes: &ubuntu-volumes
332 - .:/arrow:delegated
333 - ${DOCKER_VOLUME_PREFIX}ubuntu-ccache:/ccache:delegated
334 command: *cpp-command
335
336 ubuntu-cpp-bundled:
337 # Arrow build with BUNDLED dependencies
338 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
339 build:
340 context: .
341 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp-minimal.dockerfile
342 cache_from:
343 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp-minimal
344 args:
345 arch: ${ARCH}
346 base: "${ARCH}/ubuntu:${UBUNTU}"
347 shm_size: *shm-size
348 ulimits: *ulimits
349 environment:
350 <<: *ccache
351 ARROW_DEPENDENCY_SOURCE: BUNDLED
352 CMAKE_GENERATOR: "Unix Makefiles"
353 volumes: *ubuntu-volumes
354 command: *cpp-command
355
356 ubuntu-cuda-cpp:
357 # Usage:
358     #   docker-compose build ubuntu-cuda-cpp
359     #   docker-compose run --rm ubuntu-cuda-cpp
360     # The host docker configuration also needs to be edited as follows:
361 # https://github.com/docker/compose/issues/6691#issuecomment-561504928
362 # Parameters:
363 # ARCH: amd64
364 # CUDA: 9.1, 10.0, 10.1
365 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
366 build:
367 context: .
368 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
369 cache_from:
370 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
371 args:
372 arch: ${ARCH}
373 base: nvidia/cuda:${CUDA}-devel-ubuntu${UBUNTU}
374 clang_tools: ${CLANG_TOOLS}
375 llvm: ${LLVM}
376 shm_size: *shm-size
377 ulimits: *ulimits
378 environment:
379 <<: *ccache
380 ARROW_CUDA: "ON"
381 volumes: *ubuntu-volumes
382 command: *cpp-command
383
384 ubuntu-cpp-sanitizer:
385 # Usage:
386 # docker-compose build ubuntu-cpp-sanitizer
387 # docker-compose run --rm ubuntu-cpp-sanitizer
388 # Parameters:
389 # ARCH: amd64, arm64v8, ...
390 # UBUNTU: 18.04, 20.04
391 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
392 cap_add:
393 # For LeakSanitizer
394 - SYS_PTRACE
395 build:
396 context: .
397 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
398 cache_from:
399 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
400 args:
401 arch: ${ARCH}
402 clang_tools: ${CLANG_TOOLS}
403 llvm: ${LLVM}
404 shm_size: *shm-size
405 volumes: *ubuntu-volumes
406 environment:
407 <<: *ccache
408 CC: clang-${CLANG_TOOLS}
409 CXX: clang++-${CLANG_TOOLS}
410 ARROW_BUILD_STATIC: "OFF"
411 ARROW_ENABLE_TIMING_TESTS: # inherit
412 ARROW_FUZZING: "ON" # Check fuzz regressions
413 ARROW_JEMALLOC: "OFF"
414 ARROW_ORC: "OFF"
415 ARROW_S3: "OFF"
416 ARROW_USE_ASAN: "ON"
417 ARROW_USE_UBSAN: "ON"
418 # utf8proc 2.1.0 in Ubuntu Bionic has test failures
419 utf8proc_SOURCE: "BUNDLED"
420 command: *cpp-command
421
422 ubuntu-cpp-thread-sanitizer:
423 # Usage:
424 # docker-compose build ubuntu-cpp-thread-sanitizer
425 # docker-compose run --rm ubuntu-cpp-thread-sanitizer
426 # Parameters:
427 # ARCH: amd64, arm64v8, ...
428 # UBUNTU: 18.04, 20.04
429 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
430 build:
431 context: .
432 dockerfile: ci/docker/ubuntu-${UBUNTU}-cpp.dockerfile
433 cache_from:
434 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
435 args:
436 arch: ${ARCH}
437 clang_tools: ${CLANG_TOOLS}
438 llvm: ${LLVM}
439 shm_size: *shm-size
440 volumes: *ubuntu-volumes
441 environment:
442 <<: *ccache
443 CC: clang-${CLANG_TOOLS}
444 CXX: clang++-${CLANG_TOOLS}
445 ARROW_BUILD_STATIC: "OFF"
446 ARROW_ENABLE_TIMING_TESTS: # inherit
447 ARROW_DATASET: "ON"
448 ARROW_JEMALLOC: "OFF"
449 ARROW_ORC: "OFF"
450 ARROW_S3: "OFF"
451 ARROW_USE_TSAN: "ON"
452 command: *cpp-command
453
454 fedora-cpp:
455 # Usage:
456 # docker-compose build fedora-cpp
457 # docker-compose run --rm fedora-cpp
458 # Parameters:
459 # ARCH: amd64, arm64v8, ...
460 # FEDORA: 33
461 image: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
462 build:
463 context: .
464 dockerfile: ci/docker/fedora-${FEDORA}-cpp.dockerfile
465 cache_from:
466 - ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
467 args:
468 arch: ${ARCH}
469 llvm: ${LLVM}
470 shm_size: *shm-size
471 ulimits: *ulimits
472 environment:
473 <<: *ccache
474 ARROW_ENABLE_TIMING_TESTS: # inherit
475 ARROW_MIMALLOC: "ON"
476 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
477 volumes: &fedora-volumes
478 - .:/arrow:delegated
479 - ${DOCKER_VOLUME_PREFIX}fedora-ccache:/ccache:delegated
480 command: *cpp-command
481
482 ############################### C GLib ######################################
483
484 debian-c-glib:
485 # Usage:
486 # docker-compose build debian-cpp
487 # docker-compose build debian-c-glib
488 # docker-compose run --rm debian-c-glib
489 # Parameters:
490 # ARCH: amd64, arm64v8, ...
491 # DEBIAN: 10, 11
492 image: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
493 build:
494 context: .
495 dockerfile: ci/docker/linux-apt-c-glib.dockerfile
496 cache_from:
497 - ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
498 args:
499 base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
500 shm_size: *shm-size
501 ulimits: *ulimits
502 environment:
503 <<: *ccache
504 ARROW_GLIB_GTK_DOC: "true"
505 volumes: *debian-volumes
506 command: &c-glib-command >
507 /bin/bash -c "
508 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
509 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
510 /arrow/ci/scripts/c_glib_test.sh /arrow /build"
511
512 ubuntu-c-glib:
513 # Usage:
514 # docker-compose build ubuntu-cpp
515 # docker-compose build ubuntu-c-glib
516 # docker-compose run --rm ubuntu-c-glib
517 # Parameters:
518 # ARCH: amd64, arm64v8, ...
519 # UBUNTU: 18.04, 20.04
520 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
521 build:
522 context: .
523 dockerfile: ci/docker/linux-apt-c-glib.dockerfile
524 cache_from:
525 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
526 args:
527 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
528 shm_size: *shm-size
529 ulimits: *ulimits
530 environment:
531 <<: *ccache
532 ARROW_GLIB_GTK_DOC: "true"
533 volumes: *ubuntu-volumes
534 command: *c-glib-command
535
536 ############################### Ruby ########################################
537 # As long as Ruby is the only implementation depending on C GLib, we can
538 # test C GLib and Ruby in one pass. This is an optimization that avoids
539 # redundant builds on CI services (one for C GLib and one for Ruby doing
540 # the same work twice).
541
542 debian-ruby:
543 # Usage:
544 # docker-compose build debian-cpp
545 # docker-compose build debian-c-glib
546 # docker-compose build debian-ruby
547 # docker-compose run --rm debian-ruby
548 # Parameters:
549 # ARCH: amd64, arm64v8, ...
550 # DEBIAN: 10, 11
551 image: ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
552 build:
553 context: .
554 dockerfile: ci/docker/linux-apt-ruby.dockerfile
555 cache_from:
556 - ${REPO}:${ARCH}-debian-${DEBIAN}-ruby
557 args:
558 base: ${REPO}:${ARCH}-debian-${DEBIAN}-c-glib
559 shm_size: *shm-size
560 ulimits: *ulimits
561 environment:
562 <<: *ccache
563 volumes: *debian-volumes
564 command: &ruby-command >
565 /bin/bash -c "
566 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
567 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
568 /arrow/ci/scripts/c_glib_test.sh /arrow /build &&
569 /arrow/ci/scripts/ruby_test.sh /arrow /build"
570
571 ubuntu-ruby:
572 # Usage:
573 # docker-compose build ubuntu-cpp
574 # docker-compose build ubuntu-c-glib
575 # docker-compose build ubuntu-ruby
576 # docker-compose run --rm ubuntu-ruby
577 # Parameters:
578 # ARCH: amd64, arm64v8, ...
579 # UBUNTU: 18.04, 20.04
580 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
581 build:
582 context: .
583 dockerfile: ci/docker/linux-apt-ruby.dockerfile
584 cache_from:
585 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-ruby
586 args:
587 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-c-glib
588 shm_size: *shm-size
589 ulimits: *ulimits
590 environment:
591 <<: *ccache
592 volumes: *ubuntu-volumes
593 command: *ruby-command
594
595 ############################### Python ######################################
596
597 conda-python:
598 # Usage:
599 # docker-compose build conda
600 # docker-compose build conda-cpp
601 # docker-compose build conda-python
602 # docker-compose run --rm conda-python
603 # Parameters:
604 # ARCH: amd64, arm32v7
605 # PYTHON: 3.6, 3.7, 3.8, 3.9
606 image: ${REPO}:${ARCH}-conda-python-${PYTHON}
607 build:
608 context: .
609 dockerfile: ci/docker/conda-python.dockerfile
610 cache_from:
611 - ${REPO}:${ARCH}-conda-python-${PYTHON}
612 args:
613 repo: ${REPO}
614 arch: ${ARCH}
615 python: ${PYTHON}
616 shm_size: *shm-size
617 environment:
618 <<: *ccache
619 volumes: *conda-volumes
620 command: &python-conda-command
621 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
622 /arrow/ci/scripts/python_build.sh /arrow /build &&
623 /arrow/ci/scripts/python_test.sh /arrow"]
624
625 ubuntu-cuda-python:
626 # Usage:
627     #   docker-compose build ubuntu-cuda-cpp
628     #   docker-compose build ubuntu-cuda-python
629     #   docker-compose run --rm ubuntu-cuda-python
630 # Parameters:
631 # ARCH: amd64
632 # CUDA: 8.0, 10.0, ...
633 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
634 build:
635 context: .
636 dockerfile: ci/docker/linux-apt-python-3.dockerfile
637 cache_from:
638 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-python-3
639 args:
640 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cuda-${CUDA}-cpp
641 shm_size: *shm-size
642 environment:
643 <<: *ccache
644 ARROW_CUDA: "ON"
645 volumes: *ubuntu-volumes
646 command: &python-command >
647 /bin/bash -c "
648 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
649 /arrow/ci/scripts/python_build.sh /arrow /build &&
650 /arrow/ci/scripts/python_test.sh /arrow"
651
652 debian-python:
653 # Usage:
654 # docker-compose build debian-cpp
655 # docker-compose build debian-python
656 # docker-compose run --rm debian-python
657 # Parameters:
658 # ARCH: amd64, arm64v8, ...
659 # DEBIAN: 10, 11
660 image: ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
661 build:
662 context: .
663 dockerfile: ci/docker/linux-apt-python-3.dockerfile
664 cache_from:
665 - ${REPO}:${ARCH}-debian-${DEBIAN}-python-3
666 args:
667 base: ${REPO}:${ARCH}-debian-${DEBIAN}-cpp
668 shm_size: *shm-size
669 environment:
670 <<: *ccache
671 volumes: *debian-volumes
672 command: *python-command
673
674 ubuntu-python:
675 # Usage:
676 # docker-compose build ubuntu-cpp
677 # docker-compose build ubuntu-python
678 # docker-compose run --rm ubuntu-python
679 # Parameters:
680 # ARCH: amd64, arm64v8, ...
681 # UBUNTU: 18.04, 20.04
682 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
683 build:
684 context: .
685 dockerfile: ci/docker/linux-apt-python-3.dockerfile
686 cache_from:
687 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
688 args:
689 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
690 shm_size: *shm-size
691 environment:
692 <<: *ccache
693 volumes: *ubuntu-volumes
694 command: *python-command
695
696 fedora-python:
697 # Usage:
698 # docker-compose build fedora-cpp
699 # docker-compose build fedora-python
700 # docker-compose run --rm fedora-python
701 # Parameters:
702 # ARCH: amd64, arm64v8, ...
703 # FEDORA: 33
704 image: ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
705 build:
706 context: .
707 dockerfile: ci/docker/linux-dnf-python-3.dockerfile
708 cache_from:
709 - ${REPO}:${ARCH}-fedora-${FEDORA}-python-3
710 args:
711 base: ${REPO}:${ARCH}-fedora-${FEDORA}-cpp
712 shm_size: *shm-size
713 environment:
714 <<: *ccache
715 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
716 volumes: *fedora-volumes
717 command: *python-command
718
719 ############################ Python sdist ###################################
720
721 python-sdist:
722 # Usage:
723 # docker-compose build python-sdist
724 # docker-compose run --rm python-sdist
725 # Parameters:
726 # PYARROW_VERSION: The pyarrow version for sdist such as "3.0.0"
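    #
    # For example (a sketch, using the version shown above):
    #   PYARROW_VERSION=3.0.0 docker-compose run --rm python-sdist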
727 image: ${REPO}:python-sdist
728 build:
729 context: .
730 dockerfile: ci/docker/python-sdist.dockerfile
731 cache_from:
732 - ${REPO}:python-sdist
733 environment:
734 PYARROW_VERSION: ${PYARROW_VERSION:-}
735 volumes:
736 - .:/arrow:delegated
737 command: /arrow/ci/scripts/python_sdist_build.sh /arrow
738
739 ubuntu-python-sdist-test:
740 # Usage:
741 # docker-compose build ubuntu-cpp
742 # docker-compose build ubuntu-python-sdist-test
743 # docker-compose run --rm ubuntu-python-sdist-test
744 # Parameters:
745 # ARCH: amd64, arm64v8, ...
746 # PYARROW_VERSION: The test target pyarrow version such as "3.0.0"
747 # UBUNTU: 18.04, 20.04
748 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
749 build:
750 context: .
751 dockerfile: ci/docker/linux-apt-python-3.dockerfile
752 cache_from:
753 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
754 args:
755 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
756 shm_size: *shm-size
757 environment:
758 <<: *ccache
759 PYARROW_VERSION: ${PYARROW_VERSION:-}
760 volumes: *ubuntu-volumes
761 command: >
762 /bin/bash -c "
763 apt remove -y git &&
764 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
765 /arrow/ci/scripts/python_sdist_test.sh /arrow"
766
767 ############################ Python wheels ##################################
768
769 # See available versions at:
770 # https://quay.io/repository/pypa/manylinux2010_x86_64?tab=tags
771   # Only the amd64 arch is supported.
772 python-wheel-manylinux-2010:
773 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
774 build:
775 args:
776 arch_alias: ${ARCH_ALIAS}
777 arch_short_alias: ${ARCH_SHORT_ALIAS}
778 base: quay.io/pypa/manylinux2010_${ARCH_ALIAS}:2021-10-11-14ac00e
779 vcpkg: ${VCPKG}
780 python: ${PYTHON}
781 context: .
782 dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
783 cache_from:
784 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2010-vcpkg-${VCPKG}
785 environment:
786 <<: *ccache
787 MANYLINUX_VERSION: 2010
788 volumes:
789 - .:/arrow:delegated
790 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2010-ccache:/ccache:delegated
791 command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
792
793 # See available versions at:
794 # https://quay.io/repository/pypa/manylinux2014_x86_64?tab=tags
795 python-wheel-manylinux-2014:
796 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
797 build:
798 args:
799 arch_alias: ${ARCH_ALIAS}
800 arch_short_alias: ${ARCH_SHORT_ALIAS}
801 base: quay.io/pypa/manylinux2014_${ARCH_ALIAS}:2021-10-11-14ac00e
802 vcpkg: ${VCPKG}
803 python: ${PYTHON}
804 context: .
805 dockerfile: ci/docker/python-wheel-manylinux-201x.dockerfile
806 cache_from:
807 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
808 environment:
809 <<: *ccache
810 MANYLINUX_VERSION: 2014
811 volumes:
812 - .:/arrow:delegated
813 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
814 command: /arrow/ci/scripts/python_wheel_manylinux_build.sh
815
816 python-wheel-manylinux-test-imports:
817 image: ${ARCH}/python:${PYTHON}
818 shm_size: 2G
819 volumes:
820 - .:/arrow:delegated
821 environment:
822 CHECK_IMPORTS: "ON"
823 CHECK_UNITTESTS: "OFF"
824 command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
825
826 python-wheel-manylinux-test-unittests:
827 image: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
828 build:
829 args:
830 arch: ${ARCH}
831 python: ${PYTHON}
832 context: .
833 dockerfile: ci/docker/python-wheel-manylinux-test.dockerfile
834 cache_from:
835 - ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-test
836 shm_size: 2G
837 volumes:
838 - .:/arrow:delegated
839 environment:
840 CHECK_IMPORTS: "OFF"
841 CHECK_UNITTESTS: "ON"
842 command: /arrow/ci/scripts/python_wheel_unix_test.sh /arrow
843
844 python-wheel-windows-vs2017:
845     # The Windows images must be built locally and pushed to a remote registry:
846 # export REPO=ghcr.io/ursacomputing/arrow
847 # PYTHON=3.6 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
848 # PYTHON=3.7 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
849 # PYTHON=3.8 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
850 # PYTHON=3.9 archery docker build --no-pull --using-docker-cli python-wheel-windows-vs2017
851 # PYTHON=3.6 archery docker push python-wheel-windows-vs2017
852 # PYTHON=3.7 archery docker push python-wheel-windows-vs2017
853 # PYTHON=3.8 archery docker push python-wheel-windows-vs2017
854 # PYTHON=3.9 archery docker push python-wheel-windows-vs2017
855 image: ${REPO}:python-${PYTHON}-wheel-windows-vs2017-vcpkg-${VCPKG}
856 build:
857 args:
858 vcpkg: ${VCPKG}
859 python: ${PYTHON}
860 context: .
861 dockerfile: ci/docker/python-wheel-windows-vs2017.dockerfile
862       # Using cache_from should make the pushed images reusable, but the
863       # image gets rebuilt anyway. Uncomment if no local cache is available.
864 # cache_from:
865 # - mcr.microsoft.com/windows/servercore:ltsc2019
866 # - ${REPO}:wheel-windows-vs2017
867 volumes:
868 - "${DOCKER_VOLUME_PREFIX}python-wheel-windows-clcache:C:/clcache"
869 - type: bind
870 source: .
871 target: "C:/arrow"
872 command: arrow\\ci\\scripts\\python_wheel_windows_build.bat
873
874 python-wheel-windows-test:
875 image: python:${PYTHON}-windowsservercore-1809
876 volumes:
877 - type: bind
878 source: .
879 target: "C:/arrow"
880 command: arrow\\ci\\scripts\\python_wheel_windows_test.bat
881
882 java-jni-manylinux-2014:
883 image: ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
884 build:
885 args:
886 base: ${REPO}:${ARCH}-python-${PYTHON}-wheel-manylinux-2014-vcpkg-${VCPKG}
887 java: 1.8.0
888 context: .
889 dockerfile: ci/docker/java-jni-manylinux-201x.dockerfile
890 cache_from:
891 - ${REPO}:${ARCH}-java-jni-manylinux-2014-vcpkg-${VCPKG}
892 environment:
893 <<: *ccache
894 volumes:
895 - .:/arrow:delegated
896 - ${DOCKER_VOLUME_PREFIX}python-wheel-manylinux2014-ccache:/ccache:delegated
897 command:
898 ["pip install -e /arrow/dev/archery &&
899 /arrow/ci/scripts/java_cdata_build.sh /arrow /java-native-build /arrow/java-dist &&
900 /arrow/ci/scripts/java_jni_manylinux_build.sh /arrow /build /arrow/java-dist"]
901
902 ############################## Integration #################################
903
904 conda-python-pandas:
905 # Possible $PANDAS parameters:
906 # - `latest`: latest release
907     #   - `master`: git master branch, use `docker-compose build --no-cache`
908 # - `<version>`: specific version available on conda-forge
909 # Usage:
910 # docker-compose build conda
911 # docker-compose build conda-cpp
912 # docker-compose build conda-python
913 # docker-compose build conda-python-pandas
914 # docker-compose run --rm conda-python-pandas
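    #
    # For example, to test against pandas master (a sketch; --no-cache forces
    # a fresh image build so the latest master is installed):
    #   PANDAS=master docker-compose build --no-cache conda-python-pandas
    #   PANDAS=master docker-compose run --rm conda-python-pandas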
915 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
916 build:
917 context: .
918 dockerfile: ci/docker/conda-python-pandas.dockerfile
919 cache_from:
920 - ${REPO}:${ARCH}-conda-python-${PYTHON}-pandas-${PANDAS}
921 args:
922 repo: ${REPO}
923 arch: ${ARCH}
924 python: ${PYTHON}
925 numpy: ${NUMPY}
926 pandas: ${PANDAS}
927 shm_size: *shm-size
928 environment:
929 <<: *ccache
930 volumes: *conda-volumes
931 command: *python-conda-command
932
933 conda-python-dask:
934 # Possible $DASK parameters:
935 # - `latest`: latest release
936     #   - `master`: git master branch, use `docker-compose build --no-cache`
937 # - `<version>`: specific version available on conda-forge
938 # Usage:
939 # docker-compose build conda
940 # docker-compose build conda-cpp
941 # docker-compose build conda-python
942 # docker-compose build conda-python-dask
943 # docker-compose run --rm conda-python-dask
944 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
945 build:
946 context: .
947 dockerfile: ci/docker/conda-python-dask.dockerfile
948 cache_from:
949 - ${REPO}:${ARCH}-conda-python-${PYTHON}-dask-${DASK}
950 args:
951 repo: ${REPO}
952 arch: ${ARCH}
953 python: ${PYTHON}
954 dask: ${DASK}
955 shm_size: *shm-size
956 environment:
957 <<: *ccache
958 volumes: *conda-volumes
959 command:
960 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
961 /arrow/ci/scripts/python_build.sh /arrow /build &&
962 /arrow/ci/scripts/integration_dask.sh"]
963
964 conda-python-jpype:
965 # Usage:
966 # docker-compose build conda
967 # docker-compose build conda-cpp
968 # docker-compose build conda-python
969 # docker-compose build conda-python-jpype
970 # docker-compose run --rm conda-python-jpype
971 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
972 build:
973 context: .
974 dockerfile: ci/docker/conda-python-jpype.dockerfile
975 cache_from:
976 - ${REPO}:${ARCH}-conda-python-${PYTHON}-jpype
977 args:
978 repo: ${REPO}
979 arch: ${ARCH}
980 python: ${PYTHON}
981 shm_size: *shm-size
982 environment:
983 <<: *ccache
984 ARROW_FLIGHT: "OFF"
985 ARROW_GANDIVA: "OFF"
986 volumes: *conda-volumes
987 command:
988 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
989 /arrow/ci/scripts/python_build.sh /arrow /build &&
990 /arrow/ci/scripts/java_build.sh /arrow /build &&
991 /arrow/ci/scripts/python_test.sh /arrow"]
992
993 conda-python-turbodbc:
994 # Possible $TURBODBC parameters:
995 # - `latest`: latest release
996     #   - `master`: git master branch, use `docker-compose build --no-cache`
997 # - `<version>`: specific version available under github releases
998 # Usage:
999 # docker-compose build conda
1000 # docker-compose build conda-cpp
1001 # docker-compose build conda-python
1002 # docker-compose build conda-python-turbodbc
1003 # docker-compose run --rm conda-python-turbodbc
1004 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
1005 build:
1006 context: .
1007 dockerfile: ci/docker/conda-python-turbodbc.dockerfile
1008 cache_from:
1009 - ${REPO}:${ARCH}-conda-python-${PYTHON}-turbodbc-${TURBODBC}
1010 args:
1011 repo: ${REPO}
1012 arch: ${ARCH}
1013 python: ${PYTHON}
1014 turbodbc: ${TURBODBC}
1015 shm_size: *shm-size
1016 environment:
1017 <<: *ccache
1018 volumes: *conda-volumes
1019 command:
1020 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1021 /arrow/ci/scripts/python_build.sh /arrow /build &&
1022 /arrow/ci/scripts/integration_turbodbc.sh /turbodbc /build"]
1023
1024 conda-python-kartothek:
1025 # Possible $KARTOTHEK parameters:
1026 # - `latest`: latest release
1027     #   - `master`: git master branch, use `docker-compose build --no-cache`
1028 # - `<version>`: specific version available under github releases
1029 # Usage:
1030 # docker-compose build conda
1031 # docker-compose build conda-cpp
1032 # docker-compose build conda-python
1033 # docker-compose build conda-python-kartothek
1034 # docker-compose run --rm conda-python-kartothek
1035 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
1036 build:
1037 context: .
1038 dockerfile: ci/docker/conda-python-kartothek.dockerfile
1039 cache_from:
1040 - ${REPO}:${ARCH}-conda-python-${PYTHON}-kartothek-${KARTOTHEK}
1041 args:
1042 repo: ${REPO}
1043 arch: ${ARCH}
1044 python: ${PYTHON}
1045 kartothek: ${KARTOTHEK}
1046 shm_size: *shm-size
1047 environment:
1048 <<: *ccache
1049 volumes: *conda-volumes
1050 command:
1051 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1052 /arrow/ci/scripts/python_build.sh /arrow /build &&
1053 /arrow/ci/scripts/integration_kartothek.sh /kartothek /build"]
1054
1055 ################################## R ########################################
1056
1057 ubuntu-r:
1058 # Usage:
1059 # docker-compose build ubuntu-cpp
1060 # docker-compose build ubuntu-r
1061 # docker-compose run ubuntu-r
1062 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
1063 build:
1064 context: .
1065 dockerfile: ci/docker/linux-apt-r.dockerfile
1066 cache_from:
1067 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-r-${R}
1068 args:
1069 arch: ${ARCH}
1070 r: ${R}
1071 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
1072 gcc_version: ${GCC_VERSION}
1073 tz: ${TZ}
1074 shm_size: *shm-size
1075 environment:
1076 <<: *ccache
1077 ARROW_R_CXXFLAGS: '-Werror'
1078 LIBARROW_BUILD: 'false'
1079 NOT_CRAN: 'true'
1080 ARROW_R_DEV: ${ARROW_R_DEV}
1081 volumes: *ubuntu-volumes
1082 command: >
1083 /bin/bash -c "
1084 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1085 /arrow/ci/scripts/python_build.sh /arrow /build &&
1086 /arrow/ci/scripts/r_test.sh /arrow"
1087
1088 ubuntu-r-only-r:
1089 environment:
1090 ARROW_DEPENDENCY_SOURCE: ''
1091 extends: ubuntu-r
1092 command: >
1093 /bin/bash -c "
1094 /arrow/ci/scripts/r_test.sh /arrow"
1095
1096 r:
1097 # This lets you test building/installing the arrow R package
1098 # (including building the C++ library) on any Docker image that contains R
1099 #
1100 # Usage:
1101 # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose build r
1102 # R_ORG=rhub R_IMAGE=ubuntu-gcc-release R_TAG=latest docker-compose run r
1103 image: ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
1104 build:
1105 context: .
1106 dockerfile: ci/docker/linux-r.dockerfile
1107 cache_from:
1108 - ${REPO}:r-${R_ORG}-${R_IMAGE}-${R_TAG}
1109 args:
1110 base: ${R_ORG}/${R_IMAGE}:${R_TAG}
1111 r_dev: ${ARROW_R_DEV}
1112 devtoolset_version: ${DEVTOOLSET_VERSION}
1113 tz: ${TZ}
1114 shm_size: *shm-size
1115 environment:
1116 LIBARROW_DOWNLOAD: "false"
1117 ARROW_SOURCE_HOME: "/arrow"
1118 ARROW_R_DEV: ${ARROW_R_DEV}
1119 # To test for CRAN release, delete ^^ these two env vars so we download the Apache release
1120 ARROW_USE_PKG_CONFIG: "false"
1121 devtoolset_version: ${DEVTOOLSET_VERSION}
1122 volumes:
1123 - .:/arrow:delegated
1124 command: >
1125 /bin/bash -c "/arrow/ci/scripts/r_test.sh /arrow"
1126
1127 ubuntu-r-sanitizer:
1128 # Only 18.04 and amd64 supported
1129 # Usage:
1130 # docker-compose build ubuntu-r-sanitizer
1131 # docker-compose run ubuntu-r-sanitizer
1132 image: ${REPO}:amd64-ubuntu-18.04-r-sanitizer
1133 cap_add:
1134       # LeakSanitizer and gdb require ptrace(2)
1135 - SYS_PTRACE
1136 build:
1137 context: .
1138 dockerfile: ci/docker/linux-r.dockerfile
1139 cache_from:
1140 - ${REPO}:amd64-ubuntu-18.04-r-sanitizer
1141 args:
1142 base: wch1/r-debug:latest
1143 r_bin: RDsan
1144 tz: ${TZ}
1145 environment:
1146 <<: *ccache
1147 volumes: *ubuntu-volumes
1148 command: >
1149 /bin/bash -c "
1150 /arrow/ci/scripts/r_sanitize.sh /arrow"
1151
1152 ubuntu-r-valgrind:
1153 # Only 18.04 and amd64 supported
1154 # Usage:
1155 # docker-compose build ubuntu-r-valgrind
1156 # docker-compose run ubuntu-r-valgrind
1157 image: ${REPO}:amd64-ubuntu-18.04-r-valgrind
1158 build:
1159 context: .
1160 dockerfile: ci/docker/linux-r.dockerfile
1161 cache_from:
1162 - ${REPO}:amd64-ubuntu-18.04-r-valgrind
1163 args:
1164 base: wch1/r-debug:latest
1165 r_bin: RDvalgrind
1166 tz: ${TZ}
1167 environment:
1168 <<: *ccache
1169 ARROW_R_DEV: ${ARROW_R_DEV}
1170       # AVX512 is not supported by Valgrind (similar to ARROW-9851). Some runners
1171       # support AVX512 and some do not, so some builds might pass without this
1172       # setting, but we want to ensure that we stay at AVX2 regardless of the runner.
1172 EXTRA_CMAKE_FLAGS: "-DARROW_RUNTIME_SIMD_LEVEL=AVX2"
1173 volumes: *ubuntu-volumes
1174 command: >
1175 /bin/bash -c "
1176 /arrow/ci/scripts/r_valgrind.sh /arrow"
1177
1178 r-revdepcheck:
1179 # Usage:
1180 # docker-compose build r-revdepcheck
1181 # docker-compose run r-revdepcheck
1182 image: ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
1183 build:
1184 context: .
1185 dockerfile: ci/docker/linux-r.dockerfile
1186 cache_from:
1187 - ${REPO}:r-rstudio-r-base-4.0-focal-revdepcheck
1188 args:
1189 base: rstudio/r-base:4.0-focal
1190 r_dev: ${ARROW_R_DEV}
1191 tz: ${TZ}
1192 shm_size: *shm-size
1193 environment:
1194 LIBARROW_DOWNLOAD: "true"
1195 LIBARROW_MINIMAL: "false"
1196 ARROW_SOURCE_HOME: "/arrow"
1197 ARROW_R_DEV: "true"
1198 volumes: *ubuntu-volumes
1199 command: >
1200 /bin/bash -c "/arrow/ci/scripts/r_revdepcheck.sh /arrow"
1201
1202
1203
1204 ################################# Go ########################################
1205
1206 debian-go:
1207 # Usage:
1208 # docker-compose build debian-go
1209 # docker-compose run debian-go
1210 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1211 build:
1212 context: .
1213 dockerfile: ci/docker/debian-${DEBIAN}-go.dockerfile
1214 cache_from:
1215 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1216 args:
1217 arch: ${ARCH}
1218 go: ${GO}
1219 shm_size: *shm-size
1220 volumes: *debian-volumes
1221 command: &go-command >
1222 /bin/bash -c "
1223 /arrow/ci/scripts/go_build.sh /arrow &&
1224 /arrow/ci/scripts/go_test.sh /arrow"
1225
1226 debian-go-cgo:
1227 # Usage:
1228 # docker-compose build debian-go-cgo
1229 # docker-compose run debian-go-cgo
1230 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
1231 build:
1232 context: .
1233 dockerfile: ci/docker/debian-go-cgo.dockerfile
1234 cache_from:
1235 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo
1236 args:
1237 base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1238 shm_size: *shm-size
1239 volumes: *debian-volumes
1240 environment:
1241 ARROW_GO_TESTCGO: "1"
1242 command: *go-command
1243
1244 debian-go-cgo-python:
1245 # Usage:
1246 # docker-compose build debian-go-cgo-python
1247 # docker-compose run debian-go-cgo-python
1248 image: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
1249 build:
1250 context: .
1251 dockerfile: ci/docker/debian-${DEBIAN}-go-cgo-python.dockerfile
1252 cache_from:
1253 - ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}-cgo-python
1254 args:
1255 base: ${REPO}:${ARCH}-debian-${DEBIAN}-go-${GO}
1256 shm_size: *shm-size
1257 volumes: *debian-volumes
1258 command: &go-cgo-python-command >
1259 /bin/bash -c "
1260 /arrow/ci/scripts/go_cgo_python_test.sh /arrow"
1261
1262 ############################# JavaScript ####################################
1263
1264 debian-js:
1265 # Usage:
1266 # docker-compose build debian-js
1267 # docker-compose run debian-js
1268 image: ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
1269 build:
1270 context: .
1271 dockerfile: ci/docker/debian-${DEBIAN}-js.dockerfile
1272 cache_from:
1273 - ${REPO}:${ARCH}-debian-${DEBIAN}-js-${NODE}
1274 args:
1275 arch: ${ARCH}
1276 node: ${NODE}
1277 shm_size: *shm-size
1278 volumes: *debian-volumes
1279 command: &js-command >
1280 /bin/bash -c "
1281 /arrow/ci/scripts/js_build.sh /arrow &&
1282 /arrow/ci/scripts/js_test.sh /arrow"
1283
1284 #################################### C# #####################################
1285
1286 ubuntu-csharp:
1287 # Usage:
1288 # docker-compose build ubuntu-csharp
1289 # docker-compose run ubuntu-csharp
1290 image: ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
1291 build:
1292 context: .
1293 dockerfile: ci/docker/ubuntu-18.04-csharp.dockerfile
1294 cache_from:
1295 - ${REPO}:${ARCH}-ubuntu-18.04-csharp-${DOTNET}
1296 args:
1297 dotnet: ${DOTNET}
1298 platform: bionic # use bionic-arm64v8 for ARM
1299 shm_size: *shm-size
1300 volumes: *ubuntu-volumes
1301 command: &csharp-command >
1302 /bin/bash -c "
1303 /arrow/ci/scripts/csharp_build.sh /arrow &&
1304 /arrow/ci/scripts/csharp_test.sh /arrow &&
1305 /arrow/ci/scripts/csharp_pack.sh /arrow"
1306
1307 ################################ Java #######################################
1308
1309 debian-java:
1310 # Usage:
1311 # docker-compose build debian-java
1312 # docker-compose run debian-java
1313 image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1314 build:
1315 context: .
1316 dockerfile: ci/docker/debian-9-java.dockerfile
1317 cache_from:
1318 - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1319 args:
1320 arch: ${ARCH}
1321 jdk: ${JDK}
1322 maven: ${MAVEN}
1323 shm_size: *shm-size
1324 volumes: &java-volumes
1325 - .:/arrow:delegated
1326 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1327 command: &java-command >
1328 /bin/bash -c "
1329 /arrow/ci/scripts/java_build.sh /arrow /build &&
1330 /arrow/ci/scripts/java_test.sh /arrow /build"
1331
1332 debian-java-jni:
1333     # Includes the Plasma tests, JNI for Gandiva and ORC, and the C data interface.
1334 # Usage:
1335 # docker-compose build debian-java
1336 # docker-compose build debian-java-jni
1337 # docker-compose run debian-java-jni
1338 image: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
1339 build:
1340 context: .
1341 dockerfile: ci/docker/linux-apt-jni.dockerfile
1342 cache_from:
1343 - ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}-jni
1344 args:
1345 base: ${REPO}:${ARCH}-debian-9-java-${JDK}-maven-${MAVEN}
1346 llvm: ${LLVM}
1347 shm_size: *shm-size
1348 environment:
1349 <<: *ccache
1350 volumes:
1351 - .:/arrow:delegated
1352 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1353 - ${DOCKER_VOLUME_PREFIX}debian-ccache:/ccache:delegated
1354 command:
1355 /bin/bash -c "
1356 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1357 /arrow/ci/scripts/java_cdata_build.sh /arrow /build/java/c/build /build/java/c &&
1358 /arrow/ci/scripts/java_build.sh /arrow /build &&
1359 /arrow/ci/scripts/java_test.sh /arrow /build"
1360
1361 ############################## Integration ##################################
1362
1363 conda-integration:
1364 # Usage:
1365 # docker-compose build conda-cpp
1366 # docker-compose build conda-integration
1367 # docker-compose run conda-integration
1368 image: ${REPO}:${ARCH}-conda-integration
1369 build:
1370 context: .
1371 dockerfile: ci/docker/conda-integration.dockerfile
1372 cache_from:
1373 - ${REPO}:${ARCH}-conda-integration
1374 args:
1375 repo: ${REPO}
1376 arch: ${ARCH}
1377 jdk: ${JDK}
1378         # conda-forge doesn't have Maven 3.5.4, so we pin 3.5 explicitly; this
1379         # should be set to ${MAVEN}
1380 maven: 3.5
1381 node: ${NODE}
1382 go: ${GO}
1383 volumes: *conda-volumes
1384 environment:
1385 <<: *ccache
1386 # tell archery where the arrow binaries are located
1387 ARROW_CPP_EXE_PATH: /build/cpp/debug
1388 ARCHERY_INTEGRATION_WITH_RUST: 0
1389 command:
1390 ["/arrow/ci/scripts/rust_build.sh /arrow /build &&
1391 /arrow/ci/scripts/cpp_build.sh /arrow /build &&
1392 /arrow/ci/scripts/csharp_build.sh /arrow /build &&
1393 /arrow/ci/scripts/go_build.sh /arrow &&
1394 /arrow/ci/scripts/java_build.sh /arrow /build &&
1395 /arrow/ci/scripts/js_build.sh /arrow /build &&
1396 /arrow/ci/scripts/integration_arrow.sh /arrow /build"]
1397
1398 ################################ Docs #######################################
1399
1400 ubuntu-docs:
1401 # Usage:
1402 # docker-compose build ubuntu-cpp
1403 # docker-compose build ubuntu-python
1404 # docker-compose build ubuntu-docs
1405 # docker-compose run --rm ubuntu-docs
1406 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
1407 build:
1408 context: .
1409 dockerfile: ci/docker/linux-apt-docs.dockerfile
1410 cache_from:
1411 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-docs
1412 args:
1413 r: ${R}
1414 jdk: ${JDK}
1415 node: ${NODE}
1416 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-python-3
1417 environment:
1418 <<: *ccache
1419 ARROW_CUDA: "ON"
1420 ARROW_GLIB_GTK_DOC: "true"
1421 Protobuf_SOURCE: "BUNDLED" # Need Protobuf >= 3.15
1422 volumes: *ubuntu-volumes
1423 command: &docs-command >
1424 /bin/bash -c "
1425 /arrow/ci/scripts/cpp_build.sh /arrow /build true &&
1426 /arrow/ci/scripts/c_glib_build.sh /arrow /build &&
1427 /arrow/ci/scripts/python_build.sh /arrow /build &&
1428 /arrow/ci/scripts/java_build.sh /arrow /build true &&
1429 /arrow/ci/scripts/js_build.sh /arrow true &&
1430 /arrow/ci/scripts/r_build.sh /arrow true &&
1431 /arrow/ci/scripts/docs_build.sh /arrow /build"
1432
1433 ################################# Tools #####################################
1434
1435 ubuntu-lint:
1436 # Usage:
1437 # docker-compose build ubuntu-cpp
1438 # docker-compose build ubuntu-lint
1439 # docker-compose run ubuntu-lint
1440 image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
1441 build:
1442 context: .
1443 dockerfile: ci/docker/linux-apt-lint.dockerfile
1444 cache_from:
1445 - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint
1446 args:
1447 base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp
1448 clang_tools: ${CLANG_TOOLS}
1449 environment:
1450 <<: *ccache
1451 volumes: *ubuntu-volumes
1452 command: archery lint --all --no-clang-tidy --no-iwyu --no-numpydoc
1453
1454 ######################### Integration Tests #################################
1455
1456 postgres:
1457 # required for the impala service
1458 image: postgres
1459 ports:
1460 - 5432:5432
1461 environment:
1462 POSTGRES_PASSWORD: postgres
1463
1464 impala:
1465 # required for the hiveserver and hdfs tests
1466 image: ibisproject/impala:latest
1467 hostname: impala
1468 links:
1469 - postgres:postgres
1470 environment:
1471 PGPASSWORD: postgres
1472 ports:
1473 # HDFS
1474 - 9020:9020
1475 - 50070:50070
1476 - 50075:50075
1477 - 8020:8020
1478 - 8042:8042
1479 # Hive
1480 - 9083:9083
1481 # Impala
1482 - 21000:21000
1483 - 21050:21050
1484 - 25000:25000
1485 - 25010:25010
1486 - 25020:25020
1487
1488 conda-cpp-hiveserver2:
1489 # Usage:
1490 # docker-compose build conda-cpp
1491 # docker-compose build conda-cpp-hiveserver2
1492 # docker-compose run conda-cpp-hiveserver2
1493 image: ${REPO}:${ARCH}-conda-cpp
1494 links:
1495 - impala:impala
1496 environment:
1497 <<: *ccache
1498 ARROW_FLIGHT: "OFF"
1499 ARROW_GANDIVA: "OFF"
1500 ARROW_PLASMA: "OFF"
1501 ARROW_HIVESERVER2: "ON"
1502 ARROW_HIVESERVER2_TEST_HOST: impala
1503 shm_size: *shm-size
1504 volumes: *conda-volumes
1505 command:
1506 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1507 /arrow/ci/scripts/integration_hiveserver2.sh /arrow /build"]
1508
1509 conda-python-hdfs:
1510 # Usage:
1511 # docker-compose build conda-cpp
1512 # docker-compose build conda-python
1513 # docker-compose build conda-python-hdfs
1514 # docker-compose run conda-python-hdfs
1515 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
1516 build:
1517 context: .
1518 dockerfile: ci/docker/conda-python-hdfs.dockerfile
1519 cache_from:
1520 - ${REPO}:${ARCH}-conda-python-${PYTHON}-hdfs-${HDFS}
1521 args:
1522 repo: ${REPO}
1523 arch: ${ARCH}
1524 python: ${PYTHON}
1525 jdk: ${JDK}
1526         # conda-forge doesn't have Maven 3.5.4, so we pin 3.5 explicitly; this
1527         # should be set to ${MAVEN}
1528 maven: 3.5
1529 hdfs: ${HDFS}
1530 links:
1531 - impala:impala
1532 environment:
1533 <<: *ccache
1534 ARROW_HDFS: "ON"
1535 ARROW_HDFS_TEST_HOST: impala
1536 ARROW_HDFS_TEST_PORT: 8020
1537 ARROW_HDFS_TEST_USER: hdfs
1538 ARROW_S3: "OFF"
1539 CMAKE_UNITY_BUILD: "ON"
1540 shm_size: *shm-size
1541 volumes: &conda-maven-volumes
1542 - .:/arrow:delegated
1543 - ${DOCKER_VOLUME_PREFIX}maven-cache:/root/.m2:delegated
1544 - ${DOCKER_VOLUME_PREFIX}conda-ccache:/ccache:delegated
1545 command:
1546 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1547 /arrow/ci/scripts/python_build.sh /arrow /build &&
1548 /arrow/ci/scripts/integration_hdfs.sh /arrow /build"]
1549
1550 conda-python-spark:
1551 # Usage:
1552 # docker-compose build conda-cpp
1553 # docker-compose build conda-python
1554 # docker-compose build conda-python-spark
1555 # docker-compose run conda-python-spark
1556 image: ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
1557 build:
1558 context: .
1559 dockerfile: ci/docker/conda-python-spark.dockerfile
1560 cache_from:
1561 - ${REPO}:${ARCH}-conda-python-${PYTHON}-spark-${SPARK}
1562 args:
1563 repo: ${REPO}
1564 arch: ${ARCH}
1565 python: ${PYTHON}
1566 jdk: ${JDK}
1567         # conda-forge doesn't have Maven 3.5.4, so we pin 3.5 explicitly; this
1568         # should be set to ${MAVEN}
1569 maven: 3.5
1570 spark: ${SPARK}
1571 shm_size: *shm-size
1572 environment:
1573 <<: *ccache
1574 volumes: *conda-maven-volumes
1575 command:
1576 ["/arrow/ci/scripts/cpp_build.sh /arrow /build &&
1577 /arrow/ci/scripts/python_build.sh /arrow /build &&
1578 /arrow/ci/scripts/java_build.sh /arrow /build &&
1579 /arrow/ci/scripts/integration_spark.sh /arrow /spark ${TEST_PYARROW_ONLY:-false}"]