diff --git a/.azure-pipelines/azure-pipelines-linux.yml b/.azure-pipelines/azure-pipelines-linux.yml
index 8bf9ccf17..06274ee80 100755
--- a/.azure-pipelines/azure-pipelines-linux.yml
+++ b/.azure-pipelines/azure-pipelines-linux.yml
@@ -8,28 +8,28 @@ jobs:
vmImage: ubuntu-latest
strategy:
matrix:
- linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11:
- CONFIG: linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11
+ linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11:
+ CONFIG: linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8
- linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13:
- CONFIG: linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13
+ linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13:
+ CONFIG: linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9
- linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11:
- CONFIG: linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11
+ linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11:
+ CONFIG: linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8
- linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13:
- CONFIG: linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13
+ linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13:
+ CONFIG: linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9
- linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11:
- CONFIG: linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11
+ linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11:
+ CONFIG: linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64-cuda11.8:ubi8
- linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13:
- CONFIG: linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13
+ linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13:
+ CONFIG: linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13
UPLOAD_PACKAGES: 'True'
DOCKER_IMAGE: quay.io/condaforge/linux-anvil-x86_64:alma9
timeoutInMinutes: 360
@@ -57,6 +57,13 @@ jobs:
sudo apt-get autoclean -y >& /dev/null
df -h
displayName: Manage disk space
+ - script: |
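+      # best effort: "|| true" keeps this step from failing if the swap file already exists or fallocate is unsupported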
+ sudo fallocate -l 8GiB /swapfile || true
+ sudo chmod 600 /swapfile || true
+ sudo mkswap /swapfile || true
+ sudo swapon /swapfile || true
+ displayName: Create swap file
# configure qemu binfmt-misc. This allows us to run docker containers for
# foreign architectures via embedded qemu-static
- script: |
diff --git a/.azure-pipelines/azure-pipelines-win.yml b/.azure-pipelines/azure-pipelines-win.yml
index c6ed88960..89dcdd379 100755
--- a/.azure-pipelines/azure-pipelines-win.yml
+++ b/.azure-pipelines/azure-pipelines-win.yml
@@ -8,11 +8,11 @@ jobs:
vmImage: windows-2022
strategy:
matrix:
- win_64_cuda_compilerNonecuda_compiler_versionNone:
- CONFIG: win_64_cuda_compilerNonecuda_compiler_versionNone
+ win_64_cuda_compiler_version11.8:
+ CONFIG: win_64_cuda_compiler_version11.8
UPLOAD_PACKAGES: 'True'
- win_64_cuda_compilernvcccuda_compiler_version11.8:
- CONFIG: win_64_cuda_compilernvcccuda_compiler_version11.8
+ win_64_cuda_compiler_versionNone:
+ CONFIG: win_64_cuda_compiler_versionNone
UPLOAD_PACKAGES: 'True'
timeoutInMinutes: 360
variables:
diff --git a/.ci_support/linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
similarity index 82%
rename from .ci_support/linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
rename to .ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
index 0dc50ed66..f3a7363b8 100644
--- a/.ci_support/linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
+++ b/.ci_support/linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
similarity index 82%
rename from .ci_support/linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
rename to .ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
index 68beebe67..e9b5f8713 100644
--- a/.ci_support/linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
+++ b/.ci_support/linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
similarity index 82%
rename from .ci_support/linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
rename to .ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
index a92195b92..d22ecca1d 100644
--- a/.ci_support/linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
+++ b/.ci_support/linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
similarity index 82%
rename from .ci_support/linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
rename to .ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
index 681d16881..9c0ca8998 100644
--- a/.ci_support/linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
+++ b/.ci_support/linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml b/.ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
similarity index 82%
rename from .ci_support/linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
rename to .ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
index 64d12e0eb..7cc0e9288 100644
--- a/.ci_support/linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11.yaml
+++ b/.ci_support/linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml b/.ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
similarity index 82%
rename from .ci_support/linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
rename to .ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
index 02e433752..8e580bd53 100644
--- a/.ci_support/linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13.yaml
+++ b/.ci_support/linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13.yaml
@@ -58,10 +58,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -76,6 +92,8 @@ zip_keys:
- cuda_compiler
- cuda_compiler_version
- docker_image
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/migrations/numpy2.yaml b/.ci_support/migrations/numpy2.yaml
new file mode 100644
index 000000000..d70edeb0d
--- /dev/null
+++ b/.ci_support/migrations/numpy2.yaml
@@ -0,0 +1,93 @@
+__migrator:
+ build_number: 1
+ kind: version
+ commit_message: |
+ Rebuild for numpy 2.0
+
+ TL;DR: The way we build against numpy has changed as of numpy 2.0. This bot
+ PR has updated the recipe to account for the changes (see below for details).
+ The numpy 2.0 package itself is currently only available from a special release
+ channel (`conda-forge/label/numpy_rc`) and will not be available on the main
+ `conda-forge` channel until the release of numpy 2.0 GA.
+
+ The biggest change is that we no longer need to use the oldest available numpy
+ version at build time in order to support old numpy versions at runtime - numpy
+ will by default use a compatible ABI for the oldest still-supported numpy versions.
+
+ Additionally, we no longer need to use `{{ pin_compatible("numpy") }}` as a
+ run requirement - this has been handled for more than two years now by a
+ run-export on the numpy package itself. The migrator will therefore remove
+ any occurrences of this.
+
+ However, by default, building against numpy 2.0 will assume that the package
+ is compatible with numpy 2.0, which is not necessarily the case. You should
+ check that the upstream package explicitly supports numpy 2.0, otherwise you
+ need to add a `- numpy <2.0dev0` run requirement until that happens (check numpy
+ issue 26191 for an overview of the most important packages).
+
+ Note that the numpy release candidate promises to be ABI-compatible with the
+ final 2.0 release. This means that building against 2.0.0rc1 produces packages
+ that can be published to our main channels.
+
+ If you already want to use the numpy 2.0 release candidate yourself, you can do
+ ```
+ conda config --add channels conda-forge/label/numpy_rc
+ ```
+ or add this channel to your `.condarc` file directly.
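+
+ For example, a `.condarc` that prefers the release-candidate label would contain:
+ ```
+ channels:
+   - conda-forge/label/numpy_rc
+   - conda-forge
+ ```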
+
+ ### To-Dos:
+ * [ ] Match run-requirements for numpy (i.e. check upstream `pyproject.toml` or however the project specifies numpy compatibility)
+ * If upstream is not yet compatible with numpy 2.0, add `numpy <2.0dev0` upper bound under `run:`.
+ * If upstream is already compatible with numpy 2.0, nothing else should be necessary in most cases.
+ * If upstream requires a minimum numpy version newer than 1.19, you can add `numpy >=x.y` under `run:`.
+ * [ ] Remove any remaining occurrences of `{{ pin_compatible("numpy") }}` that the bot may have missed.
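+
+ As an illustration (names are placeholders), a recipe whose upstream does not
+ yet support numpy 2.0 would end up with requirements like:
+ ```
+ requirements:
+   host:
+     - python
+     - numpy    # build against 2.0; numpy's run-export sets the lower bound
+   run:
+     - python
+     - numpy <2.0dev0    # only until upstream declares numpy 2.0 support
+ ```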
+
+ PS. If the build no longer compiles, this is almost certainly a sign that
+ the upstream project is not yet ready for numpy 2.0; do not close this PR until
+ a version compatible with numpy 2.0 has been released upstream and packaged on
+ this feedstock (in the meantime, you can keep the bot from reopening this PR in
+ case of git conflicts by marking it as a draft).
+
+ migration_number: 1
+ exclude:
+ # needs local overrides that get stomped on by the migrator, which then fails
+ - scipy
+ # already done, but the bot thinks it's unsolvable
+ - pandas
+ ordering:
+ # prefer channels including numpy_rc (otherwise smithy doesn't
+ # know which of the two values should be taken on merge)
+ channel_sources:
+ - conda-forge
+ - conda-forge/label/numpy_rc,conda-forge
+
+# needs to match length of zip {python, python_impl, numpy}
+# as it is in global CBC in order to override it
+numpy:
+ - 1.22 # no py38 support for numpy 2.0
+ - 2.0
+ - 2.0
+ - 2.0
+ - 2.0
+channel_sources:
+ - conda-forge/label/numpy_rc,conda-forge
+migrator_ts: 1713572489.295986
diff --git a/.ci_support/migrations/python313.yaml b/.ci_support/migrations/python313.yaml
new file mode 100644
index 000000000..119bed8a2
--- /dev/null
+++ b/.ci_support/migrations/python313.yaml
@@ -0,0 +1,42 @@
+migrator_ts: 1724712607
+__migrator:
+ commit_message: Rebuild for python 3.13
+ migration_number: 1
+ operation: key_add
+ primary_key: python
+ ordering:
+ python:
+ - 3.6.* *_cpython
+ - 3.7.* *_cpython
+ - 3.8.* *_cpython
+ - 3.9.* *_cpython
+ - 3.10.* *_cpython
+ - 3.11.* *_cpython
+ - 3.12.* *_cpython
+ - 3.13.* *_cp313 # new entry
+ - 3.6.* *_73_pypy
+ - 3.7.* *_73_pypy
+ - 3.8.* *_73_pypy
+ - 3.9.* *_73_pypy
+ paused: false
+ longterm: true
+ pr_limit: 20
+ max_solver_attempts: 3 # this will make the bot retry "not solvable" stuff 3 times
+ exclude:
+ # this shouldn't attempt to modify the python feedstocks
+ - python
+ - pypy3.6
+ - pypy-meta
+ - cross-python
+ - python_abi
+ # see https://github.com/conda-forge/scipy-feedstock/pull/283
+ - scipy
+ exclude_pinned_pkgs: false
+
+python:
+- 3.13.* *_cp313
+# additional entries to add for zip_keys
+numpy:
+- 2
+python_impl:
+- cpython
diff --git a/.ci_support/osx_64_.yaml b/.ci_support/osx_64_.yaml
index 0690a4924..81062765a 100644
--- a/.ci_support/osx_64_.yaml
+++ b/.ci_support/osx_64_.yaml
@@ -56,10 +56,26 @@ lz4_c:
- '1.10'
macos_machine:
- x86_64-apple-darwin13.4.0
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -71,6 +87,8 @@ thrift_cpp:
zip_keys:
- - c_compiler_version
- cxx_compiler_version
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/osx_arm64_.yaml b/.ci_support/osx_arm64_.yaml
index 2fa1d234b..1918b3525 100644
--- a/.ci_support/osx_arm64_.yaml
+++ b/.ci_support/osx_arm64_.yaml
@@ -56,10 +56,26 @@ lz4_c:
- '1.10'
macos_machine:
- arm64-apple-darwin20.0.0
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -71,6 +87,8 @@ thrift_cpp:
zip_keys:
- - c_compiler_version
- cxx_compiler_version
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml b/.ci_support/win_64_cuda_compiler_version11.8.yaml
similarity index 75%
rename from .ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml
rename to .ci_support/win_64_cuda_compiler_version11.8.yaml
index ce3f6bc08..541a8ca2e 100644
--- a/.ci_support/win_64_cuda_compilernvcccuda_compiler_version11.8.yaml
+++ b/.ci_support/win_64_cuda_compiler_version11.8.yaml
@@ -44,10 +44,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -59,6 +75,8 @@ thrift_cpp:
zip_keys:
- - cuda_compiler
- cuda_compiler_version
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml b/.ci_support/win_64_cuda_compiler_versionNone.yaml
similarity index 75%
rename from .ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml
rename to .ci_support/win_64_cuda_compiler_versionNone.yaml
index c22ad354c..7efeea0d3 100644
--- a/.ci_support/win_64_cuda_compilerNonecuda_compiler_versionNone.yaml
+++ b/.ci_support/win_64_cuda_compiler_versionNone.yaml
@@ -44,10 +44,26 @@ libutf8proc:
- '2.9'
lz4_c:
- '1.10'
+numpy:
+- '2.0'
+- '2.0'
+- '2.0'
+- '2'
+- '2.0'
openssl:
- '3'
orc:
- 2.0.3
+pin_run_as_build:
+ python:
+ min_pin: x.x
+ max_pin: x.x
+python:
+- 3.10.* *_cpython
+- 3.11.* *_cpython
+- 3.12.* *_cpython
+- 3.13.* *_cp313
+- 3.9.* *_cpython
re2:
- 2024.07.02
snappy:
@@ -59,6 +75,8 @@ thrift_cpp:
zip_keys:
- - cuda_compiler
- cuda_compiler_version
+- - python
+ - numpy
zlib:
- '1'
zstd:
diff --git a/.scripts/build_steps.sh b/.scripts/build_steps.sh
index 827a29fbc..1b73db5ff 100755
--- a/.scripts/build_steps.sh
+++ b/.scripts/build_steps.sh
@@ -80,12 +80,6 @@ else
command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir "${RECIPE_ROOT}" -m "${CONFIG_FILE}" || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4"
( endgroup "Inspecting artifacts" ) 2> /dev/null
- ( startgroup "Validating outputs" ) 2> /dev/null
-
- validate_recipe_outputs "${FEEDSTOCK_NAME}"
-
- ( endgroup "Validating outputs" ) 2> /dev/null
-
( startgroup "Uploading packages" ) 2> /dev/null
if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then
diff --git a/.scripts/run_osx_build.sh b/.scripts/run_osx_build.sh
index 0e3c39422..61002c334 100755
--- a/.scripts/run_osx_build.sh
+++ b/.scripts/run_osx_build.sh
@@ -104,12 +104,6 @@ else
command -v inspect_artifacts >/dev/null 2>&1 && inspect_artifacts --recipe-dir ./recipe -m ./.ci_support/${CONFIG}.yaml || echo "inspect_artifacts needs conda-forge-ci-setup >=4.9.4"
( endgroup "Inspecting artifacts" ) 2> /dev/null
- ( startgroup "Validating outputs" ) 2> /dev/null
-
- validate_recipe_outputs "${FEEDSTOCK_NAME}"
-
- ( endgroup "Validating outputs" ) 2> /dev/null
-
( startgroup "Uploading packages" ) 2> /dev/null
if [[ "${UPLOAD_PACKAGES}" != "False" ]] && [[ "${IS_PR_BUILD}" == "False" ]]; then
diff --git a/.scripts/run_win_build.bat b/.scripts/run_win_build.bat
index 283e14388..51cdfbf41 100755
--- a/.scripts/run_win_build.bat
+++ b/.scripts/run_win_build.bat
@@ -106,12 +106,6 @@ if /i "%CI%" == "azure" (
set "TEMP=%UPLOAD_TEMP%"
)
-:: Validate
-call :start_group "Validating outputs"
-validate_recipe_outputs "%FEEDSTOCK_NAME%"
-if !errorlevel! neq 0 exit /b !errorlevel!
-call :end_group
-
if /i "%UPLOAD_PACKAGES%" == "true" (
if /i "%IS_PR_BUILD%" == "false" (
call :start_group "Uploading packages"
diff --git a/README.md b/README.md
index c0f928751..ec93e4238 100644
--- a/README.md
+++ b/README.md
@@ -94,6 +94,42 @@ Package license: Apache-2.0
Summary: C++ libraries for Apache Arrow Substrait
+About pyarrow-core
+------------------
+
+Home: http://github.com/apache/arrow
+
+Package license: Apache-2.0
+
+Summary: Python libraries for Apache Arrow Core
+
+About pyarrow
+-------------
+
+Home: http://github.com/apache/arrow
+
+Package license: Apache-2.0
+
+Summary: Python libraries for Apache Arrow with default capabilities
+
+About pyarrow-all
+-----------------
+
+Home: http://github.com/apache/arrow
+
+Package license: Apache-2.0
+
+Summary: Python libraries for Apache Arrow with all capabilities
+
+About pyarrow-tests
+-------------------
+
+Home: http://github.com/apache/arrow
+
+Package license: Apache-2.0
+
+Summary: Python test files for Apache Arrow
+
Current build status
====================
@@ -112,45 +148,45 @@ Current build status
Variant | Status |
- linux_64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 |
+ linux_64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 |
- linux_64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13 |
+ linux_64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 |
- linux_aarch64_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 |
+ linux_aarch64_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 |
- linux_aarch64_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13 |
+ linux_aarch64_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 |
- linux_ppc64le_c_compiler_version11cuda_compiler_version11.8cxx_compiler_version11 |
+ linux_ppc64le_c_compiler_version11cuda_compilernvcccuda_compiler_version11.8cxx_compiler_version11 |
- linux_ppc64le_c_compiler_version13cuda_compiler_versionNonecxx_compiler_version13 |
+ linux_ppc64le_c_compiler_version13cuda_compilerNonecuda_compiler_versionNonecxx_compiler_version13 |
@@ -168,17 +204,17 @@ Current build status
- win_64_cuda_compilerNonecuda_compiler_versionNone |
+ win_64_cuda_compiler_version11.8 |
- win_64_cuda_compilernvcccuda_compiler_version11.8 |
+ win_64_cuda_compiler_versionNone |
@@ -204,6 +240,10 @@ Current release info
| [![Conda Recipe](https://img.shields.io/badge/recipe-libarrow--gandiva-green.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libarrow-gandiva.svg)](https://anaconda.org/conda-forge/libarrow-gandiva) |
| [![Conda Recipe](https://img.shields.io/badge/recipe-libarrow--substrait-green.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libarrow-substrait.svg)](https://anaconda.org/conda-forge/libarrow-substrait) |
| [![Conda Recipe](https://img.shields.io/badge/recipe-libparquet-green.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/libparquet.svg)](https://anaconda.org/conda-forge/libparquet) |
+| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow-green.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow.svg)](https://anaconda.org/conda-forge/pyarrow) |
+| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--all-green.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-all.svg)](https://anaconda.org/conda-forge/pyarrow-all) |
+| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--core-green.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-core.svg)](https://anaconda.org/conda-forge/pyarrow-core) |
+| [![Conda Recipe](https://img.shields.io/badge/recipe-pyarrow--tests-green.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda Downloads](https://img.shields.io/conda/dn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda Version](https://img.shields.io/conda/vn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) | [![Conda Platforms](https://img.shields.io/conda/pn/conda-forge/pyarrow-tests.svg)](https://anaconda.org/conda-forge/pyarrow-tests) |
Installing arrow-cpp
====================
@@ -215,16 +255,16 @@ conda config --add channels conda-forge
conda config --set channel_priority strict
```
-Once the `conda-forge` channel has been enabled, `apache-arrow-proc, libarrow, libarrow-acero, libarrow-all, libarrow-dataset, libarrow-flight, libarrow-flight-sql, libarrow-gandiva, libarrow-substrait, libparquet` can be installed with `conda`:
+Once the `conda-forge` channel has been enabled, `apache-arrow-proc, libarrow, libarrow-acero, libarrow-all, libarrow-dataset, libarrow-flight, libarrow-flight-sql, libarrow-gandiva, libarrow-substrait, libparquet, pyarrow, pyarrow-all, pyarrow-core, pyarrow-tests` can be installed with `conda`:
```
-conda install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet
+conda install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet pyarrow pyarrow-all pyarrow-core pyarrow-tests
```
or with `mamba`:
```
-mamba install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet
+mamba install apache-arrow-proc libarrow libarrow-acero libarrow-all libarrow-dataset libarrow-flight libarrow-flight-sql libarrow-gandiva libarrow-substrait libparquet pyarrow pyarrow-all pyarrow-core pyarrow-tests
```
It is possible to list all of the versions of `apache-arrow-proc` available on your platform with `conda`:
diff --git a/conda-forge.yml b/conda-forge.yml
index 6f2952057..fc6a89bae 100644
--- a/conda-forge.yml
+++ b/conda-forge.yml
@@ -1,6 +1,8 @@
azure:
free_disk_space: true
- max_parallel: 20
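+ # picked up by conda-smithy and rendered as the "Create swap file" step in azure-pipelines-linux.yml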
+ settings_linux:
+ swapfile_size: 8GiB
settings_win:
variables:
CONDA_BLD_PATH: C:\\bld\\
diff --git a/recipe/build-pyarrow.bat b/recipe/build-pyarrow.bat
new file mode 100644
index 000000000..6218eebb9
--- /dev/null
+++ b/recipe/build-pyarrow.bat
@@ -0,0 +1,40 @@
+@echo on
+
+pushd "%SRC_DIR%"\python
+
+SET ARROW_HOME=%LIBRARY_PREFIX%
+SET SETUPTOOLS_SCM_PRETEND_VERSION=%PKG_VERSION%
+SET PYARROW_BUILD_TYPE=release
+SET PYARROW_WITH_ACERO=1
+SET PYARROW_WITH_DATASET=1
+SET PYARROW_WITH_FLIGHT=1
+SET PYARROW_WITH_GANDIVA=1
+SET PYARROW_WITH_GCS=1
+SET PYARROW_WITH_HDFS=1
+SET PYARROW_WITH_ORC=1
+SET PYARROW_WITH_PARQUET=1
+SET PYARROW_WITH_PARQUET_ENCRYPTION=1
+SET PYARROW_WITH_S3=1
+SET PYARROW_WITH_SUBSTRAIT=1
+SET PYARROW_CMAKE_GENERATOR=Ninja
+
+:: Enable CUDA support
+if "%cuda_compiler_version%"=="None" (
+ set "PYARROW_WITH_CUDA=0"
+) else (
+ set "PYARROW_WITH_CUDA=1"
+)
+
+%PYTHON% setup.py ^
+ build_ext ^
+ install --single-version-externally-managed ^
+ --record=record.txt
+if %ERRORLEVEL% neq 0 exit 1
+popd
+
+if [%PKG_NAME%] NEQ [pyarrow-tests] (
+ rd /s /q %SP_DIR%\pyarrow\tests
+)
+
+:: generated by setup.py
+rmdir .\python\build /s /q
diff --git a/recipe/build-pyarrow.sh b/recipe/build-pyarrow.sh
new file mode 100644
index 000000000..8a449b63c
--- /dev/null
+++ b/recipe/build-pyarrow.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+set -ex
+
+# Build dependencies
+export ARROW_HOME=$PREFIX
+export PARQUET_HOME=$PREFIX
+export SETUPTOOLS_SCM_PRETEND_VERSION=$PKG_VERSION
+export PYARROW_BUILD_TYPE=release
+export PYARROW_WITH_ACERO=1
+export PYARROW_WITH_AZURE=1
+export PYARROW_WITH_DATASET=1
+export PYARROW_WITH_FLIGHT=1
+export PYARROW_WITH_GANDIVA=1
+export PYARROW_WITH_GCS=1
+export PYARROW_WITH_HDFS=1
+export PYARROW_WITH_ORC=1
+export PYARROW_WITH_PARQUET=1
+export PYARROW_WITH_PARQUET_ENCRYPTION=1
+export PYARROW_WITH_S3=1
+export PYARROW_WITH_SUBSTRAIT=1
+export PYARROW_CMAKE_GENERATOR=Ninja
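+# avoid baking a fixed SIMD level into the binaries; keeps the packages portable across CPUs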
+export PYARROW_CMAKE_OPTIONS="-DARROW_SIMD_LEVEL=NONE"
+BUILD_EXT_FLAGS=""
+
+# Enable CUDA support
+if [[ ! -z "${cuda_compiler_version+x}" && "${cuda_compiler_version}" != "None" ]]; then
+ export PYARROW_WITH_CUDA=1
+ if [[ "${build_platform}" != "${target_platform}" ]]; then
+ export CUDAToolkit_ROOT=${CUDA_HOME}
+ export CMAKE_LIBRARY_PATH=${CONDA_BUILD_SYSROOT}/lib
+ fi
+else
+ export PYARROW_WITH_CUDA=0
+fi
+
+# Resolve: CMake Error at cmake_modules/SetupCxxFlags.cmake:338 (message): Unsupported arch flag: -march=.
+if [[ "${target_platform}" == "linux-aarch64" ]]; then
+ export PYARROW_CMAKE_OPTIONS="-DARROW_ARMV8_ARCH=armv8-a ${PYARROW_CMAKE_OPTIONS}"
+fi
+
+if [[ "${target_platform}" == osx-* ]]; then
+ # See https://conda-forge.org/docs/maintainer/knowledge_base.html#newer-c-features-with-old-sdk
+ CXXFLAGS="${CXXFLAGS} -D_LIBCPP_DISABLE_AVAILABILITY"
+fi
+
+if [[ "${target_platform}" == "linux-aarch64" ]] || [[ "${target_platform}" == "linux-ppc64le" ]]; then
+ # Limit number of threads used to avoid hardware oversubscription
+ export CMAKE_BUILD_PARALLEL_LEVEL=4
+fi
+
+cd python
+
+$PYTHON setup.py \
+ build_ext \
+ install --single-version-externally-managed \
+ --record=record.txt
+
+if [[ "$PKG_NAME" != "pyarrow-tests" ]]; then
+ rm -r ${SP_DIR}/pyarrow/tests
+fi
+
+# generated by setup.py
+rm -rf build
+cd ..
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index 53b6c2159..30f674df6 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -1,4 +1,4 @@
-{% set version = "18.1.0" %}
+{% set version = "19.0.0.rc0" %}
{% set cuda_enabled = cuda_compiler_version != "None" %}
{% set build_ext_version = "5.0.0" %}
{% set build_ext = "cuda" if cuda_enabled else "cpu" %}
@@ -13,29 +13,27 @@ package:
version: {{ version }}
source:
- - url: https://www.apache.org/dyn/closer.lua/arrow/arrow-{{ version }}/apache-arrow-{{ version }}.tar.gz?action=download
- fn: apache-arrow-{{ version }}.tar.gz
- sha256: 2dc8da5f8796afe213ecc5e5aba85bb82d91520eff3cf315784a52d0fa61d7fc
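+ # for a release candidate like 19.0.0.rc0, the tag is apache-arrow-19.0.0-rc0 but the tarball inside is apache-arrow-19.0.0.tar.gz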
+ - url: https://github.com/apache/arrow/releases/download/apache-arrow-{{ version.replace(".rc", "-rc") }}/apache-arrow-{{ version.split(".rc")[0] }}.tar.gz
+ sha256: f89b93f39954740f7184735ff1e1d3b5be2640396febc872c4955274a011f56b
patches:
- # backport https://github.com/apache/arrow/pull/44621
- - patches/0001-GH-43808-C-skip-0117-in-StrptimeZoneOffset-for-old-g.patch
# skip gcsfs tests due to missing `storage-testbench`
- - patches/0002-disable-gcsfs_test.patch
+ - patches/0001-disable-gcsfs_test.patch
# upstream problems on with s3 tests on osx, see
# https://github.com/apache/arrow/issues/35587
- - patches/0003-skip-NonExistentBucket-test-on-osx.patch
- # backport https://github.com/apache/arrow/pull/45057
- - patches/0004-GH-45053-C-Add-support-for-Boost-1.87.0-45057.patch
+ - patches/0002-skip-NonExistentBucket-test-on-osx.patch
+ # backport https://github.com/apache/arrow/pull/45232
+ - patches/0003-MINOR-C-Disable-Flight-test.patch
# testing-submodules not part of release tarball
- git_url: https://github.com/apache/arrow-testing.git
git_rev: 4d209492d514c2d3cb2d392681b9aa00e6d8da1c
folder: testing
- git_url: https://github.com/apache/parquet-testing.git
- git_rev: cb7a9674142c137367bf75a01b79c6e214a73199
+ git_rev: c7cf1374cf284c0c73024cd1437becea75558bf8
folder: cpp/submodules/parquet-testing
build:
- number: 8
+ number: 0
# for cuda support, building with one version is enough to be compatible with
# all later versions, since arrow is only using libcuda, and not libcudart.
skip: true # [cuda_compiler_version not in ("None", cuda_compiler_version_min)]
@@ -659,6 +656,294 @@ outputs:
- LICENSE.txt
summary: C++ libraries for Apache Parquet
+ - name: pyarrow-core
+ script: build-pyarrow.sh # [unix]
+ script: build-pyarrow.bat # [win]
+ version: {{ version }}
+ build:
+ string: py{{ CONDA_PY }}h{{ PKG_HASH }}_{{ PKG_BUILDNUM }}_{{ build_ext }}
+ ignore_run_exports_from:
+ - {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
+ # we don't need numpy at runtime, just to build
+ - numpy
+ rpaths:
+ - lib/
+ - {{ SP_DIR }}/pyarrow
+ missing_dso_whitelist:
+ # not actually missing, but installed into SP_DIR, see tests
+ - '*/arrow_python.dll' # [win]
+ - '*/arrow_python_flight.dll' # [win]
+ # pyarrow-core is built with all capabilities, but we do not ship the
+ # corresponding libraries, in order to keep the core footprint small.
+ - 'lib/libarrow_acero.*' # [unix]
+ - 'lib/libarrow_dataset.*' # [unix]
+ - 'lib/libarrow_substrait.*' # [unix]
+ - 'lib/libarrow_flight.*' # [unix]
+ - 'lib/libparquet.*' # [unix]
+ - 'lib/libgandiva.*' # [unix]
+ - 'Library/lib/arrow_acero.dll' # [win]
+ - 'Library/lib/arrow_dataset.dll' # [win]
+ - 'Library/lib/arrow_substrait.dll' # [win]
+ - 'Library/lib/arrow_flight.dll' # [win]
+ - 'Library/lib/parquet.dll' # [win]
+ - 'Library/lib/gandiva.dll' # [win]
+ requirements:
+ build:
+ - {{ compiler("c") }}
+ - {{ stdlib("c") }}
+ - {{ compiler("cxx") }}
+ # pyarrow does not require nvcc but it needs to link against libraries in libarrow=*=*cuda
+ - {{ compiler("cuda") }} # [cuda_compiler_version != "None"]
+ - python # [build_platform != target_platform]
+ - cross-python_{{ target_platform }} # [build_platform != target_platform]
+ - cython # [build_platform != target_platform]
+ - numpy # [build_platform != target_platform]
+ - cmake
+ - ninja
+ host:
+ # We add all libarrow package dependencies on host in order
+ # to build pyarrow once with all capabilities.
+ - {{ pin_subpackage("libarrow-all", exact=True) }}
+ - clangdev {{ llvm_version }}
+ - llvmdev {{ llvm_version }}
+ - zlib
+ - cython
+ - numpy
+ - python
+ - setuptools
+ - setuptools-scm
+ run:
+ # We ignore the run-exports from libarrow-all and depend only on libarrow,
+ # because we don't want the other libraries to be installed at runtime for
+ # pyarrow-core, where the aim is a low storage footprint.
+ - {{ pin_subpackage("libarrow", exact=True) }}
+ - python
+ # this is redundant with libarrow, but we want smithy to pick up that
+ # cuda_compiler_version_min is present, to populate the CI configs
+ - __cuda >={{ cuda_compiler_version_min }} # [cuda_compiler_version != "None"]
+ run_constrained:
+ - apache-arrow-proc =*={{ build_ext }}
+ # keep lower pin aligned with run_exports from numpy
+ # https://github.com/conda-forge/numpy-feedstock/blob/main/recipe/meta.yaml
+ - numpy >=1.21,<3
+
+ test:
+ imports:
+ - pyarrow
+ # Compute can be imported but the underlying libarrow_acero is not present.
+ - pyarrow.compute
+ - pyarrow.orc
+ - pyarrow.fs
+ - pyarrow._s3fs
+ - pyarrow._hdfs
+ # We can only test importing the cuda package; we cannot exercise it when
+ # no CUDA device is available, as is the case in CI.
+ # On Windows, we cannot even do that due to `nvcuda.dll` not being found, see
+ # https://conda-forge.org/docs/maintainer/knowledge_base.html#nvcuda-dll-cannot-be-found-on-windows
+ # However, we check below for (at least) the presence of a correctly-compiled module
+ - pyarrow.cuda # [cuda_compiler_version != "None" and not win]
+ commands:
+ # libraries that depend on python (and hence aren't in libarrow itself)
+ - test -f ${SP_DIR}/pyarrow/libarrow_python.so # [linux]
+ - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.so # [linux]
+ - test -f ${SP_DIR}/pyarrow/libarrow_python_parquet_encryption.so # [linux]
+ - test -f ${SP_DIR}/pyarrow/libarrow_python.dylib # [osx]
+ - test -f ${SP_DIR}/pyarrow/libarrow_python_flight.dylib # [osx]
+ - test -f ${SP_DIR}/pyarrow/libarrow_python_parquet_encryption.dylib # [osx]
+ - if not exist %SP_DIR%\pyarrow\arrow_python.dll exit 1 # [win]
+ - if not exist %SP_DIR%\pyarrow\arrow_python_flight.dll exit 1 # [win]
+ - if not exist %SP_DIR%\pyarrow\arrow_python_parquet_encryption.dll exit 1 # [win]
+
+ - test -f ${SP_DIR}/pyarrow/include/arrow/python/pyarrow.h # [unix]
+ - if not exist %SP_DIR%\pyarrow\include\arrow\python\pyarrow.h exit 1 # [win]
+
+ - test ! -f ${SP_DIR}/pyarrow/tests/test_array.py # [unix]
+ - if exist %SP_DIR%/pyarrow/tests/test_array.py exit 1 # [win]
+ # Need to remove dot from PY_VER; %MYVAR:x=y% replaces "x" in %MYVAR% with "y"
+ - if not exist %SP_DIR%/pyarrow/_cuda.cp%PY_VER:.=%-win_amd64.pyd exit 1 # [win and cuda_compiler_version != "None"]
+
+ # Expected not included libraries
+ - test ! -f $PREFIX/lib/libarrow_acero${SHLIB_EXT} # [unix]
+ - test ! -f $PREFIX/lib/libarrow_dataset${SHLIB_EXT} # [unix]
+ - test ! -f $PREFIX/lib/libarrow_flight${SHLIB_EXT} # [unix]
+ - test ! -f $PREFIX/lib/libgandiva${SHLIB_EXT} # [unix]
+ - test ! -f $PREFIX/lib/libparquet${SHLIB_EXT} # [unix]
+
+ about:
+ home: http://github.com/apache/arrow
+ license: Apache-2.0
+ license_file:
+ - LICENSE.txt
+ summary: Python libraries for Apache Arrow Core
+
+ - name: pyarrow
+ version: {{ version }}
+ requirements:
+ host:
+ # only necessary for run-exports
+ - python
+ run:
+ # Default doesn't contain flight, flight-sql and gandiva
+ - {{ pin_subpackage("libarrow-acero", exact=True) }}
+ - {{ pin_subpackage("libarrow-dataset", exact=True) }}
+ - {{ pin_subpackage("libarrow-substrait", exact=True) }}
+ - {{ pin_subpackage("libparquet", exact=True) }}
+ # do not use pin_compatible because pyarrow-core has CUDA/non-CUDA variants
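+ # e.g. this matches builds like py312h<hash>_0_cpu as well as py312h<hash>_0_cuda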
+ - pyarrow-core {{ version }}=*_{{ PKG_BUILDNUM }}_*
+ - python
+
+ test:
+ files:
+ - test_read_parquet.py
+ imports:
+ # default pyarrow contains parquet
+ - pyarrow.dataset
+ - pyarrow.parquet
+ commands:
+ # Expected not included libraries
+ - test ! -f $PREFIX/lib/libarrow_flight${SHLIB_EXT} # [unix]
+ - test ! -f $PREFIX/lib/libgandiva${SHLIB_EXT} # [unix]
+
+ - python test_read_parquet.py
+
+ about:
+ home: http://github.com/apache/arrow
+ license: Apache-2.0
+ license_file:
+ - LICENSE.txt
+ summary: Python libraries for Apache Arrow with default capabilities
+
+ - name: pyarrow-all
+ version: {{ version }}
+ requirements:
+ host:
+ # only necessary for run-exports
+ - python
+ run:
+ - {{ pin_subpackage("libarrow-flight", exact=True) }}
+ - {{ pin_subpackage("libarrow-flight-sql", exact=True) }}
+ - {{ pin_subpackage("libarrow-gandiva", exact=True) }}
+ - pyarrow {{ version }}=*_{{ PKG_BUILDNUM }}
+ - python
+
+ test:
+ imports:
+ - pyarrow.flight
+ - pyarrow.gandiva
+ about:
+ home: http://github.com/apache/arrow
+ license: Apache-2.0
+ license_file:
+ - LICENSE.txt
+ summary: Python libraries for Apache Arrow with all capabilities
+
+ - name: pyarrow-tests
+ script: build-pyarrow.sh # [unix]
+ script: build-pyarrow.bat # [win]
+ version: {{ version }}
+ build:
+ skip: true # [cuda_compiler_version != "None"]
+ requirements:
+ build:
+ - {{ compiler("c") }}
+ - {{ stdlib("c") }}
+ - {{ compiler("cxx") }}
+ - python # [build_platform != target_platform]
+ - cross-python_{{ target_platform }} # [build_platform != target_platform]
+ - cython # [build_platform != target_platform]
+ - numpy # [build_platform != target_platform]
+ - cmake
+ - ninja
+ host:
+ - {{ pin_subpackage("libarrow-all", exact=True) }}
+ - pyarrow-all {{ version }}=*_{{ PKG_BUILDNUM }}
+ - clangdev {{ llvm_version }}
+ - llvmdev {{ llvm_version }}
+ - zlib
+ - cython
+ - numpy
+ - python
+ - setuptools
+ - setuptools-scm
+ run:
+ - pyarrow-all {{ version }}=*_{{ PKG_BUILDNUM }}
+ - python
+
+ {% if not (aarch64 or ppc64le) or py == 311 %}
+ test:
+ requires:
+ # test_cpp_extension_in_python requires a compiler
+ - {{ compiler("cxx") }} # [linux]
+ - pytest
+ - boto3
+ - cffi
+ - cloudpickle
+ - cython
+ - fastparquet
+ - fsspec
+ - hypothesis
+ - minio-server
+ - pandas
+ - s3fs >=2023
+ - scipy
+ - sparse # [py<313]
+ # these are generally (far) behind on migrating abseil/grpc/protobuf,
+ # and using them as test dependencies blocks the migrator unnecessarily
+ # - pytorch
+ # - tensorflow
+ # we're not building java bindings
+ # - jpype1
+ # doesn't get picked up correctly
+ # - libhdfs3
+ source_files:
+ - cpp/submodules/parquet-testing/data
+ - testing/data
+ commands:
+ - cd ${SP_DIR} # [unix]
+ - cd %SP_DIR% # [win]
+ - export ARROW_TEST_DATA="${SRC_DIR}/testing/data" # [unix]
+ - set "ARROW_TEST_DATA=%SRC_DIR%\testing\data" # [win]
+ - export PARQUET_TEST_DATA="${SRC_DIR}/cpp/submodules/parquet-testing/data" # [unix]
+ - set "PARQUET_TEST_DATA=%SRC_DIR%\cpp\submodules\parquet-testing\data" # [win]
+
+ {% set tests_to_skip = "_not_a_real_test" %}
+ # we do not have GPUs in CI --> cannot test cuda
+ {% set tests_to_skip = tests_to_skip + " or test_cuda" + " or test_dlpack_cuda_not_supported"%}
+ # skip tests that raise SIGINT and crash the test suite
+ {% set tests_to_skip = tests_to_skip + " or (test_csv and test_cancellation)" %} # [linux]
+ {% set tests_to_skip = tests_to_skip + " or (test_flight and test_interrupt)" %} # [linux]
+ # skip tests that make invalid(-for-conda) assumptions about the compilers setup
+ {% set tests_to_skip = tests_to_skip + " or test_cython_api" %} # [unix]
+ {% set tests_to_skip = tests_to_skip + " or test_visit_strings" %} # [unix]
+ # skip tests that cannot succeed in emulation
+ {% set tests_to_skip = tests_to_skip + " or test_debug_memory_pool_disabled" %} # [aarch64 or ppc64le]
+ {% set tests_to_skip = tests_to_skip + " or test_env_var_io_thread_count" %} # [aarch64 or ppc64le]
+ # vvvvvvv TESTS THAT SHOULDN'T HAVE TO BE SKIPPED vvvvvvv
+ # https://github.com/apache/arrow/issues/45229
+ {% set tests_to_skip = tests_to_skip + " or test_sparse_coo_tensor_scipy_roundtrip" %}
+ # https://github.com/apache/arrow/issues/43800
+ {% set tests_to_skip = tests_to_skip + " or test_cpp_extension_in_python" %} # [osx]
+ # https://github.com/apache/arrow/issues/43356
+ {% set tests_to_skip = tests_to_skip + " or (test_compute and test_assume_timezone)" %} # [aarch64 or ppc64le]
+ {% set tests_to_skip = tests_to_skip + " or (test_compute and test_strftime)" %} # [aarch64 or ppc64le]
+ {% set tests_to_skip = tests_to_skip + " or (test_compute and test_round_temporal)" %} # [aarch64 or ppc64le]
+ {% set tests_to_skip = tests_to_skip + " or test_extract_datetime_components " %} # [aarch64 or ppc64le]
+ # flaky test that fails regularly on aarch
+ {% set tests_to_skip = tests_to_skip + " or test_feather_format[serial]" %} # [aarch64 or ppc64le]
+ # gandiva tests are segfaulting on ppc
+ {% set tests_to_skip = tests_to_skip + " or test_gandiva" %} # [ppc64le]
+ # ^^^^^^^ TESTS THAT SHOULDN'T HAVE TO BE SKIPPED ^^^^^^^
+ - pytest pyarrow/ -rfEs -k "not ({{ tests_to_skip }})"
+ {% endif %}
+
+ about:
+ home: http://github.com/apache/arrow
+ license: Apache-2.0
+ license_file:
+ - LICENSE.txt
+ summary: Python test files for Apache Arrow
+
about:
home: http://github.com/apache/arrow
license: Apache-2.0
diff --git a/recipe/patches/0001-GH-43808-C-skip-0117-in-StrptimeZoneOffset-for-old-g.patch b/recipe/patches/0001-GH-43808-C-skip-0117-in-StrptimeZoneOffset-for-old-g.patch
deleted file mode 100644
index 719abd7e5..000000000
--- a/recipe/patches/0001-GH-43808-C-skip-0117-in-StrptimeZoneOffset-for-old-g.patch
+++ /dev/null
@@ -1,62 +0,0 @@
-From b43fe5fba8ae2d1508a970d7e8b2f548ff8bf1be Mon Sep 17 00:00:00 2001
-From: h-vetinari
-Date: Fri, 15 Nov 2024 19:38:40 +1100
-Subject: [PATCH 1/4] GH-43808: [C++] skip `-0117` in StrptimeZoneOffset for
- old glibc (#44621)
-
-### Rationale for this change
-
-Enable tests for libarrow in conda-forge: https://github.com/apache/arrow/issues/35587
-
-### What changes are included in this PR?
-
-old glibc does not actually support timezones like `-0117` (used in `StrptimeZoneOffset` test). The exact lower bound for glibc is hard for me to determine; I know that it passes with 2.28 and that it fails with 2.17. Anything in between is an open question. I went with the conservative option here.
-
-### Are these changes tested?
-
-Tested in https://github.com/conda-forge/arrow-cpp-feedstock/pull/1058
-
-### Are there any user-facing changes?
-
-* GitHub Issue: #43808
-
-Lead-authored-by: H. Vetinari
-Co-authored-by: Sutou Kouhei
-Signed-off-by: Sutou Kouhei
----
- cpp/src/arrow/util/value_parsing_test.cc | 19 ++++++++++++++++---
- 1 file changed, 16 insertions(+), 3 deletions(-)
-
-diff --git a/cpp/src/arrow/util/value_parsing_test.cc b/cpp/src/arrow/util/value_parsing_test.cc
-index 7cd1ab1e2..a833d266a 100644
---- a/cpp/src/arrow/util/value_parsing_test.cc
-+++ b/cpp/src/arrow/util/value_parsing_test.cc
-@@ -838,12 +838,25 @@ TEST(TimestampParser, StrptimeZoneOffset) {
- std::string format = "%Y-%d-%m %H:%M:%S%z";
- auto parser = TimestampParser::MakeStrptime(format);
-
-+ std::vector values = {
-+ "2018-01-01 00:00:00+0000",
-+ "2018-01-01 00:00:00+0100",
-+#if defined(__GLIBC__) && defined(__GLIBC_MINOR__)
-+// glibc < 2.28 doesn't support "-0117" timezone offset.
-+// See also: https://github.com/apache/arrow/issues/43808
-+# if ((__GLIBC__ == 2) && (__GLIBC_MINOR__ >= 28)) || (__GLIBC__ >= 3)
-+ "2018-01-01 00:00:00-0117",
-+# endif
-+#else
-+ "2018-01-01 00:00:00-0117",
-+#endif
-+ "2018-01-01 00:00:00+0130"
-+ };
-+
- // N.B. GNU %z supports ISO8601 format while BSD %z supports only
- // +HHMM or -HHMM and POSIX doesn't appear to define %z at all
- for (auto unit : TimeUnit::values()) {
-- for (const std::string value :
-- {"2018-01-01 00:00:00+0000", "2018-01-01 00:00:00+0100",
-- "2018-01-01 00:00:00+0130", "2018-01-01 00:00:00-0117"}) {
-+ for (const std::string& value : values) {
- SCOPED_TRACE(value);
- int64_t converted = 0;
- int64_t expected = 0;
diff --git a/recipe/patches/0002-disable-gcsfs_test.patch b/recipe/patches/0001-disable-gcsfs_test.patch
similarity index 90%
rename from recipe/patches/0002-disable-gcsfs_test.patch
rename to recipe/patches/0001-disable-gcsfs_test.patch
index 5ce6b1a78..d2d95490d 100644
--- a/recipe/patches/0002-disable-gcsfs_test.patch
+++ b/recipe/patches/0001-disable-gcsfs_test.patch
@@ -1,7 +1,7 @@
-From a65aece6fa1332e64722b8abc228003941c4f384 Mon Sep 17 00:00:00 2001
+From 1c066d1ef45141d366caf4507d2fb090689a0a06 Mon Sep 17 00:00:00 2001
From: "H. Vetinari"
Date: Sat, 2 Nov 2024 15:41:34 +1100
-Subject: [PATCH 2/4] disable gcsfs_test
+Subject: [PATCH 1/3] disable gcsfs_test
it cannot work unless we package https://github.com/googleapis/storage-testbench,
which however has extremely tight dependencies on protobuf etc., making it very
diff --git a/recipe/patches/0003-skip-NonExistentBucket-test-on-osx.patch b/recipe/patches/0002-skip-NonExistentBucket-test-on-osx.patch
similarity index 76%
rename from recipe/patches/0003-skip-NonExistentBucket-test-on-osx.patch
rename to recipe/patches/0002-skip-NonExistentBucket-test-on-osx.patch
index 38ab2c370..81d7edeca 100644
--- a/recipe/patches/0003-skip-NonExistentBucket-test-on-osx.patch
+++ b/recipe/patches/0002-skip-NonExistentBucket-test-on-osx.patch
@@ -1,17 +1,17 @@
-From 98de50043dbed77b263ce1a5ee3952930471d601 Mon Sep 17 00:00:00 2001
+From 25e8e519725d4b9219721b7d48105ae649e5792b Mon Sep 17 00:00:00 2001
From: "H. Vetinari"
Date: Sun, 24 Nov 2024 20:22:35 +1100
-Subject: [PATCH 3/4] skip NonExistentBucket test on osx
+Subject: [PATCH 2/3] skip NonExistentBucket test on osx
---
cpp/src/arrow/filesystem/s3fs_test.cc | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/cpp/src/arrow/filesystem/s3fs_test.cc b/cpp/src/arrow/filesystem/s3fs_test.cc
-index 43091aaa9..cf179551f 100644
+index 3082ecb78..ce910e8d1 100644
--- a/cpp/src/arrow/filesystem/s3fs_test.cc
+++ b/cpp/src/arrow/filesystem/s3fs_test.cc
-@@ -375,6 +375,10 @@ TEST_F(S3RegionResolutionTest, RestrictedBucket) {
+@@ -431,6 +431,10 @@ TEST_F(S3RegionResolutionTest, RestrictedBucket) {
}
TEST_F(S3RegionResolutionTest, NonExistentBucket) {
diff --git a/recipe/patches/0003-MINOR-C-Disable-Flight-test.patch b/recipe/patches/0003-MINOR-C-Disable-Flight-test.patch
new file mode 100644
index 000000000..d074a4ad8
--- /dev/null
+++ b/recipe/patches/0003-MINOR-C-Disable-Flight-test.patch
@@ -0,0 +1,28 @@
+From 66a9cf9de4991c3e48c553772763b2eda5a7b6de Mon Sep 17 00:00:00 2001
+From: David Li
+Date: Sun, 12 Jan 2025 20:16:43 -0500
+Subject: [PATCH 3/3] MINOR: [C++] Disable Flight test
+
+---
+ cpp/src/arrow/flight/test_definitions.h | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/cpp/src/arrow/flight/test_definitions.h b/cpp/src/arrow/flight/test_definitions.h
+index 1e0e8c209..375675784 100644
+--- a/cpp/src/arrow/flight/test_definitions.h
++++ b/cpp/src/arrow/flight/test_definitions.h
+@@ -306,12 +306,13 @@ class ARROW_FLIGHT_EXPORT AsyncClientTest : public FlightTest {
+ std::unique_ptr server_;
+ };
+
++// XXX: https://github.com/apache/arrow/issues/45120
+ #define ARROW_FLIGHT_TEST_ASYNC_CLIENT(FIXTURE) \
+ static_assert(std::is_base_of::value, \
+ ARROW_STRINGIFY(FIXTURE) " must inherit from AsyncClientTest"); \
+ TEST_F(FIXTURE, TestGetFlightInfo) { TestGetFlightInfo(); } \
+ TEST_F(FIXTURE, TestGetFlightInfoFuture) { TestGetFlightInfoFuture(); } \
+- TEST_F(FIXTURE, TestListenerLifetime) { TestListenerLifetime(); }
++ TEST_F(FIXTURE, DISABLED_TestListenerLifetime) { TestListenerLifetime(); }
+
+ } // namespace flight
+ } // namespace arrow
diff --git a/recipe/patches/0004-GH-45053-C-Add-support-for-Boost-1.87.0-45057.patch b/recipe/patches/0004-GH-45053-C-Add-support-for-Boost-1.87.0-45057.patch
deleted file mode 100644
index a55ee8928..000000000
--- a/recipe/patches/0004-GH-45053-C-Add-support-for-Boost-1.87.0-45057.patch
+++ /dev/null
@@ -1,49 +0,0 @@
-From 48d6ad09905945f1beb8c3a3c15d5b32a697428c Mon Sep 17 00:00:00 2001
-From: Sutou Kouhei
-Date: Wed, 18 Dec 2024 14:38:09 +0900
-Subject: [PATCH 4/4] GH-45053: [C++] Add support for Boost 1.87.0 (#45057)
-
-### Rationale for this change
-
-Boost 1.87.0 removed `BOOST_PROCESS_V2_ASIO_NAMESPACE`:
-https://github.com/boostorg/process/commit/e827d145424ede0f912b10d4e2800e1da9a2867d
-
-### What changes are included in this PR?
-
-Use `BOOST_PROCESS_V2_NAMESPACE::net` instead.
-
-### Are these changes tested?
-
-Yes.
-
-### Are there any user-facing changes?
-
-No.
-* GitHub Issue: #45053
-
-Authored-by: Sutou Kouhei
-Signed-off-by: Sutou Kouhei
----
- cpp/src/arrow/testing/process.cc | 7 ++++++-
- 1 file changed, 6 insertions(+), 1 deletion(-)
-
-diff --git a/cpp/src/arrow/testing/process.cc b/cpp/src/arrow/testing/process.cc
-index 133768ff0..57df0196c 100644
---- a/cpp/src/arrow/testing/process.cc
-+++ b/cpp/src/arrow/testing/process.cc
-@@ -85,9 +85,14 @@
- # include
-
- # ifdef BOOST_PROCESS_USE_V2
--namespace asio = BOOST_PROCESS_V2_ASIO_NAMESPACE;
- namespace process = BOOST_PROCESS_V2_NAMESPACE;
- namespace filesystem = process::filesystem;
-+// For Boost < 1.87.0
-+# ifdef BOOST_PROCESS_V2_ASIO_NAMESPACE
-+namespace asio = BOOST_PROCESS_V2_ASIO_NAMESPACE;
-+# else
-+namespace asio = process::net;
-+# endif
- # elif defined(BOOST_PROCESS_HAVE_V1)
- namespace process = boost::process::v1;
- namespace filesystem = boost::process::v1::filesystem;
diff --git a/recipe/test_read_parquet.py b/recipe/test_read_parquet.py
new file mode 100644
index 000000000..5f76a4e22
--- /dev/null
+++ b/recipe/test_read_parquet.py
@@ -0,0 +1,7 @@
+import pyarrow as pa
+import pyarrow.parquet as pq
+
+# round-trip a small table so the test actually exercises reading parquet
+table = pa.Table.from_pydict({"a": [1, 2]})
+pq.write_table(table, "test.parquet")
+assert pq.read_table("test.parquet").equals(table)