Skip to content

Commit 31bab21

Browse files
authored
feat(python): build python wheel and publish (#813)
* add python wheel workflow * update workflow trigger and test * update * update * update readme * fix docs * fix poetry lock * fix copilot comment * update
1 parent 2381640 commit 31bab21

8 files changed

Lines changed: 513 additions & 22 deletions

File tree

.github/scripts/update_version.py

Lines changed: 88 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,88 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one
2+
# or more contributor license agreements. See the NOTICE file
3+
# distributed with this work for additional information
4+
# regarding copyright ownership. The ASF licenses this file
5+
# to you under the Apache License, Version 2.0 (the
6+
# "License"); you may not use this file except in compliance
7+
# with the License. You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing,
12+
# software distributed under the License is distributed on an
13+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14+
# KIND, either express or implied. See the License for the
15+
# specific language governing permissions and limitations
16+
# under the License.
17+
18+
import json
import os
import re
import sys
import urllib.request

from packaging.version import InvalidVersion, Version
24+
25+
PACKAGE_NAME = "graphar"
26+
FILE_PATH = "python/pyproject.toml"
27+
URL_TIMEOUT_SECONDS = int(os.getenv("GRAPHAR_VERSION_FETCH_TIMEOUT", "10"))
28+
29+
def get_next_version():
30+
versions = []
31+
urls = [
32+
f"https://pypi.org/pypi/{PACKAGE_NAME}/json",
33+
f"https://test.pypi.org/pypi/{PACKAGE_NAME}/json"
34+
]
35+
36+
print(f"Fetching versions for {PACKAGE_NAME}...")
37+
for url in urls:
38+
try:
39+
with urllib.request.urlopen(url, timeout=URL_TIMEOUT_SECONDS) as r:
40+
data = json.load(r)
41+
versions.extend(data.get("releases", {}).keys())
42+
except Exception as e:
43+
print(
44+
f"Warning: Failed to fetch versions from {url}: {type(e).__name__}: {e}",
45+
file=sys.stderr,
46+
)
47+
48+
if not versions:
49+
return "0.0.1.dev1"
50+
51+
latest = max([Version(v) for v in versions])
52+
print(f"Latest version found: {latest}")
53+
54+
if latest.is_devrelease:
55+
dev_number = latest.dev if latest.dev is not None else 0
56+
return f"{latest.major}.{latest.minor}.{latest.micro}.dev{dev_number + 1}"
57+
else:
58+
return f"{latest.major}.{latest.minor}.{latest.micro + 1}.dev1"
59+
60+
def main():
61+
new_ver = get_next_version()
62+
print(f"Target version: {new_ver}")
63+
64+
try:
65+
with open(FILE_PATH, "r", encoding="utf-8") as f:
66+
content = f.read()
67+
68+
new_content, count = re.subn(
69+
r'(version\s*=\s*")([^"]+)(")',
70+
rf'\g<1>{new_ver}\g<3>',
71+
content
72+
)
73+
74+
if count == 0:
75+
print(f"Error: Could not find 'version' key in {FILE_PATH}")
76+
sys.exit(1)
77+
78+
with open(FILE_PATH, "w", encoding="utf-8") as f:
79+
f.write(new_content)
80+
81+
print(f"Successfully updated {FILE_PATH} to {new_ver}")
82+
83+
except FileNotFoundError:
84+
print(f"Error: File {FILE_PATH} not found.")
85+
sys.exit(1)
86+
87+
if __name__ == "__main__":
88+
main()
Lines changed: 286 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,286 @@
1+
2+
# Licensed to the Apache Software Foundation (ASF) under one
3+
# or more contributor license agreements. See the NOTICE file
4+
# distributed with this work for additional information
5+
# regarding copyright ownership. The ASF licenses this file
6+
# to you under the Apache License, Version 2.0 (the
7+
# "License"); you may not use this file except in compliance
8+
# with the License. You may obtain a copy of the License at
9+
#
10+
# http://www.apache.org/licenses/LICENSE-2.0
11+
#
12+
# Unless required by applicable law or agreed to in writing,
13+
# software distributed under the License is distributed on an
14+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15+
# KIND, either express or implied. See the License for the
16+
# specific language governing permissions and limitations
17+
# under the License.
18+
19+
name: Build Python Wheels
20+
21+
concurrency:
22+
group: python-wheel-${{ github.ref }}
23+
cancel-in-progress: false
24+
25+
on:
26+
# Trigger the workflow on push or pull request,
27+
# but only for the main branch
28+
push:
29+
branches:
30+
- "main"
31+
paths:
32+
- 'cpp/**'
33+
- 'python/**'
34+
- '.github/workflows/python-wheel-workflow.yml'
35+
- '.github/scripts/update_version.py'
36+
pull_request:
37+
branches:
38+
- "main"
39+
paths:
40+
- 'cpp/**'
41+
- 'python/**'
42+
- '.github/workflows/python-wheel-workflow.yml'
43+
- '.github/scripts/update_version.py'
44+
workflow_dispatch:
45+
inputs:
46+
publish_pypi:
47+
description: "Publish to PyPI (manual runs only)"
48+
required: true
49+
default: false
50+
type: boolean
51+
52+
jobs:
53+
build_sdist:
54+
name: Build source distribution
55+
runs-on: ubuntu-22.04
56+
steps:
57+
- uses: actions/checkout@v4
58+
59+
- name: Set up Python
60+
uses: actions/setup-python@v5
61+
with:
62+
python-version: "3.9"
63+
64+
- name: Install dependencies
65+
run: |
66+
python -m pip install --upgrade pip
67+
pip install build twine
68+
- name: update pyproject version
69+
if: github.event_name != 'workflow_dispatch' || github.event.inputs.publish_pypi != 'true'
70+
run: |
71+
python .github/scripts/update_version.py
72+
- name: Build sdist
73+
run: |
74+
# Bundle C++ sources into python/ so the sdist contains them.
75+
rm -rf python/_bundled_cpp
76+
cp -a cpp python/_bundled_cpp
77+
cd python
78+
python -m build --sdist
79+
- name: Store artifacts
80+
uses: actions/upload-artifact@v4
81+
with:
82+
name: sdist
83+
path: python/dist/*
84+
85+
build_wheels:
86+
name: Build wheels on ${{ matrix.runner }}
87+
runs-on: ${{ matrix.runner }}
88+
needs: build_sdist
89+
strategy:
90+
matrix:
91+
include:
92+
# Job 1: Native x86_64 build
93+
- platform: x86_64
94+
runner: ubuntu-latest # This is the standard x86_64 runner
95+
os: linux
96+
manylinux: _2_28
97+
deployment-target: ''
98+
99+
# Job 2: Native aarch64 build
100+
- platform: aarch64
101+
runner: ubuntu-22.04-arm # This is a native ARM64 runner
102+
os: linux
103+
manylinux: _2_28
104+
deployment-target: ''
105+
106+
# Job 3: macOS arm64 build
107+
- platform: arm64
108+
runner: macos-latest
109+
os: macos
110+
deployment-target: '11.0'
111+
112+
113+
env:
114+
CIBW_PLATFORM: ${{ matrix.os }}
115+
CIBW_BUILD: "cp39-* cp310-* cp311-* cp312-* cp313-*"
116+
CIBW_SKIP: "*-musllinux_*"
117+
# Pin arch to the matrix platform
118+
CIBW_ARCHS: ${{ matrix.platform }}
119+
CIBW_MANYLINUX_X86_64_IMAGE: ${{ matrix.os == 'linux' && format('manylinux{0}', matrix.manylinux) || '' }}
120+
CIBW_MANYLINUX_AARCH64_IMAGE: ${{ matrix.os == 'linux' && format('manylinux{0}', matrix.manylinux) || '' }}
121+
CIBW_ENVIRONMENT_WINDOWS: DISTUTILS_USE_SDK=1 MSSdk=1
122+
CIBW_ENVIRONMENT_MACOS: ${{ matrix.os == 'macos' && format('MACOSX_DEPLOYMENT_TARGET={0} CMAKE_OSX_DEPLOYMENT_TARGET={0} CFLAGS=-mmacosx-version-min={0} CXXFLAGS=-mmacosx-version-min={0} LDFLAGS=-mmacosx-version-min={0}', matrix.deployment-target) || '' }}
123+
CIBW_BEFORE_BUILD_LINUX: |
124+
set -eux
125+
if [ -f /etc/system-release-cpe ]; then
126+
ALMA_MAJOR="$(cut -d: -f5 /etc/system-release-cpe | cut -d. -f1)"
127+
else
128+
. /etc/os-release
129+
ALMA_MAJOR="${VERSION_ID%%.*}"
130+
fi
131+
dnf install -y 'dnf-command(config-manager)' || dnf install -y dnf-plugins-core || true
132+
# Follow official Apache Arrow install instructions for AlmaLinux/RHEL-family
133+
dnf install -y epel-release || \
134+
dnf install -y oracle-epel-release-el${ALMA_MAJOR} || \
135+
dnf install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-${ALMA_MAJOR}.noarch.rpm
136+
dnf install -y https://packages.apache.org/artifactory/arrow/almalinux/${ALMA_MAJOR}/apache-arrow-release-latest.rpm
137+
dnf config-manager --set-enabled epel || :
138+
dnf config-manager --set-enabled powertools || :
139+
dnf config-manager --set-enabled crb || :
140+
dnf config-manager --set-enabled ol${ALMA_MAJOR}_codeready_builder || :
141+
dnf config-manager --set-enabled codeready-builder-for-rhel-${ALMA_MAJOR}-rhui-rpms || :
142+
subscription-manager repos --enable codeready-builder-for-rhel-${ALMA_MAJOR}-$(arch)-rpms || :
143+
# manylinux images may carry older Arrow packages (e.g. arrow1700-*) which
144+
# conflict with the newer packages from the Apache Arrow repo (e.g. arrow2200-*).
145+
# Remove any preinstalled Arrow/Parquet RPMs so we install a consistent set.
146+
dnf remove -y 'arrow*' 'parquet*' || true
147+
# Required for GraphAr C++ build via Arrow CMake packages
148+
dnf install -y --allowerasing \
149+
arrow-devel \
150+
arrow-dataset-devel \
151+
arrow-acero-devel \
152+
parquet-devel \
153+
libcurl-devel re2-devel ccache
154+
steps:
155+
- name: Checkout (needed for some tooling)
156+
uses: actions/checkout@v4
157+
158+
- name: Set up Python
159+
uses: actions/setup-python@v5
160+
with:
161+
python-version: "3.9"
162+
163+
- name: Set up Miniconda (macOS/Windows)
164+
if: matrix.os == 'windows' || matrix.os == 'macos'
165+
uses: conda-incubator/setup-miniconda@v3
166+
with:
167+
auto-activate-base: true
168+
miniforge-version: latest
169+
use-mamba: true
170+
171+
- name: Install Arrow (macOS)
172+
if: matrix.os == 'macos'
173+
shell: bash
174+
run: |
175+
set -euxo pipefail
176+
mamba install -y -c conda-forge arrow-cpp
177+
# Note: CONDA_PREFIX may be unset unless conda is activated in this shell.
178+
# setup-miniconda exports CONDA (base install prefix), which is sufficient here.
179+
echo "CMAKE_PREFIX_PATH=$CONDA" >> "$GITHUB_ENV"
180+
# Optional sanity check: ensure Arrow dylib isn't built for a newer macOS than deployment target.
181+
if command -v otool >/dev/null 2>&1; then
182+
ls -lah "$CONDA/lib" || true
183+
if [ -f "$CONDA/lib/libarrow.dylib" ]; then
184+
otool -l "$CONDA/lib/libarrow.dylib" | (grep -A3 -E 'LC_BUILD_VERSION|LC_VERSION_MIN_MACOSX' || true)
185+
fi
186+
fi
187+
- name: Install Arrow (Windows)
188+
if: matrix.os == 'windows'
189+
shell: pwsh
190+
run: |
191+
mamba install -y -c conda-forge arrow-cpp
192+
Add-Content $env:GITHUB_ENV "CMAKE_PREFIX_PATH=$env:CONDA_PREFIX\\Library"
193+
Add-Content $env:GITHUB_ENV "PATH=$env:CONDA_PREFIX\\Library\\bin;$env:PATH"
194+
- name: Download sdist artifact
195+
uses: actions/download-artifact@v4
196+
with:
197+
name: sdist
198+
path: sdist
199+
200+
- name: Extract sdist
201+
shell: bash
202+
run: |
203+
set -euxo pipefail
204+
ls -lah sdist
205+
SDIST_FILE=""
206+
for f in sdist/*.tar.gz sdist/*.zip; do
207+
if [ -f "$f" ]; then
208+
SDIST_FILE="$f"
209+
break
210+
fi
211+
done
212+
if [ -z "$SDIST_FILE" ]; then
213+
echo "No sdist file found in sdist/" >&2
214+
exit 1
215+
fi
216+
mkdir -p sdist_pkg
217+
case "$SDIST_FILE" in
218+
*.tar.gz) tar -xzf "$SDIST_FILE" -C sdist_pkg ;;
219+
*.zip) unzip -q "$SDIST_FILE" -d sdist_pkg ;;
220+
esac
221+
PKGDIR="$(find sdist_pkg -mindepth 1 -maxdepth 1 -type d | head -n 1)"
222+
if [ -z "$PKGDIR" ]; then
223+
echo "Failed to locate extracted sdist directory" >&2
224+
exit 1
225+
fi
226+
echo "PKGDIR=$PKGDIR" >> "$GITHUB_ENV"
227+
- name: Build wheels
228+
shell: bash
229+
run: |
230+
set -euxo pipefail
231+
python -m pip install --upgrade pip
232+
python -m pip install packaging cibuildwheel
233+
mkdir -p python/dist
234+
python -m cibuildwheel --output-dir python/dist "$PKGDIR"
235+
- name: Store artifacts
236+
uses: actions/upload-artifact@v4
237+
with:
238+
name: wheels-${{ matrix.os }}-${{ matrix.platform }}
239+
path: python/dist/*
240+
241+
upload_test_pypi:
242+
name: Publish to TestPyPI (auto)
243+
needs: [build_wheels, build_sdist]
244+
runs-on: ubuntu-22.04
245+
if: github.event_name == 'push'
246+
permissions:
247+
contents: read
248+
id-token: write
249+
steps:
250+
- name: Download artifacts
251+
uses: actions/download-artifact@v4
252+
with:
253+
path: dist
254+
255+
- name: Move artifacts to correct location
256+
run: |
257+
mkdir -p python/dist
258+
find dist -name "*" -type f -exec mv {} python/dist/ \;
259+
- name: Publish to Test PyPI
260+
uses: pypa/gh-action-pypi-publish@release/v1
261+
with:
262+
repository-url: https://test.pypi.org/legacy/
263+
packages-dir: python/dist/
264+
265+
upload_pypi:
266+
name: Publish (manual)
267+
needs: [build_wheels, build_sdist]
268+
runs-on: ubuntu-22.04
269+
if: github.event_name == 'workflow_dispatch' && inputs.publish_pypi
270+
permissions:
271+
contents: read
272+
id-token: write
273+
steps:
274+
- name: Download artifacts
275+
uses: actions/download-artifact@v4
276+
with:
277+
path: dist
278+
279+
- name: Move artifacts to correct location
280+
run: |
281+
mkdir -p python/dist
282+
find dist -name "*" -type f -exec mv {} python/dist/ \;
283+
- name: Publish to PyPI
284+
uses: pypa/gh-action-pypi-publish@release/v1
285+
with:
286+
packages-dir: python/dist/

0 commit comments

Comments
 (0)