# funkwhale/.gitlab-ci.yml

variables:
  IMAGE_NAME: funkwhale/funkwhale
  IMAGE: $IMAGE_NAME:$CI_COMMIT_REF_NAME
  IMAGE_LATEST: $IMAGE_NAME:latest
  PIP_CACHE_DIR: "$CI_PROJECT_DIR/.pip"
  CARGO_HOME: "$CI_PROJECT_DIR/.cargo"
  PYTHONDONTWRITEBYTECODE: "true"
  REVIEW_DOMAIN: preview.funkwhale.audio
  REVIEW_INSTANCE_URL: https://funkwhale.juniorjpdj.pl
  DOCKER_HOST: tcp://docker:2375/
  DOCKER_DRIVER: overlay2
  DOCKER_TLS_CERTDIR: ""
  DOCKER_BUILD_PLATFORMS: linux/amd64,linux/arm64,linux/arm/v7

workflow:
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_COMMIT_BRANCH && $CI_OPEN_MERGE_REQUESTS
      when: never
    - if: $CI_COMMIT_BRANCH
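  # The three rules above are GitLab's recommended "avoid duplicate pipelines" pattern:
  # run merge request pipelines, skip branch pipelines for branches that already have an
  # open MR, and run plain branch pipelines otherwise.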

stages:
  - deploy
  - lint
  - test
  - deps_build
  - build
  - publish

.time_script: &time_script |
  echo Timer start!
  procstarttime=$(date -u '+%s')
  s_timestamp() {
    while read -r line; do
      curtime=$(date -u '+%s')
      deltatime=$(( curtime - procstarttime ))
      timestamp="$(printf "%03d:%02d" $(( deltatime / 60 )) $(( deltatime % 60 )))"
      # by nature BASH might run process subst twice when using >&2 pipes. This is a lazy
      # way to avoid dumping two timestamps on the same line:
      if [[ "$line" != \[${timestamp%% *}* ]]; then
        echo "[$timestamp] $line"
      else
        echo "$line"
      fi
    done
  }
  exec 1> >(s_timestamp)
  exec 2> >(s_timestamp)
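# Illustrative only: with the timestamp filter in place, job log lines come out prefixed
# with minutes:seconds elapsed since the timer started, e.g.
#   [001:42] Building wheels for collected packages: lxml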

build_dep_wheels:
  cache:
    - key: dep-cargo
      paths:
        - .cargo/registry/index
        - .cargo/registry/cache
        - .cargo/git/db
    - key: dep-pip-$DOCKER_ARCH
      paths:
        - .pip
  stage: deps_build
  tags: [docker, $DOCKER_ARCH]
  image: ${DOCKER_ARCH}/alpine:3.17
  parallel:
    matrix:
      - DOCKER_ARCH: [amd64, i386, arm64v8, arm32v7]
  before_script:
    - >
      apk add
      alpine-sdk
      python3-dev
      postgresql-dev
      libffi-dev
      zlib-dev
      jpeg-dev
      openldap-dev
      openssl-dev
      cargo
      libxml2-dev
      libxslt-dev
      poetry
      py3-pip
      patchelf
    - pip install auditwheel
    - cd api ; poetry export --without-hashes > ../requirements.txt ; cd ..
    - python -m venv venv
    - venv/bin/pip install --upgrade pip wheel
    - venv/bin/pip debug --verbose
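    # `pip debug --verbose` lists the platform/ABI tags this interpreter accepts;
    # useful when a freshly built wheel is unexpectedly rejected.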
  script:
    # show time stats
    - *time_script
    # build basic wheels
    - venv/bin/pip wheel --check-build-dependencies -w wheelhouse -r requirements.txt
    # remove universal wheels (those are probably downloaded from pypi or already uploaded)
    - rm -f wheelhouse/*-none-any.whl wheelhouse/*-musllinux_*.whl
    - ls -l wheelhouse
    # correct the wheel tags to work around maturin and other third-party packaging
    # problems on multi-arch environments,
    # e.g. https://github.com/PyO3/maturin/issues/1289 (also seen with other arches and packagers)
    - |
      for wheel in wheelhouse/* ; do
        echo "Correcting tags for $wheel"
        corrected_tag="$(python3 -c "import auditwheel.wheel_abi; print(auditwheel.wheel_abi.analyze_wheel_abi('$wheel').overall_tag)" || true)"
        echo "New tag: $corrected_tag"
        [[ "$wheel" = *"$corrected_tag"* ]] && { echo 'Tag already correct or correct tag is unknown - skipping.' ; continue ; }
        venv/bin/wheel unpack -d wheeltmp "$wheel"
        sed -ri 's/^(Tag: .*)-.*?$/\1-'"$corrected_tag"'/g' wheeltmp/*/*.dist-info/WHEEL
        rm -f "$wheel"
        venv/bin/wheel pack -d wheelhouse wheeltmp/*
        rm -rf wheeltmp
      done
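    # Illustrative example (names and versions are hypothetical): a wheel built as
    # somepkg-1.0.0-cp311-cp311-linux_x86_64.whl whose ABI analysis reports
    # musllinux_1_2_x86_64 gets unpacked, its "Tag:" line in WHEEL rewritten, and
    # repacked as somepkg-1.0.0-cp311-cp311-musllinux_1_2_x86_64.whl.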
    # prepare musllinux universal wheels
    - |
      for wheel in wheelhouse/* ; do
        auditwheel repair --strip "$wheel" || true
      done
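    # Note: `auditwheel repair` writes the repaired, musllinux-tagged wheels to
    # ./wheelhouse by default (its default --wheel-dir), so they land next to the
    # original linux_* wheels, which are removed in the next step.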
    # remove basic wheels as we have universal wheels now
    - rm -f wheelhouse/*-linux*.whl wheelhouse/*.linux*.whl
    - ls -l wheelhouse
    ## populate cache with universal wheels for next runs
    #- pip install wheelhouse/*.whl
    # doesn't work, need to find other way
  artifacts:
    expire_in: 2 weeks
    paths:
      - wheelhouse
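# Sketch (hypothetical, not part of this file): a later-stage job could consume the
# wheelhouse artifact so dependencies are never compiled twice, roughly:
#
#   install_deps_example:
#     stage: build
#     image: alpine:3.17
#     needs: [build_dep_wheels]
#     script:
#       - python -m venv venv
#       - venv/bin/pip install --find-links wheelhouse -r requirements.txt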

build_dep_wheels_riscv64:
  extends: build_dep_wheels
  # Overriding `parallel` with an empty (null) value drops the arch matrix inherited
  # from build_dep_wheels, leaving a single riscv64 job.
  parallel:
  variables:
    DOCKER_ARCH: riscv64
  # riscv64/alpine only ships edge tags, so the 3.17 image is not available here.
  image: ${DOCKER_ARCH}/alpine:edge
  allow_failure: true