fix(DX): Docker mac compatibility, dynamic DNS + Debian image (#2337 #1691)

This commit is contained in:
jon r 2024-10-21 08:57:15 +00:00 committed by petitminion
parent 4a74c2f5d0
commit 1e6e6923d2
86 changed files with 2977 additions and 2345 deletions

View File

@ -67,3 +67,6 @@ mailhog
*.sqlite3
api/music
api/media
# Docker state
.state

View File

@ -1,23 +0,0 @@
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
DJANGO_SETTINGS_MODULE=config.settings.local
DJANGO_SECRET_KEY=dev
C_FORCE_ROOT=true
FUNKWHALE_HOSTNAME=localhost
FUNKWHALE_PROTOCOL=http
PYTHONDONTWRITEBYTECODE=true
VUE_PORT=8080
MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
PYTHONTRACEMALLOC=0
MEDIA_ROOT=/data/media
# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True
# Customize to your needs
POSTGRES_VERSION=15
DEBUG=true
TYPESENSE_API_KEY="apikey"

58
.env.example Normal file
View File

@ -0,0 +1,58 @@
# Environment template for the Compose development setup (compose.yml).
# Variables are grouped by the service(s) that consume them.
# api + celeryworker
DEBUG=True
DEFAULT_FROM_EMAIL=hello@funkwhale.test
FUNKWHALE_DOMAIN=funkwhale.test
FUNKWHALE_PROTOCOL=https
DJANGO_SECRET_KEY=dev
DJANGO_ALLOWED_HOSTS=.funkwhale.test,nginx
DJANGO_SETTINGS_MODULE=config.settings.local
DATABASE_URL=postgresql://postgres@postgres/postgres
CACHE_URL=redis://redis:6379/0
EMAIL_CONFIG=smtp://mailpit.funkwhale.test:1025
FORCE_HTTPS_URLS=True
# Dev TLS uses a self-signed certificate, so outgoing request verification is off.
EXTERNAL_REQUESTS_VERIFY_SSL=false
C_FORCE_ROOT=true
PYTHONDONTWRITEBYTECODE=true
PYTHONTRACEMALLOC=0
# api
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
LDAP_ENABLED=False
BROWSABLE_API_ENABLED=True
# celeryworker
# 0 lets celery choose one worker process per CPU core
CELERYD_CONCURRENCY=0
# api + nginx
STATIC_ROOT=/staticfiles
MEDIA_ROOT=/data/media
# api + Typesense
TYPESENSE_API_KEY=apikey
# front
HOST=0.0.0.0
VUE_PORT=8080
# nginx
NGINX_MAX_BODY_SIZE=10G
FUNKWHALE_API_HOST=api
FUNKWHALE_API_PORT=5000
FUNKWHALE_FRONT_IP=front
# NOTE(review): ${VUE_PORT} is only interpolated when this file is read by
# Docker Compose, not by plain dotenv loaders — confirm intended consumers.
FUNKWHALE_FRONT_PORT=${VUE_PORT}
# postgres
# trust authentication: development only, never use in production
POSTGRES_HOST_AUTH_METHOD=trust

11
.gitignore vendored
View File

@ -93,6 +93,7 @@ docs/_build
front/tauri/gen
/data/
.state
.env
po/*.po
@ -103,16 +104,20 @@ _build
# Docker
docker-bake.*.json
metadata.json
compose/var/test.*
# Linting
.eslintcache
tsconfig.tsbuildinfo
# Vscode
.vscode/
# Nix
.direnv/
.envrc
flake.nix
flake.lock
# Vscode
.vscode/
# Zed
.zed/

View File

@ -402,13 +402,13 @@ Update instructions:
2. Stop your containers using the **docker-compose** syntax.
```sh
sudo docker-compose down
docker compose down
```
3. Bring the containers back up using the **docker compose** syntax.
```sh
sudo docker compose up -d
docker compose up -d
```
After this you can continue to use the **docker compose** syntax for all Docker management tasks.

View File

@ -1,120 +0,0 @@
FROM alpine:3.19 as requirements
RUN set -eux; \
apk add --no-cache \
poetry \
py3-cryptography \
py3-pip \
python3
COPY pyproject.toml poetry.lock /
RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.19 as builder
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN set -eux; \
apk add --no-cache \
cargo \
curl \
gcc \
g++ \
git \
jpeg-dev \
libffi-dev \
libldap \
libxml2-dev \
libxslt-dev \
make \
musl-dev \
openldap-dev \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography=41.0.7-r0 \
py3-lxml=4.9.3-r1 \
py3-pillow=10.3.0-r0 \
py3-psycopg2=2.9.9-r0 \
py3-watchfiles=0.19.0-r1 \
python3-dev
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip; \
pip3 install setuptools wheel; \
# Currently we are unable to relieably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==41.0.7 \
lxml==4.9.3 \
pillow==10.2.0 \
psycopg2==2.9.9 \
watchfiles==0.19.0
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==41.0.7 \
lxml==4.9.3 \
pillow==10.2.0 \
psycopg2==2.9.9 \
watchfiles==0.19.0; \
fi
FROM alpine:3.19 as production
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
RUN set -eux; \
apk add --no-cache \
bash \
ffmpeg \
gettext \
jpeg-dev \
libldap \
libmagic \
libpq \
libxml2 \
libxslt \
py3-cryptography=41.0.7-r0 \
py3-lxml=4.9.3-r1 \
py3-pillow=10.3.0-r0 \
py3-psycopg2=2.9.9-r0 \
py3-watchfiles=0.19.0-r1 \
python3 \
tzdata
COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
ENV IS_DOCKER_SETUP=true
CMD ["./docker/server.sh"]

1
api/Dockerfile Symbolic link
View File

@ -0,0 +1 @@
Dockerfile.alpine

120
api/Dockerfile.alpine Normal file
View File

@ -0,0 +1,120 @@
# Stage 1: export pinned requirement lists from the Poetry lockfile.
FROM alpine:3.19 AS requirements

RUN set -eux; \
    apk add --no-cache \
    poetry \
    py3-cryptography \
    py3-pip \
    python3

COPY pyproject.toml poetry.lock /
RUN set -eux; \
    poetry export --without-hashes --extras typesense > requirements.txt; \
    poetry export --without-hashes --with dev > dev-requirements.txt;

# Stage 2: build the virtualenv holding all Python dependencies.
FROM alpine:3.19 AS builder

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true

RUN set -eux; \
    apk add --no-cache \
    cargo \
    curl \
    gcc \
    g++ \
    git \
    jpeg-dev \
    libffi-dev \
    libldap \
    libxml2-dev \
    libxslt-dev \
    make \
    musl-dev \
    openldap-dev \
    openssl-dev \
    postgresql-dev \
    zlib-dev \
    py3-cryptography=41.0.7-r0 \
    py3-lxml=4.9.3-r1 \
    py3-pillow=10.3.0-r0 \
    py3-psycopg2=2.9.9-r0 \
    py3-watchfiles=0.19.0-r1 \
    python3-dev

# Create virtual env. --system-site-packages makes the apk-installed wheels
# above visible to pip, so matching pins below are treated as satisfied.
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"

COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt

# Fix: cache mount target must be an absolute path. The previous
# `target=~/.cache/pip;` was not expanded by BuildKit (and the trailing `;`
# was parsed as part of the flag value), so the pip cache was never reused.
RUN --mount=type=cache,target=/root/.cache/pip \
    set -eux; \
    pip3 install --upgrade pip; \
    pip3 install setuptools wheel; \
    # Currently we are unable to reliably build rust-based packages on armv7. This
    # is why we need to use the packages shipped by Alpine Linux.
    # Since poetry does not allow in-place dependency pinning, we need
    # to install the deps using pip.
    # NOTE: the pins below must match the apk versions above; a mismatch makes
    # pip rebuild the package from source (pillow was previously pinned to
    # 10.2.0 while apk ships 10.3.0).
    grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
    | pip3 install -r /dev/stdin \
    cryptography==41.0.7 \
    lxml==4.9.3 \
    pillow==10.3.0 \
    psycopg2==2.9.9 \
    watchfiles==0.19.0

# Optionally add the dev/test dependencies (used by the development setup).
ARG install_dev_deps=0
RUN --mount=type=cache,target=/root/.cache/pip \
    set -eux; \
    if [ "$install_dev_deps" = "1" ] ; then \
    grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
    | pip3 install -r /dev/stdin \
    cryptography==41.0.7 \
    lxml==4.9.3 \
    pillow==10.3.0 \
    psycopg2==2.9.9 \
    watchfiles==0.19.0; \
    fi

# Stage 3: runtime image — only runtime libraries plus the built venv.
FROM alpine:3.19 AS production

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1

RUN set -eux; \
    apk add --no-cache \
    bash \
    ffmpeg \
    gettext \
    jpeg-dev \
    libldap \
    libmagic \
    libpq \
    libxml2 \
    libxslt \
    py3-cryptography=41.0.7-r0 \
    py3-lxml=4.9.3-r1 \
    py3-pillow=10.3.0-r0 \
    py3-psycopg2=2.9.9-r0 \
    py3-watchfiles=0.19.0-r1 \
    python3 \
    tzdata

COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"

COPY . /app
WORKDIR /app

# Install the funkwhale_api package itself (deps already in the venv).
RUN --mount=type=cache,target=/root/.cache/pip \
    set -eux; \
    pip3 install --no-deps --editable .

ENV IS_DOCKER_SETUP=true

CMD ["./docker/server.sh"]

71
api/Dockerfile.debian Normal file
View File

@ -0,0 +1,71 @@
# Build stage: install Poetry and resolve all dependencies into /venv.
FROM python:3.12-slim AS builder

ARG POETRY_VERSION=1.8

ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=1
ENV POETRY_VIRTUALENVS_CREATE=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Tell Poetry where to place its cache and virtual environment
ENV POETRY_CACHE_DIR=/opt/.cache

RUN pip install "poetry==${POETRY_VERSION}"

# Fix: use `apt-get` (stable scripting CLI) instead of `apt`, chain with `&&`
# so a failed update aborts the install, and skip recommended packages. The
# apt list cache lives in a BuildKit cache mount, so it never bloats a layer.
RUN --mount=type=cache,target=/var/lib/apt/lists \
    apt-get update && \
    apt-get install -y --no-install-recommends \
    build-essential \
    python3-dev \
    libldap-dev \
    libsasl2-dev \
    slapd \
    ldap-utils \
    tox \
    lcov \
    valgrind

WORKDIR /app
COPY pyproject.toml .

# NOTE(review): sourcing `activate` has no effect past this RUN; the venv is
# actually selected via the VIRTUAL_ENV/PATH env vars above.
RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bin/activate
RUN --mount=type=cache,target=/opt/.cache \
    poetry install --no-root --extras typesense

# Runtime stage: copy the venv and install only runtime libraries.
FROM python:3.12-slim AS runtime

ARG POETRY_VERSION=1.8

ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"

RUN --mount=type=cache,target=/var/lib/apt/lists \
    apt-get update && \
    apt-get install -y --no-install-recommends \
    ffmpeg \
    gettext \
    libjpeg-dev \
    libldap-2.5-0 \
    libmagic1 \
    libpq5 \
    libxml2 \
    libxslt1.1

RUN pip install "poetry==${POETRY_VERSION}"

COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}

WORKDIR /app
COPY . /app

# Install the funkwhale_api package itself into the venv.
RUN poetry install --extras typesense

CMD ["./docker/server.sh"]

View File

@ -224,10 +224,13 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) + [FUNKWHALE_HOSTNA
List of allowed hostnames for which the Funkwhale server will answer.
"""
CSRF_TRUSTED_ORIGINS = [urlparse(o, FUNKWHALE_PROTOCOL).geturl() for o in ALLOWED_HOSTS]
CSRF_TRUSTED_ORIGINS = [
urlparse("//" + o, FUNKWHALE_PROTOCOL).geturl() for o in ALLOWED_HOSTS
]
"""
List of origins that are trusted for unsafe requests
We simply consider all allowed hosts to be trusted origins
See DJANGO_ALLOWED_HOSTS in .env.example for details
See https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
"""

View File

@ -2,8 +2,7 @@
Local settings
- Run in Debug mode
- Use console backend for e-mails
- Add Django Debug Toolbar
- Add Django Debug Toolbar when INTERNAL_IPS are given and match the request
- Add django-extensions as app
"""
@ -25,11 +24,6 @@ SECRET_KEY = env(
"DJANGO_SECRET_KEY", default="mc$&b=5j#6^bv7tld1gyjp2&+^-qrdy=0sw@r5sua*1zp4fmxc"
)
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = "localhost"
EMAIL_PORT = 1025
# django-debug-toolbar
# ------------------------------------------------------------------------------
@ -147,5 +141,3 @@ MIDDLEWARE = (
"funkwhale_api.common.middleware.ProfilerMiddleware",
"funkwhale_api.common.middleware.PymallocMiddleware",
) + MIDDLEWARE
TYPESENSE_API_KEY = "apikey"

View File

@ -0,0 +1,22 @@
# Generated by Django 4.2.9 on 2024-10-18 20:23
import django.core.serializers.json

from django.db import migrations, models


class Migration(migrations.Migration):
    """Auto-generated: alter RadioSession.config to a nullable JSONField
    serialized with DjangoJSONEncoder. Do not edit by hand."""

    dependencies = [
        ("radios", "0007_merge_20220715_0801"),
    ]

    operations = [
        migrations.AlterField(
            model_name="radiosession",
            name="config",
            field=models.JSONField(
                blank=True,
                encoder=django.core.serializers.json.DjangoJSONEncoder,
                null=True,
            ),
        ),
    ]

View File

@ -0,0 +1,78 @@
# Generated by Django 4.2.9 on 2024-10-18 20:23
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import oauth2_provider.generators
import oauth2_provider.models


class Migration(migrations.Migration):
    """Auto-generated: realign the OAuth model fields with the current
    django-oauth-toolkit definitions — user FKs gain the
    "%(app_label)s_%(class)s" related_name, and Application.client_secret
    becomes a hashed ClientSecretField. Do not edit by hand."""

    dependencies = [
        ("users", "0023_merge_20221125_1902"),
    ]

    operations = [
        migrations.AlterField(
            model_name="accesstoken",
            name="user",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="%(app_label)s_%(class)s",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="application",
            name="client_secret",
            field=oauth2_provider.models.ClientSecretField(
                blank=True,
                db_index=True,
                default=oauth2_provider.generators.generate_client_secret,
                help_text="Hashed on Save. Copy it now if this is a new secret.",
                max_length=255,
            ),
        ),
        migrations.AlterField(
            model_name="application",
            name="user",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="%(app_label)s_%(class)s",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="grant",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="%(app_label)s_%(class)s",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="idtoken",
            name="user",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                related_name="%(app_label)s_%(class)s",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
        migrations.AlterField(
            model_name="refreshtoken",
            name="user",
            field=models.ForeignKey(
                on_delete=django.db.models.deletion.CASCADE,
                related_name="%(app_label)s_%(class)s",
                to=settings.AUTH_USER_MODEL,
            ),
        ),
    ]

2907
api/poetry.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -36,7 +36,7 @@ django-cacheops = "==7.0.2"
django-cleanup = "==8.1.0"
django-cors-headers = "==4.3.1"
django-dynamic-preferences = "==1.14.0"
django-environ = "==0.10.0"
django-environ = "==0.11.2"
django-filter = "==23.5"
django-oauth-toolkit = "2.2.0"
django-redis = "==5.2.0"
@ -46,7 +46,7 @@ djangorestframework = "==3.14.0"
drf-spectacular = "==0.26.5"
markdown = "==3.4.4"
persisting-theory = "==1.0"
psycopg2 = "==2.9.9"
psycopg2-binary = "==2.9.9"
redis = "==5.0.1"
# Django LDAP
@ -159,28 +159,29 @@ python_files = [
testpaths = ["tests"]
addopts = "-p no:warnings"
env = [
"SECRET_KEY=test",
"EMAIL_CONFIG=consolemail://",
"CELERY_BROKER_URL=memory://",
"CELERY_TASK_ALWAYS_EAGER=True",
"FUNKWHALE_HOSTNAME_SUFFIX=",
"FUNKWHALE_HOSTNAME_PREFIX=",
"FUNKWHALE_HOSTNAME=test.federation",
"FEDERATION_HOSTNAME=test.federation",
"FUNKWHALE_URL=https://test.federation",
"DEBUG_TOOLBAR_ENABLED=False",
"DEBUG=False",
"WEAK_PASSWORDS=True",
"CREATE_IMAGE_THUMBNAILS=False",
"FORCE_HTTPS_URLS=False",
"FUNKWHALE_SPA_HTML_ROOT=http://noop/",
"PROXY_MEDIA=true",
"MUSIC_USE_DENORMALIZATION=true",
"DEBUG=False",
"DEBUG_TOOLBAR_ENABLED=False",
"DISABLE_PASSWORD_VALIDATORS=false",
"DISABLE_PASSWORD_VALIDATORS=false",
"EMAIL_CONFIG=consolemail://",
"EXTERNAL_MEDIA_PROXY_ENABLED=true",
"DISABLE_PASSWORD_VALIDATORS=false",
"DISABLE_PASSWORD_VALIDATORS=false",
"FEDERATION_HOSTNAME=test.federation",
"FORCE_HTTPS_URLS=False",
"FUNKWHALE_HOSTNAME=test.federation",
"FUNKWHALE_HOSTNAME_PREFIX=",
"FUNKWHALE_HOSTNAME_SUFFIX=",
"FUNKWHALE_PLUGINS=",
"FUNKWHALE_SPA_HTML_ROOT=http://noop/",
"FUNKWHALE_URL=https://test.federation",
"MUSIC_DIRECTORY_PATH=/music",
"MUSIC_USE_DENORMALIZATION=true",
"PROXY_MEDIA=true",
"SECRET_KEY=test",
"TYPESENSE_API_KEY=apikey",
"WEAK_PASSWORDS=True",
]
[tool.coverage.run]

View File

@ -0,0 +1 @@
Docker mac compatibility, dynamic DNS + Debian image (#2337)

7
compose.docs.yml Normal file
View File

@ -0,0 +1,7 @@
# Documentation stack: Sphinx dev server + Swagger UI, exposed via Traefik.
name: funkwhale-docs

networks:
  web:
    external: true

include:
  - path: compose/docs.sphinx.yml
  - path: compose/docs.openapi.yml

15
compose.net.yml Normal file
View File

@ -0,0 +1,15 @@
# Shared networking stack (DNS, reverse proxy, mail catcher) for all
# development projects under *.funkwhale.test.
name: funkwhale-net

include:
  - path: compose/net.dnsmasq.yml
  - path: compose/net.traefik.yml
  - path: compose/net.mailpit.yml
  # Nowadays people rarely use the default `bridge` network in Docker
  # (called `docker0` on the host system), which is why it often has no
  # containers attached. The two services above bind to that interface, but
  # its link state is only `UP` while at least one container is connected —
  # that is what this no-op helper container is for.
  # Comment out the following line if you have other containers present on
  # the docker0 network.
  - path: compose/net.helpers.docker0.yml
  - path: compose/net.verify.yml

139
compose.yml Normal file
View File

@ -0,0 +1,139 @@
networks:
  internal:
  web:
    external: true

# Shared configuration for the Django containers (api + celeryworker).
x-django: &django
  environment:
    - DEBUG
    - DEFAULT_FROM_EMAIL
    - DJANGO_SETTINGS_MODULE
    - DJANGO_SECRET_KEY
    - EXTERNAL_REQUESTS_VERIFY_SSL
    - "FORCE_HTTPS_URLS=${FORCE_HTTPS_URLS:-False}"
    - FUNKWHALE_PROTOCOL
    # Hostname is derived from the compose project name, so multiple pods
    # (e.g. federation test nodes) can run side by side.
    - "FUNKWHALE_HOSTNAME=${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}"
    - DATABASE_URL
    - CACHE_URL
    - EMAIL_CONFIG
    - TYPESENSE_API_KEY
    - "STATIC_URL=${FUNKWHALE_PROTOCOL}://${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}/static/"
    - "MEDIA_URL=${FUNKWHALE_PROTOCOL}://${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}/media/"
    - STATIC_ROOT
    - MEDIA_ROOT
    - FUNKWHALE_SPA_HTML_ROOT
    - LDAP_ENABLED
    - BROWSABLE_API_ENABLED
    - "MUSIC_DIRECTORY_PATH=${MUSIC_DIRECTORY_PATH:-/music}"
    - C_FORCE_ROOT
    - PYTHONDONTWRITEBYTECODE
    - PYTHONTRACEMALLOC
  # Resolve *.funkwhale.test through the dnsmasq instance bound to the
  # docker0 bridge (see compose.net.yml).
  dns: 172.17.0.1
  dns_search: funkwhale.test

services:
  front:
    build:
      context: ./front
      dockerfile: Dockerfile.dev
    ports:
      - "${VUE_PORT:-8080}:${VUE_PORT:-8080}"
    environment:
      - HOST
      - VUE_PORT
    volumes:
      - "./front:/app"
      # anonymous volume so host node_modules never shadow the container's
      - "/app/node_modules"
      - "./po:/po"
    networks:
      - internal
    command: "yarn dev --host"

  api:
    extends:
      file: ./compose/app.django.yml
      service: api
    <<: *django

  celeryworker:
    extends:
      file: ./compose/app.django.yml
      service: celeryworker
    <<: *django

  nginx:
    extends:
      file: ./compose/app.nginx.yml
      service: nginx
    environment:
      - "MUSIC_DIRECTORY_PATH=${MUSIC_DIRECTORY_PATH:-/music}"
      - "FUNKWHALE_HOSTNAME=${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}"
      - FUNKWHALE_PROTOCOL
      - FUNKWHALE_API_HOST
      - FUNKWHALE_API_PORT
      - FUNKWHALE_FRONT_IP
      - FUNKWHALE_FRONT_PORT
      - NGINX_MAX_BODY_SIZE
      - STATIC_ROOT
      - "MEDIA_ROOT=${MEDIA_ROOT:-/data/media}"
    networks:
      - web
      - internal
    # Traefik routes HTTP(S) for this pod's hostname to nginx.
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-web.rule=Host(`${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}`)"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-web.entrypoints=web"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-webs.rule=Host(`${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}`)"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-webs.entrypoints=webs"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-webs.tls=true"
      - "traefik.http.routers.test-funkwhale-${COMPOSE_PROJECT_NAME:-funkwhale}-webs.tls.domains[0].main=${COMPOSE_PROJECT_NAME:-funkwhale}.${FUNKWHALE_DOMAIN}"

  postgres:
    image: "postgres:${POSTGRES_VERSION:-15}-alpine"
    environment:
      - POSTGRES_HOST_AUTH_METHOD
    command: postgres ${POSTGRES_ARGS:-}
    volumes:
      # state is namespaced per project so parallel pods don't share databases
      - "./.state/${COMPOSE_PROJECT_NAME:-funkwhale}/postgres:/var/lib/postgresql/data"
    networks:
      - internal
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5

  redis:
    image: redis:7-alpine
    volumes:
      - "./.state/${COMPOSE_PROJECT_NAME:-funkwhale}/redis:/data"
    networks:
      - internal
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 3

  typesense:
    extends:
      file: ./compose/app.typesense.yml
      service: typesense

33
compose/app.django.yml Normal file
View File

@ -0,0 +1,33 @@
# Shared base for the api and celeryworker services (source + state mounts).
x-django: &django
  image: funkwhale-api
  volumes:
    - ../api:/app
    - "${MUSIC_DIRECTORY_SERVE_PATH:-../.state/music}:/music:ro"
    - "../.state/plugins:/srv/funkwhale/plugins"
    - "../.state/staticfiles:/staticfiles"
    - "../.state/media:/protected/media"
    # per-project media state
    - "../.state/${COMPOSE_PROJECT_NAME:-funkwhale}/media:/data/media"
  depends_on:
    postgres:
      condition: service_healthy
    redis:
      condition: service_healthy
  networks:
    - internal

services:
  api:
    <<: *django
    build:
      context: ../api
      dockerfile: Dockerfile.debian
    # Apply migrations and collect static assets, then serve with uvicorn
    # auto-reload for development.
    command: >
      sh -c "
      funkwhale-manage migrate &&
      funkwhale-manage collectstatic --no-input &&
      uvicorn --reload config.asgi:application --host 0.0.0.0 --port 5000 --reload-dir config/ --reload-dir funkwhale_api/
      "

  celeryworker:
    <<: *django
    command: celery -A funkwhale_api.taskapp worker -l debug -B --concurrency=${CELERYD_CONCURRENCY}

18
compose/app.nginx.yml Normal file
View File

@ -0,0 +1,18 @@
services:
  # Front door for a pod: proxies the API, the dev front-end and serves
  # static/protected files. Exposed to Traefik via compose.yml.
  nginx:
    image: nginx
    depends_on:
      - api
      - front
    volumes:
      - "${MUSIC_DIRECTORY_SERVE_PATH:-../.state/music}:${MUSIC_DIRECTORY_PATH:-/music}:ro"
      # template: env vars are substituted by the nginx image entrypoint
      - ./etc/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
      - ../deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
      - ../front:/frontend:ro
      - ../.state/staticfiles:/usr/share/nginx/html/staticfiles:ro
      - ../.state/media:/protected/media:ro
      - ../.state/${COMPOSE_PROJECT_NAME:-funkwhale}/media:/data/media:ro
    networks:
      - internal

10
compose/app.typesense.yml Normal file
View File

@ -0,0 +1,10 @@
services:
  # Typesense full-text search backend (per-project data dir).
  typesense:
    environment:
      - TYPESENSE_API_KEY
    image: typesense/typesense:27.1
    networks:
      - internal
    volumes:
      - ../.state/${COMPOSE_PROJECT_NAME:-funkwhale}/typesense/data:/data
    # NOTE(review): `$$` makes Compose pass `${TYPESENSE_API_KEY}` literally to
    # the container — confirm the image entrypoint expands it through a shell;
    # the TYPESENSE_API_KEY env var above may already cover this.
    command: --data-dir /data --api-key=$${TYPESENSE_API_KEY} --enable-cors

19
compose/docs.openapi.yml Normal file
View File

@ -0,0 +1,19 @@
services:
  # Swagger UI serving the bundled schema at openapi.funkwhale.test.
  openapi:
    image: swaggerapi/swagger-ui
    environment:
      - "URL=/openapi.yml"
    ports:
      - "8002:8080"
    volumes:
      - "../docs/specs/nodeinfo21/schema.yml:/usr/share/nginx/html/openapi.yml"
      # - "../docs/api:/usr/share/nginx/html/api"
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.test-funkwhale-openapi-web.rule=Host(`openapi.funkwhale.test`)"
      - "traefik.http.routers.test-funkwhale-openapi-web.entrypoints=web"
      - "traefik.http.services.test-funkwhale-openapi.loadbalancer.server.port=8080"
      - "traefik.http.routers.test-funkwhale-openapi-webs.rule=Host(`openapi.funkwhale.test`)"
      - "traefik.http.routers.test-funkwhale-openapi-webs.entrypoints=webs"
      - "traefik.http.routers.test-funkwhale-openapi-webs.tls=true"
    networks: ["web"]

19
compose/docs.sphinx.yml Normal file
View File

@ -0,0 +1,19 @@
services:
  # Sphinx documentation dev server with live rebuild at docs.funkwhale.test.
  sphinx:
    build:
      context: ../
      dockerfile: docs/Dockerfile
    init: true
    ports: ["8001:8001"]
    command: sh -c 'cd /src/docs && make dev'
    volumes:
      - ../docs:/src/docs
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.test-funkwhale-docs-web.rule=Host(`docs.funkwhale.test`)"
      - "traefik.http.routers.test-funkwhale-docs-web.entrypoints=web"
      - "traefik.http.routers.test-funkwhale-docs-webs.rule=Host(`docs.funkwhale.test`)"
      - "traefik.http.routers.test-funkwhale-docs-webs.entrypoints=webs"
      - "traefik.http.routers.test-funkwhale-docs-webs.tls=true"
    networks: ["web"]

View File

@ -0,0 +1,46 @@
port=53
bind-interfaces
# Bind to default Docker bridge
listen-address=172.17.0.1
# Do not read /etc/hosts. Comment the next line out if you want /etc/hosts
# entries to be served as well.
no-hosts
# Prevent dnsmasq from reading /etc/resolv.conf or any other file, getting its
# servers from this file instead.
no-resolv
# Prevent dnsmasq from polling /etc/resolv.conf or other resolv files for
# changes and re-reading them.
no-poll
# Use the upstream nameservers in the order they are provided. If the first
# server doesn't reply, or can't answer, dnsmasq tries the next one and so on.
strict-order
# Prevent packets with malformed domain names from leaving the local network.
domain-needed
# Prevent packets with private IP addresses from leaving the local network.
bogus-priv
# Dnsmasq is not a DNSSEC validator, so it cannot perform the validation role of
# the recursive nameserver, but it can pass through the validation results from
# its own upstream nameservers. This option enables this behaviour.
proxy-dnssec
# If you want dnsmasq to change uid and gid to something other than the default,
# edit the following lines.
user=dnsmasq
group=nogroup
log-queries
cache-size=10000
# Upstream resolvers, tried in order (Quad9, then Cloudflare).
server=9.9.9.9
server=1.1.1.1
# Include all files in a directory which end in .conf
conf-dir=/etc/dnsmasq.d/,*.conf

View File

@ -0,0 +1,4 @@
# Resolve the .test TLD locally; never forward these queries upstream.
local=/test/
# Answer any *.funkwhale.test query with the docker0 bridge address.
address=/funkwhale.test/172.17.0.1
# Allow .test answers to point at private IPs (rebind-protection exception).
rebind-domain-ok=/.test/
domain=test

View File

@ -161,7 +161,7 @@ server {
return 302 ${FUNKWHALE_PROTOCOL}://${FUNKWHALE_HOSTNAME}/api/v1/instance/spa-manifest.json;
}
location /staticfiles/ {
location /static/ {
alias /usr/share/nginx/html/staticfiles/;
}
}

View File

@ -0,0 +1,13 @@
# Expose the built-in Traefik dashboard/API at traefik.funkwhale.test.
http:
  routers:
    # plain HTTP
    dashboard-web:
      rule: Host(`traefik.funkwhale.test`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))
      service: api@internal
      entryPoints:
        - web
    # HTTPS with the self-signed dev certificate
    dashboard-webs:
      rule: Host(`traefik.funkwhale.test`) && (PathPrefix(`/api`) || PathPrefix(`/dashboard`))
      service: api@internal
      entryPoints:
        - webs
      tls: true

View File

@ -0,0 +1,17 @@
# Route mailpit.funkwhale.test to the Mailpit UI. Mailpit runs with
# network_mode: host, so Traefik reaches it via the docker0 bridge address.
http:
  services:
    test-funkwhale-mailpit:
      loadbalancer:
        servers:
          - url: "http://172.17.0.1:8025"
        passhostheader: true
  routers:
    test-funkwhale-mailpit-web:
      entrypoints: web
      rule: Host(`mailpit.funkwhale.test`)
      service: test-funkwhale-mailpit
    test-funkwhale-mailpit-webs:
      entrypoints: webs
      rule: Host(`mailpit.funkwhale.test`)
      service: test-funkwhale-mailpit
      tls: true

View File

@ -0,0 +1,4 @@
# Self-signed development certificate mounted by compose/net.traefik.yml.
tls:
  certificates:
    - certFile: /ssl/test.crt
      keyFile: /ssl/test.key

View File

@ -0,0 +1,20 @@
providers:
  docker:
    # only route containers that opt in with traefik.enable=true
    exposedByDefault: false
  file:
    directory: /etc/traefik/dynamic
    watch: true

log:
  level: debug

# Unauthenticated dashboard/API — development only.
api:
  insecure: true

# All entrypoints bind the docker0 bridge address so containers using it as
# their DNS/gateway can reach Traefik.
entryPoints:
  traefik:
    address: "172.17.0.1:8008"
  web:
    address: "172.17.0.1:80"
  webs:
    address: "172.17.0.1:443"

7
compose/net.dnsmasq.yml Normal file
View File

@ -0,0 +1,7 @@
services:
  # Local DNS answering *.funkwhale.test on the docker0 bridge (172.17.0.1).
  # Host networking is required to bind the host's bridge address.
  dnsmasq:
    image: jamesmorrison/dnsmasq
    volumes:
      - ./etc/dnsmasq/dnsmasq.conf:/etc/dnsmasq.conf
      - ./etc/dnsmasq/funkwhale.test.conf:/etc/dnsmasq.d/funkwhale.test.conf
    network_mode: host

View File

@ -0,0 +1,5 @@
services:
  # No-op container attached to the default bridge so the docker0 link stays
  # UP (see the note in compose.net.yml).
  keep-docker0-up:
    extends:
      file: ./net.helpers.yml
      service: keep-docker0-up

32
compose/net.helpers.yml Normal file
View File

@ -0,0 +1,32 @@
# Debug/helper containers for the shared networking stack.
x-busybox: &busybox
  init: true
  image: "busybox"
  network_mode: bridge
  dns: 172.17.0.1
  dns_search: funkwhale.test

networks:
  web:
    external: true

services:
  # Echo server for verifying Traefik routing at whoami.funkwhale.test.
  whoami:
    image: "traefik/whoami"
    networks:
      - web
    labels:
      - "traefik.enable=true"
      - "traefik.http.routers.whoami-web.rule=Host(`whoami.funkwhale.test`)"
      - "traefik.http.routers.whoami-web.entrypoints=web"
      - "traefik.http.routers.whoami-webs.rule=Host(`whoami.funkwhale.test`)"
      - "traefik.http.routers.whoami-webs.entrypoints=webs"
      - "traefik.http.routers.whoami-webs.tls=true"
      # Fix: the TLS domain must be attached to the defined `whoami-webs`
      # router; no router named `whoami` exists, so the previous label was a
      # no-op.
      - "traefik.http.routers.whoami-webs.tls.domains[0].main=whoami.funkwhale.test"
  # Throwaway shell on the bridge network for manual debugging.
  shell:
    <<: *busybox
  # Long-lived no-op keeping the docker0 link UP (see net.helpers.docker0.yml).
  keep-docker0-up:
    <<: *busybox
    tty: true

13
compose/net.mailpit.yml Normal file
View File

@ -0,0 +1,13 @@
services:
  # Catch-all SMTP server + web UI for development mail.
  # Host networking so it can bind the docker0 bridge addresses below.
  mailpit:
    image: axllent/mailpit
    network_mode: host
    volumes:
      - ../.state/mailpit:/data
    environment:
      MP_UI_BIND_ADDR: 172.17.0.1:8025
      MP_SMTP_BIND_ADDR: 172.17.0.1:1025
      MP_MAX_MESSAGES: 5000
      MP_DATABASE: /data/mailpit.db
      # accept any credentials — development only
      MP_SMTP_AUTH_ACCEPT_ANY: "true"
      MP_SMTP_AUTH_ALLOW_INSECURE: "true"

23
compose/net.minio.yml Normal file
View File

@ -0,0 +1,23 @@
# Optional S3-compatible object storage for testing remote media storage.
services:
  minio:
    image: minio/minio
    command: server /data
    volumes:
      - "../.state/${COMPOSE_PROJECT_NAME:-funkwhale}/minio:/data"
    environment:
      - "MINIO_ACCESS_KEY=${AWS_ACCESS_KEY_ID:-access_key}"
      - "MINIO_SECRET_KEY=${AWS_SECRET_ACCESS_KEY:-secret_key}"
      # Fix: list-form environment entries use `VAR=value`; the previous
      # `"MINIO_HTTP_TRACE: /dev/stdout"` put the whole string (colon and all)
      # into the variable name, so tracing was never enabled.
      - "MINIO_HTTP_TRACE=/dev/stdout"
    ports:
      - "9000:9000"
    networks:
      - web
      - internal
  # Make the app services wait for minio when this file is included.
  api:
    depends_on:
      minio: {}
  celeryworker:
    depends_on:
      minio: {}

10
compose/net.traefik.yml Normal file
View File

@ -0,0 +1,10 @@
services:
  # Reverse proxy for all *.funkwhale.test hosts. Binds 80/443/8008 on the
  # docker0 bridge address (see etc/traefik/traefik.yml), hence host
  # networking; discovers containers through the Docker socket.
  traefik:
    image: traefik:mimolette
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./etc/traefik/traefik.yml:/etc/traefik/traefik.yml
      - ./etc/traefik/dynamic:/etc/traefik/dynamic
      # self-signed dev certificate, generated into compose/var/
      - ./var/test.key:/ssl/test.key
      - ./var/test.crt:/ssl/test.crt
    network_mode: host

15
compose/net.verify.yml Normal file
View File

@ -0,0 +1,15 @@
# One-shot containers that verify DNS + connectivity of the net stack.
x-verify: &verify
  init: true
  image: "busybox"
  network_mode: bridge
  # use the dnsmasq instance on the docker0 bridge
  dns: 172.17.0.1
  dns_search: funkwhale.test

services:
  # upstream resolution + internet reachability
  verify-external-connectivity:
    <<: *verify
    command: ping -c 1 funkwhale.audio
  # local *.funkwhale.test resolution back to the bridge
  verify-internal-connectivity:
    <<: *verify
    command: "ping -c 1 ${COMPOSE_PROJECT_NAME:-funkwhale}.funkwhale.test"

0
compose/var/.keep Normal file
View File

View File

@ -1 +1 @@
../docker/nginx/conf.dev
../compose/etc/nginx/conf.dev

200
dev.yml
View File

@ -1,200 +0,0 @@
services:
front:
build:
context: front
dockerfile: Dockerfile.dev
env_file:
- .env.dev
- .env
environment:
- "HOST=0.0.0.0"
- "VUE_PORT=${VUE_PORT-8080}"
ports:
- "${VUE_PORT-8080}"
volumes:
- "./front:/app"
- "/app/node_modules"
- "./po:/po"
networks:
- internal
command: "yarn dev --host"
postgres:
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 10s
timeout: 5s
retries: 5
env_file:
- .env.dev
- .env
image: postgres:${POSTGRES_VERSION-15}-alpine
environment:
- "POSTGRES_HOST_AUTH_METHOD=trust"
command: postgres ${POSTGRES_ARGS-}
volumes:
- "./data/${COMPOSE_PROJECT_NAME-node1}/postgres:/var/lib/postgresql/data"
networks:
- internal
redis:
env_file:
- .env.dev
- .env
image: redis:7-alpine
volumes:
- "./data/${COMPOSE_PROJECT_NAME-node1}/redis:/data"
networks:
- internal
api:
env_file:
- .env.dev
- .env
build:
context: ./api
dockerfile: Dockerfile
args:
install_dev_deps: 1
image: funkwhale-api
command: >
bash -c "funkwhale-manage collectstatic --no-input
&& uvicorn --reload config.asgi:application --host 0.0.0.0 --port 5000 --reload-dir config/ --reload-dir=funkwhale_api/"
volumes:
- ./api:/app
- "${MUSIC_DIRECTORY_SERVE_PATH-./data/music}:/music:ro"
- "./data/plugins:/srv/funkwhale/plugins"
- "./data/staticfiles:/staticfiles"
- "./data/media:/data/media"
environment:
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-localhost}"
- "FUNKWHALE_HOSTNAME_SUFFIX=funkwhale.test"
- "FUNKWHALE_HOSTNAME_PREFIX=${COMPOSE_PROJECT_NAME}"
- "FUNKWHALE_PROTOCOL=${FUNKWHALE_PROTOCOL-http}"
- "DATABASE_URL=postgresql://postgres@postgres/postgres"
- "CACHE_URL=redis://redis:6379/0"
- "STATIC_ROOT=/staticfiles"
depends_on:
- postgres
# - minio
- redis
networks:
- internal
cap_add:
- SYS_PTRACE
extra_hosts:
- "node1.funkwhale.test:172.17.0.1"
- "node2.funkwhale.test:172.17.0.1"
- "node3.funkwhale.test:172.17.0.1"
celeryworker:
env_file:
- .env.dev
- .env
image: funkwhale-api
depends_on:
- postgres
# - minio
- redis
command: celery -A funkwhale_api.taskapp worker -l debug -B --concurrency=${CELERYD_CONCURRENCY-0}
environment:
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-localhost}"
- "FUNKWHALE_HOSTNAME_SUFFIX=funkwhale.test"
- "FUNKWHALE_HOSTNAME_PREFIX=${COMPOSE_PROJECT_NAME}"
- "FUNKWHALE_PROTOCOL=${FUNKWHALE_PROTOCOL-http}"
- "DATABASE_URL=postgresql://postgres@postgres/postgres"
- "CACHE_URL=redis://redis:6379/0"
volumes:
- ./api:/app
- "${MUSIC_DIRECTORY_SERVE_PATH-./data/music}:/music:ro"
- "./data/plugins:/srv/funkwhale/plugins"
- "./data/media:/data/media"
networks:
- internal
extra_hosts:
- "node1.funkwhale.test:172.17.0.1"
- "node2.funkwhale.test:172.17.0.1"
- "node3.funkwhale.test:172.17.0.1"
nginx:
env_file:
- .env.dev
- .env
image: nginx
ports:
- "${NGINX_PORTS_MAPPING-8000:80}"
environment:
- "NGINX_MAX_BODY_SIZE=${NGINX_MAX_BODY_SIZE-100M}"
- "FUNKWHALE_API_HOST=${FUNKWHALE_API_HOST-api}"
- "FUNKWHALE_API_PORT=${FUNKWHALE_API_PORT-5000}"
- "FUNKWHALE_FRONT_IP=${FUNKWHALE_FRONT_IP-front}"
- "FUNKWHALE_FRONT_PORT=${VUE_PORT-8080}"
- "COMPOSE_PROJECT_NAME=${COMPOSE_PROJECT_NAME- }"
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-localhost}"
depends_on:
- api
- front
volumes:
- ./docker/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
- "${MUSIC_DIRECTORY_SERVE_PATH-./data/music}:/music:ro"
- ./deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
- "./front:/frontend:ro"
- "./data/staticfiles:/staticfiles:ro"
- "./data/media:/protected/media:ro"
networks:
- federation
- internal
labels:
traefik.backend: "${COMPOSE_PROJECT_NAME-node1}"
traefik.frontend.rule: "Host:${COMPOSE_PROJECT_NAME-node1}.funkwhale.test,${NODE_IP-127.0.0.1},${DJANGO_ALLOWED_HOSTS}"
traefik.enable: "true"
traefik.federation.protocol: "http"
traefik.federation.port: "80"
traefik.frontend.passHostHeader: true
traefik.docker.network: federation
api-docs:
image: swaggerapi/swagger-ui:v3.37.2
environment:
- "API_URL=/swagger.yml"
ports:
- "8002:8080"
volumes:
- "./docs/swagger.yml:/usr/share/nginx/html/swagger.yml"
- "./docs/api:/usr/share/nginx/html/api"
typesense:
env_file:
- .env.dev
- .env
image: typesense/typesense:0.24.0
networks:
- internal
volumes:
- ./typesense/data:/data
command: --data-dir /data --enable-cors
profiles:
- typesense
# minio:
# image: minio/minio
# command: server /data
# volumes:
# - "./data/${COMPOSE_PROJECT_NAME-node1}/minio:/data"
# environment:
# - "MINIO_ACCESS_KEY=${AWS_ACCESS_KEY_ID-access_key}"
# - "MINIO_SECRET_KEY=${AWS_SECRET_ACCESS_KEY-secret_key}"
# - "MINIO_HTTP_TRACE: /dev/stdout"
# ports:
# - "9000:9000"
# networks:
# - federation
# - internal
networks:
internal:
federation:
name: federation
external: true

View File

@ -1,88 +0,0 @@
# cf https://stackoverflow.com/questions/10175812/how-to-create-a-self-signed-certificate-with-openssl/27931596#27931596
# create with openssl req -config openssl.conf -new -x509 -sha256 -newkey rsa:2048 -nodes -keyout test.key -days 365 -out test.crt
[ req ]
default_bits = 2048
default_keyfile = server-key.pem
distinguished_name = subject
req_extensions = req_ext
x509_extensions = x509_ext
string_mask = utf8only
# The Subject DN can be formed using X501 or RFC 4514 (see RFC 4519 for a description).
# It's sort of a mashup. For example, RFC 4514 does not provide emailAddress.
[ subject ]
countryName = Country Name (2 letter code)
countryName_default = US
stateOrProvinceName = State or Province Name (full name)
stateOrProvinceName_default = NY
localityName = Locality Name (eg, city)
localityName_default = New York
organizationName = Organization Name (eg, company)
organizationName_default = Example, LLC
# Use a friendly name here because it's presented to the user. The server's DNS
# names are placed in Subject Alternative Names. Plus, DNS names here are deprecated
# by both IETF and CA/Browser Forums. If you place a DNS name here, then you
# must include the DNS name in the SAN too (otherwise, Chrome and others that
# strictly follow the CA/Browser Baseline Requirements will fail).
commonName = Common Name (e.g. server FQDN or YOUR name)
commonName_default = Example Company
emailAddress = Email Address
emailAddress_default = test@example.com
# Section x509_ext is used when generating a self-signed certificate. I.e., openssl req -x509 ...
[ x509_ext ]
subjectKeyIdentifier = hash
authorityKeyIdentifier = keyid,issuer
# You only need digitalSignature below. *If* you don't allow
# RSA Key transport (i.e., you use ephemeral cipher suites), then
# omit keyEncipherment because that's key transport.
basicConstraints = CA:FALSE
keyUsage = digitalSignature, keyEncipherment
subjectAltName = @alternate_names
nsComment = "OpenSSL Generated Certificate"
# RFC 5280, Section 4.2.1.12 makes EKU optional
# CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused
# In either case, you probably only need serverAuth.
# extendedKeyUsage = serverAuth, clientAuth
# Section req_ext is used when generating a certificate signing request. I.e., openssl req ...
[ req_ext ]
subjectKeyIdentifier = hash
basicConstraints = CA:FALSE
keyUsage = digitalSignature, keyEncipherment
subjectAltName = @alternate_names
nsComment = "OpenSSL Generated Certificate"
# RFC 5280, Section 4.2.1.12 makes EKU optional
# CA/Browser Baseline Requirements, Appendix (B)(3)(G) makes me confused
# In either case, you probably only need serverAuth.
# extendedKeyUsage = serverAuth, clientAuth
[ alternate_names ]
DNS.1 = funkwhale.test
DNS.2 = node1.funkwhale.test
DNS.3 = node2.funkwhale.test
DNS.4 = node3.funkwhale.test
DNS.5 = localhost
DNS.6 = 127.0.0.1
# Add these if you need them. But usually you don't want them or
# need them in production. You may need them for development.
# DNS.5 = localhost
# DNS.6 = localhost.localdomain
# DNS.7 = 127.0.0.1
# IPv6 localhost
# DNS.8 = ::1

View File

@ -1,27 +0,0 @@
-----BEGIN CERTIFICATE-----
MIIEiTCCA3GgAwIBAgIUYxpKxPZIyG2n6qTPNESvYX/VpkowDQYJKoZIhvcNAQEL
BQAwfzELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk5ZMREwDwYDVQQHDAhOZXcgWW9y
azEVMBMGA1UECgwMRXhhbXBsZSwgTExDMRgwFgYDVQQDDA9FeGFtcGxlIENvbXBh
bnkxHzAdBgkqhkiG9w0BCQEWEHRlc3RAZXhhbXBsZS5jb20wHhcNMjAwMTA5MTM0
ODMyWhcNMzAwMTA2MTM0ODMyWjB/MQswCQYDVQQGEwJVUzELMAkGA1UECAwCTlkx
ETAPBgNVBAcMCE5ldyBZb3JrMRUwEwYDVQQKDAxFeGFtcGxlLCBMTEMxGDAWBgNV
BAMMD0V4YW1wbGUgQ29tcGFueTEfMB0GCSqGSIb3DQEJARYQdGVzdEBleGFtcGxl
LmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL1SKznmggF6IaCF
4Jq+CHl9x8tmteQkws+ix65J2Id104fSibrRK8If3LMbKlrmXrpXFIl1TvDGPJQd
emcJhy3tFXR0eRqPTyqOfwxVy4AW7plMpemsoDrk8uONtwUdwmNNRsPeppIIEov7
aj6SWGLzFUjoKwHbXsfy2ff80/9EP7zkJr1ts6VPbPafExDKT225OoANlZ4B3bOG
bviWcP5+HuWUolA1wcyIqLXpc9Lw1M6NsC252sgje9IBpx1NhGe5QNAg5p3BA75/
jbOQH0qSo1xm9cV+FNQJpBybnZ5wuUEgsPJ87MtTIbr0cM5IiarUr/kvyg4sywDV
e07Aaw0CAwEAAaOB/DCB+TAdBgNVHQ4EFgQU9wRYbfo7sxyDITOZCK0H8udIaiww
HwYDVR0jBBgwFoAU9wRYbfo7sxyDITOZCK0H8udIaiwwCQYDVR0TBAIwADALBgNV
HQ8EBAMCBaAwcQYDVR0RBGowaIIOZnVua3doYWxlLnRlc3SCFG5vZGUxLmZ1bmt3
aGFsZS50ZXN0ghRub2RlMi5mdW5rd2hhbGUudGVzdIIUbm9kZTMuZnVua3doYWxl
LnRlc3SCCWxvY2FsaG9zdIIJMTI3LjAuMC4xMCwGCWCGSAGG+EIBDQQfFh1PcGVu
U1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTANBgkqhkiG9w0BAQsFAAOCAQEAmXD3
pjwYG4M4NTixkxs9KvdQE5yDqOMEh5ZMygA7/kRbKrYLaFgDYYsNlRFqJNz3sDLe
jTU663Eur5TdwTNiksa11VB3qKCrgQIzhjOavofF0ODfaNBtHtBWwEcpq0t2MnWP
kWot/kqpUcphbx5zyzqHHjiSnNUu16PS/lepNZyQIrfSy23+WIEYEiTbDYqS38SX
p8Pc+i9hQyeOwo4CYnuoPcIRtL/zsFl7WnWKVqXqr7w0PDWus226xO2ZMMLRkMi5
scufzyGBJAsedlCXIbJ+azYlZ2yTr98C7ffEA1PSuhO7wTUim/LUo0UBC/bs6wpc
ZxMkNLp6IaHhNEIeyA==
-----END CERTIFICATE-----

View File

@ -1,28 +0,0 @@
-----BEGIN PRIVATE KEY-----
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQC9Uis55oIBeiGg
heCavgh5fcfLZrXkJMLPoseuSdiHddOH0om60SvCH9yzGypa5l66VxSJdU7wxjyU
HXpnCYct7RV0dHkaj08qjn8MVcuAFu6ZTKXprKA65PLjjbcFHcJjTUbD3qaSCBKL
+2o+klhi8xVI6CsB217H8tn3/NP/RD+85Ca9bbOlT2z2nxMQyk9tuTqADZWeAd2z
hm74lnD+fh7llKJQNcHMiKi16XPS8NTOjbAtudrII3vSAacdTYRnuUDQIOadwQO+
f42zkB9KkqNcZvXFfhTUCaQcm52ecLlBILDyfOzLUyG69HDOSImq1K/5L8oOLMsA
1XtOwGsNAgMBAAECggEAAbgEQnNQTNkiAwYUIvOEui2lKbiWACtBRYdRzshG2fv8
3qfPrk2F2y5U359ohAjBZWmy+wiAnfj+xc16tgLFImqbnkIMc2xHqLhAeQkyXshW
hDfI7dUuYzp+5gf8WGSLxkEGWnLkCkFegbzXmxfTC5rvX4kUEuE9/Ay9Y938wr2E
26qdRGxtfVsnFFkLXmj50W3AyF6nBRqZsaS2x8JpHTdw7AjevpL/au2nz1p1rTK9
6cR/V4Hy+dtXLgm0mLdg1G+CJmanjqiweaD4+m91rFTagFIFKf/t5i4IZMu/BLT7
OuylxvEnvZH4p3aSOF1ME0Uv4n2Pzb7Iov/ZZ52/AQKBgQD4qnuj4V3ASXqsraMH
m5MtpBlKAZkngWFesi5ZFijgyutfbIcCPwFOGuXmcaTMj9HtTIwAki+mxkN87UmV
ZM+em2ZJz6srRGvIGN5CMJaJtOPdh3iMjI5QdefJ5gkk207YKzKVw4sw5C+tr4Sr
Uyf3K5ttL+CS5bo26CVXGLlpwQKBgQDC55wrgIzC1VDoFU0N2AZqU31tpP2DTIxc
eu4PqEMF0hjtTh4R5JHR827PmcW3VCaZ1+Fet8+yJ5nZTHWJlFyIg3dIyebn9dau
Yy256S+/1tq7ACmTzw3tn/125g4Is6Sz8yHdZ1YejHqyrK8nmyxuHJVEpWgLI+Ru
U9qQAQqcTQKBgAYb2hG6lZ0FsRfQ5DJppgH3CBADXgnUadnzsqPJoZN0KLgdaGur
tJKAoqk4nX3RAq07tizFappEQKAvDCG5akhRNQAXM/NKKQOvaLZjjy8u3HIyw8lg
IpbjbqBNIGhhYtx4ozN+rEq1MF6p8y5qSo8N6TGTfYbeUebLaS9skhGBAoGAcmZF
uRb8CAPzODYAg0awBUq6DVhRYPbWUBXrk48cv9bgwLEgXzo9CPGMshe9AG1JNvWK
l/Dl3Nj3qZ8CQl2trocTxcqUWMRoXPVjyoJ/f2eZ/TcMMHDQ6RAGUvqXdC4VV3Y3
A2B7IPUts6A+Ms4W1w654o//sMJBeyyG1g12b+UCgYEA9oLi1licSby9pGuuZXqf
q5zyGzM3adQzOrUNR+GTOAnoQD7tcz2jTvlmn0yv66NzBoy8FAD+UNOiMGipe8Au
1Y3XVCeYho0crCRJP3/fLLmjNe1P/Ijgujpb5jEgCA91opWSpqRVjIspGU0YOApU
jCCVQukqEnud65ur9FLD4a8=
-----END PRIVATE KEY-----

View File

@ -1,26 +0,0 @@
# Traefik v1 configuration for the local *.funkwhale.test federation setup.
defaultEntryPoints = ["http", "https"]
# Log every request to stdout.
[accessLog]
################################################################
# Web configuration backend
################################################################
# Dashboard/API listener (published as port 8040 by docker-compose).
[web]
address = ":8040"
################################################################
# Docker configuration backend
################################################################
# Discover containers through the mounted Docker socket; only containers
# labelled traefik.enable=true are exposed (exposedbydefault = false).
[docker]
domain = "funkwhale.test"
watch = true
exposedbydefault = false
[entryPoints]
[entryPoints.http]
address = ":80"
# [entryPoints.http.redirect]
# NOTE(review): the redirect table above is commented out, so this
# `entryPoint` key now sits directly under [entryPoints.http] and is
# presumably ignored by Traefik — comment it out too, or restore the table
# to force an http->https redirect.
entryPoint = "http"
[entryPoints.https]
address = ":443"
# Self-signed development certificate mounted by docker-compose.
[entryPoints.https.tls]
[[entryPoints.https.tls.certificates]]
certFile = "/ssl/traefik.crt"
keyFile = "/ssl/traefik.key"

View File

@ -1,21 +0,0 @@
# Standalone Traefik (v1) front proxy for the multi-node federation dev setup.
version: "2.1"
services:
traefik:
image: traefik:alpine
volumes:
# Docker socket gives Traefik label-based container discovery.
- /var/run/docker.sock:/var/run/docker.sock
- ./traefik.toml:/traefik.toml
# Self-signed dev certificate (generated from openssl.conf).
- ./ssl/test.key:/ssl/traefik.key
- ./ssl/test.crt:/ssl/traefik.crt
ports:
- "80:80"
- "443:443"
# Traefik dashboard ([web] address in traefik.toml).
- "8040:8040"
networks:
federation:
networks:
# Shared, externally created network each Funkwhale dev node joins.
federation:
name: federation
external: true

1
docs/.dockerignore Normal file
View File

@ -0,0 +1 @@
.venv/

1
docs/.gitignore vendored
View File

@ -1,2 +1,3 @@
.doctrees/
_panels_static/
.venv

13
docs/Dockerfile Normal file
View File

@ -0,0 +1,13 @@
# Build image for the Funkwhale documentation (Sphinx driven through Poetry).
# NOTE(review): the base image is untagged — pin a tag/digest for reproducible
# builds once a known-good version is chosen.
FROM mwalbeck/python-poetry
ENV DEBIAN_FRONTEND=noninteractive
# Dedicated virtualenv; putting its bin/ first on PATH means every later RUN
# (and the running container) uses it — no `activate` needed.
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
# Keep the apt lists in a build cache mount instead of the image layer.
# apt-get is the stable scripting CLI (hadolint DL3027); && fails the build
# early if the update step breaks; --no-install-recommends keeps it small.
RUN --mount=type=cache,target=/var/lib/apt/lists \
    apt-get update && \
    apt-get install -y --no-install-recommends \
        curl \
        make
COPY . /src
WORKDIR /src/docs
# Sourcing `activate` inside a RUN would not persist to later layers; the PATH
# entry above already targets the venv, so only create it here.
RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV}
RUN make install

View File

@ -30,7 +30,7 @@ $(BUILD_DIR):
# Dev
dev: $(VENV)
poetry run sphinx-autobuild . /tmp/_build/ --port 8001
poetry run sphinx-autobuild . /tmp/_build/ --host 0.0.0.0 --port 8001
# I18n
LOCALES = en_GB en_US fr

View File

@ -2,25 +2,12 @@
{% block document %}
{% if language is not none %}
<div class="translation-hint">
<i class="fa fa-times" id="translation-closer" aria-hidden="true"></i>
<h1>Something wrong with the translation?</h1>
<p>
Help us to improve it on our
<a
href="https://translate.funkwhale.audio/projects/documentation/{{ pagename | replace("/", "-") }}"
target="_blank">translation platform</a>
</p>
</div>
{% endif %}
{% if current_version and current_version.name == "develop" %}
<div class="admonition warning">
<p class="admonition-title">Warning</p>
<p>
This documentation only applies for the development version of Funkwhale!
<a href="{{ vpathto('stable') }}">Visit the docs for the latest release.</a>
<a href="{{ vpathto('stable') }}">Visit the stable documentation for the latest release.</a>
</p>
</div>
{% endif %}

View File

@ -37,7 +37,7 @@ Running `fix_federation_ids` with the `--no-dry-run` flag is irreversible. Make
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_federation_ids https://old-url https://new-url --no-dry-run --no-input
docker compose run --rm api funkwhale-manage fix_federation_ids https://old-url https://new-url --no-dry-run --no-input
```
:::

View File

@ -19,7 +19,7 @@ sudo systemctl restart funkwhale.target
:sync: docker
```{code-block} sh
sudo docker compose restart
docker compose restart
```
:::

View File

@ -99,7 +99,7 @@ To make the job of writing and debugging MRF policies easier, we provide a manag
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage mrf_check --list
docker compose run --rm api funkwhale-manage mrf_check --list
```
:::
@ -124,7 +124,7 @@ To make the job of writing and debugging MRF policies easier, we provide a manag
```{code-block} sh
export MRF_MESSAGE='{"actor": "https://normal.domain/@alice", "type": "Create", "object": {"type": "Follow"}}'
echo $MRF_MESSAGE | sudo docker compose run --rm api funkwhale-manage mrf_check inbox - -p blocked_follow_domains
echo $MRF_MESSAGE | docker compose run --rm api funkwhale-manage mrf_check inbox - -p blocked_follow_domains
```
::::
@ -148,7 +148,7 @@ To make the job of writing and debugging MRF policies easier, we provide a manag
```{code-block} sh
export MRF_MESSAGE='{"actor": "https://botdomain.org/@bob", "type": "Create", "object": {"type": "Follow"}}'
echo $MRF_MESSAGE | sudo docker compose run --rm api funkwhale-manage mrf_check inbox - -p blocked_follow_domains
echo $MRF_MESSAGE | docker compose run --rm api funkwhale-manage mrf_check inbox - -p blocked_follow_domains
```
:::
@ -175,7 +175,7 @@ To make the job of writing and debugging MRF policies easier, we provide a manag
export ACTIVITY_UUID="06208aea-c687-4e8b-aefd-22f1c3f76039"
echo $MRF_MESSAGE | sudo docker compose run --rm api funkwhale-manage mrf_check inbox $ACTIVITY_UUID -p blocked_follow_domains
echo $MRF_MESSAGE | docker compose run --rm api funkwhale-manage mrf_check inbox $ACTIVITY_UUID -p blocked_follow_domains
```
@ -199,7 +199,7 @@ venv/bin/funkwhale-manage mrf_check --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage mrf_check --help
docker compose run --rm api funkwhale-manage mrf_check --help
```
:::

View File

@ -162,7 +162,7 @@ Serving files from an object store requires some changes to the reverse proxy.
:sync: docker
```{code-block} sh
sudo docker compose restart
docker compose restart
sudo systemctl restart nginx
```

View File

@ -60,7 +60,7 @@ Celery uses a `prefork` pool by default. This enables the server to process many
2. Restart Celery.
```{code-block} sh
sudo docker compose restart celerybeat
docker compose restart celerybeat
```
:::

View File

@ -24,7 +24,7 @@ venv/bin/funkwhale-manage import_files --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage import_files --help
docker compose run --rm api funkwhale-manage import_files --help
```
:::
@ -188,7 +188,7 @@ Funkwhale imports the music in your storage directory into the specified library
2. Run your import command against your music storage directory:
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place
docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place
```
Funkwhale imports the music in your storage directory into the specified library.
@ -250,7 +250,7 @@ venv/bin/funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nf
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place --watch
docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place --watch
```
:::
@ -281,7 +281,7 @@ venv/bin/funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nf
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place --watch --prune
docker compose run --rm api funkwhale-manage import_files $LIBRARY_ID "/srv/funkwhale/data/music/nfsshare/" --recursive --noinput --in-place --watch --prune
```
:::

View File

@ -126,19 +126,19 @@ Once you've filled in your environment file, you can set up Funkwhale. Follow th
```{code-block} sh
cd /srv/funkwhale
sudo docker compose pull
docker compose pull
```
2. Bring up the database container so you can run the database migrations.
```{code-block} sh
sudo docker compose up -d postgres
docker compose up -d postgres
```
3. Run the database migrations.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage migrate
docker compose run --rm api funkwhale-manage migrate
```
````{note}
@ -154,13 +154,13 @@ Once you've filled in your environment file, you can set up Funkwhale. Follow th
4. Create your superuser.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users create --superuser
docker compose run --rm api funkwhale-manage fw users create --superuser
```
5. Launch all the containers to bring up your pod.
```{code-block} sh
sudo docker compose up -d
docker compose up -d
```
That's it! Your Funkwhale pod is now up and running.
@ -266,7 +266,7 @@ The frontend container ships default Nginx templates which serve content to the
6. Bring the `front` container up again to pick up the changes.
```{code-block} sh
sudo docker compose up -d front
docker compose up -d front
```
That's it! The container mounts your custom nginx files and uses its values to serve Funkwhale content. To revert to the default values, comment out the volumes by adding a `#` in front of them and bring the `front` container back up.

View File

@ -62,7 +62,7 @@ On your {term}`destination server`, follow the [installation guide](docker.md).
Once you have finished the installation, stop the Funkwhale services. These shouldn't be running when you copy your existing data over.
```{code-block} sh
sudo docker compose stop
docker compose stop
```
:::
@ -87,7 +87,7 @@ sudo -u postgres -H pg_dump funkwhale > /srv/funkwhale/dump.sql
:sync: docker
```{code-block} sh
sudo docker compose exec postgres pg_dumpall -c -U postgres > dump.sql
docker compose exec postgres pg_dumpall -c -U postgres > dump.sql
```
:::
@ -157,19 +157,19 @@ You need to initialize the postgres container on your {term}`destination server`
CREATE USER funkwhale; \
GRANT ALL PRIVILEGES ON DATABASE funkwhale TO funkwhale;" > init.sql # Create an init.sql file with the correct permissions
sudo docker compose run --rm postgres psql -U postgres -d postgres < "init.sql" # Import the init.sql file
docker compose run --rm postgres psql -U postgres -d postgres < "init.sql" # Import the init.sql file
```
2. Import your database backup.
```{code-block} sh
sudo docker compose run --rm postgres psql -U postgres -d postgres < "dump.sql"
docker compose run --rm postgres psql -U postgres -d postgres < "dump.sql"
```
3. When the import finishes, run the `funkwhale-manage migrate` command to set up the database.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage migrate
docker compose run --rm api funkwhale-manage migrate
```
:::
@ -198,7 +198,7 @@ sudo systemctl start funkwhale.target
:sync: docker
```{code-block} sh
sudo docker compose up -d
docker compose up -d
```
:::

View File

@ -47,7 +47,7 @@ venv/bin/funkwhale-manage create_library username1 --name="Library 1" --privacy-
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage create_library username1 --name="Library 1" --privacy-level="everyone"
docker compose run --rm api funkwhale-manage create_library username1 --name="Library 1" --privacy-level="everyone"
```
:::
@ -79,7 +79,7 @@ venv/bin/funkwhale-manage create_library username1
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage create_library username1
docker compose run --rm api funkwhale-manage create_library username1
```
:::
@ -111,7 +111,7 @@ venv/bin/funkwhale-manage create_library username1 --name="Library 1" --privacy-
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage create_library username1 --name="Library 1" --privacy-level="everyone"
docker compose run --rm api funkwhale-manage create_library username1 --name="Library 1" --privacy-level="everyone"
```
:::

View File

@ -25,7 +25,7 @@ venv/bin/funkwhale-manage check_inplace_files
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage check_inplace_files
docker compose run --rm api funkwhale-manage check_inplace_files
```
:::

View File

@ -23,7 +23,7 @@ venv/bin/funkwhale-manage fix_uploads --mimetype
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_uploads --mimetype
docker compose run --rm api funkwhale-manage fix_uploads --mimetype
```
:::
@ -48,7 +48,7 @@ venv/bin/funkwhale-manage fix_uploads --audio-data
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_uploads --audio-data
docker compose run --rm api funkwhale-manage fix_uploads --audio-data
```
:::
@ -73,7 +73,7 @@ venv/bin/funkwhale-manage fix_uploads --size
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_uploads --size
docker compose run --rm api funkwhale-manage fix_uploads --size
```
:::
@ -98,7 +98,7 @@ venv/bin/funkwhale-manage fix_uploads --checksum
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_uploads --checksum
docker compose run --rm api funkwhale-manage fix_uploads --checksum
```
:::
@ -123,7 +123,7 @@ venv/bin/funkwhale-manage fix_uploads --batch-size 500
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fix_uploads --batch-size 500
docker compose run --rm api funkwhale-manage fix_uploads --batch-size 500
```
:::

View File

@ -29,7 +29,7 @@ venv/bin/funkwhale-manage prune_library --tracks
:sync: docker
```bash
sudo docker compose run --rm api funkwhale-manage prune_library --tracks
docker compose run --rm api funkwhale-manage prune_library --tracks
```
:::
@ -52,7 +52,7 @@ venv/bin/funkwhale-manage prune_library --albums
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage prune_library --albums
docker compose run --rm api funkwhale-manage prune_library --albums
```
:::
@ -75,7 +75,7 @@ venv/bin/funkwhale-manage prune_library --artists
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage prune_library --artists
docker compose run --rm api funkwhale-manage prune_library --artists
```
:::
@ -98,7 +98,7 @@ venv/bin/funkwhale-manage prune_library --tracks --albums --artists
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage prune_library --tracks --albums --artists
docker compose run --rm api funkwhale-manage prune_library --tracks --albums --artists
```
:::
@ -121,7 +121,7 @@ venv/bin/funkwhale-manage prune_library --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage prune_library --help
docker compose run --rm api funkwhale-manage prune_library --help
```
:::

View File

@ -25,7 +25,7 @@ venv/bin/funkwhale-manage prune_non_mbid_content
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage prune_non_mbid_content
docker compose run --rm api funkwhale-manage prune_non_mbid_content
```
:::

View File

@ -45,7 +45,7 @@ To add tags to untagged albums:
3. Run the `funkwhale-manage` command line interface to generate tags for untagged albums.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw albums add-tags-from-tracks
docker compose run --rm api funkwhale-manage fw albums add-tags-from-tracks
```
:::
@ -88,7 +88,7 @@ To add tags to untagged artists:
3. Run the `funkwhale-manage` command line interface to generate tags for untagged artists.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw artists add-tags-from-tracks
docker compose run --rm api funkwhale-manage fw artists add-tags-from-tracks
```
:::

View File

@ -53,7 +53,7 @@ To generate new thumbnails:
4. Run the `funkwhale-manage` command line interface to regenerate the thumbnails.
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw media generate-thumbnails
docker compose run --rm api funkwhale-manage fw media generate-thumbnails
```
:::

View File

@ -60,7 +60,7 @@ This command doesn't move files. It only updates the location of the file to its
3. Run the `funkwhale-manage` command line interface to update your in-place imports.
```{code-block} console
$ sudo docker compose run --rm api funkwhale-manage inplace_to_s3 --no-dry-run
$ docker compose run --rm api funkwhale-manage inplace_to_s3 --no-dry-run
```
:::
@ -101,7 +101,7 @@ This command doesn't move files. It only updates the location of the file to its
3. Run the `funkwhale-manage` command line interface to update your in-place imports.
```{code-block} console
$ sudo docker compose run --rm api funkwhale-manage inplace_to_s3 --source "/music" --no-dry-run
$ docker compose run --rm api funkwhale-manage inplace_to_s3 --source "/music" --no-dry-run
```
:::
@ -144,7 +144,7 @@ All in-place imports in the `/music` folder are updated to reference the `/music
3. Run the `funkwhale-manage` command line interface to update your in-place imports.
```{code-block} console
$ sudo docker compose run --rm api funkwhale-manage inplace_to_s3 --source "/music" --target "/new_import" --no-dry-run
$ docker compose run --rm api funkwhale-manage inplace_to_s3 --source "/music" --target "/new_import" --no-dry-run
```
:::

View File

@ -25,7 +25,7 @@ venv/bin/funkwhale-manage fw users create
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users create
docker compose run --rm api funkwhale-manage fw users create
```
:::
@ -48,7 +48,7 @@ venv/bin/funkwhale-manage fw users create --username <username> --email <user em
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users create --username <username> --email <user email> -p ""
docker compose run --rm api funkwhale-manage fw users create --username <username> --email <user email> -p ""
```
:::
@ -73,7 +73,7 @@ venv/bin/funkwhale-manage fw users create --username <username> --email <user em
```{code-block} sh
export FUNKWHALE_CLI_USER_PASSWORD=<password>
sudo docker compose run --rm api funkwhale-manage fw users create --username <username> --email <user email>
docker compose run --rm api funkwhale-manage fw users create --username <username> --email <user email>
```
:::
@ -96,7 +96,7 @@ venv/bin/funkwhale-manage fw users --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users --help
docker compose run --rm api funkwhale-manage fw users --help
```
:::
@ -123,7 +123,7 @@ venv/bin/funkwhale-manage fw users set --upload-quota 500 <user>
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --upload-quota 500 <user>
docker compose run --rm api funkwhale-manage fw users set --upload-quota 500 <user>
```
:::
@ -146,7 +146,7 @@ venv/bin/funkwhale-manage fw users set --staff --superuser <user 1> <user 2>
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --staff --superuser <user 1> <user 2>
docker compose run --rm api funkwhale-manage fw users set --staff --superuser <user 1> <user 2>
```
:::
@ -169,7 +169,7 @@ venv/bin/funkwhale-manage fw users set --no-staff --no-superuser <user>
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --no-staff --no-superuser <user>
docker compose run --rm api funkwhale-manage fw users set --no-staff --no-superuser <user>
```
:::
@ -192,7 +192,7 @@ venv/bin/funkwhale-manage fw users set --permission-moderation <user>
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --permission-moderation <user>
docker compose run --rm api funkwhale-manage fw users set --permission-moderation <user>
```
:::
@ -215,7 +215,7 @@ venv/bin/funkwhale-manage fw users set --password "<password>" <user>
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --password "<password>" <user>
docker compose run --rm api funkwhale-manage fw users set --password "<password>" <user>
```
:::
@ -240,7 +240,7 @@ venv/bin/funkwhale-manage fw users set <user>
```{code-block} sh
export FUNKWHALE_CLI_USER_UPDATE_PASSWORD=<password>
sudo docker compose run --rm api funkwhale-manage fw users set <user>
docker compose run --rm api funkwhale-manage fw users set <user>
```
:::
@ -263,7 +263,7 @@ venv/bin/funkwhale-manage fw users set --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users set --help
docker compose run --rm api funkwhale-manage fw users set --help
```
:::
@ -290,7 +290,7 @@ venv/bin/funkwhale-manage fw users rm <user>
:sync: docker
```{code-block} py
sudo docker compose run --rm api funkwhale-manage fw users rm <user>
docker compose run --rm api funkwhale-manage fw users rm <user>
```
:::
@ -315,7 +315,7 @@ venv/bin/funkwhale-manage fw users rm --hard <user>
:sync: docker
```{code-block} py
sudo docker compose run --rm api funkwhale-manage fw users rm --hard <user>
docker compose run --rm api funkwhale-manage fw users rm --hard <user>
```
:::
@ -338,7 +338,7 @@ venv/bin/funkwhale-manage fw users rm --help
:sync: docker
```{code-block} sh
sudo docker compose run --rm api funkwhale-manage fw users rm --help
docker compose run --rm api funkwhale-manage fw users rm --help
```
:::

View File

@ -35,7 +35,7 @@ Follow this guide to migrate a mono-container installation to a multi-container
1. Create a backup of your Funkwhale database. We will import this into the new postgres container later.
```{code-block} sh
sudo docker compose exec funkwhale /usr/bin/pg_dumpall -U funkwhale > db_dump.sql
docker compose exec funkwhale /usr/bin/pg_dumpall -U funkwhale > db_dump.sql
```
## Stop your Funkwhale instance
@ -43,7 +43,7 @@ Follow this guide to migrate a mono-container installation to a multi-container
1. Stop all Funkwhale services. This ensures that no data is changed while you migrate your instance.
```{code-block} sh
sudo docker compose down
docker compose down
```
## Prepare the multi-container setup

View File

@ -47,7 +47,7 @@ journalctl -xn -u funkwhale-server
:sync: docker
```{code-block} sh
sudo docker compose logs -f --tail=50 api # Follow the last 50 messages
docker compose logs -f --tail=50 api # Follow the last 50 messages
```
:::
@ -70,7 +70,7 @@ journalctl -xn -u funkwhale-worker
:sync: docker
```{code-block} sh
sudo docker compose logs -f --tail=50 celery # Follow the last 50 messages
docker compose logs -f --tail=50 celery # Follow the last 50 messages
```
:::
@ -150,7 +150,7 @@ If your Funkwhale server uses more memory than expected, you can check the footp
:sync: docker
```{code-block} sh
sudo docker compose restart
docker compose restart
```
:::
@ -189,7 +189,7 @@ To disable memory tracing:
:sync: docker
```{code-block} sh
sudo docker compose restart
docker compose restart
```
:::

View File

@ -24,7 +24,7 @@ Before you remove any data, you need to stop the Funkwhale containers.
2. Stop the containers
```{code-block} sh
sudo docker compose down
docker compose down
```
## Remove the reverse proxy
@ -80,7 +80,7 @@ This action is __irreversible__. Make sure you have [backed up your data](../upg
Once you have stopped the containers, you can delete all containers and associated volumes.
```{code-block} sh
sudo docker compose rm -fsv
docker compose rm -fsv
```
## Remove the Funkwhale directory

View File

@ -21,13 +21,13 @@ Before performing big changes, we recommend you back up your database and media
1. Stop the running containers:
```console
$ sudo docker compose down
$ docker compose down
```
2. Dump the database to a backup file:
```console
$ sudo docker compose run --rm postgres pg_dump -U postgres postgres > dump_`date +%d-%m-%Y"_"%H_%M_%S`.sql
$ docker compose run --rm postgres pg_dump -U postgres postgres > dump_`date +%d-%m-%Y"_"%H_%M_%S`.sql
```
:::
@ -133,19 +133,19 @@ To restore your database, do the following:
1. Restore your database backup.
```console
$ sudo docker compose run --rm -T postgres psql -U postgres postgres < "/path/to/your/backup/dump.sql"
$ docker compose run --rm -T postgres psql -U postgres postgres < "/path/to/your/backup/dump.sql"
```
2. Run the `funkwhale-manage migrate` command to set up the database.
```console
$ sudo docker compose run --rm api funkwhale-manage migrate
$ docker compose run --rm api funkwhale-manage migrate
```
3. Restart the services.
```console
$ sudo docker compose up -d
$ docker compose up -d
```
:::

View File

@ -36,7 +36,7 @@ Try to keep your tests small and focused. Each test should test a single functio
Test files must target a module and follow the `funkwhale_api` directory structure. If you write tests for `funkwhale_api/myapp/views.py`, you should put them in `tests/myapp/test_views.py`.
```
We provide utilities and fixtures to make writing tests as easy as possible. You can see the list of available fixtures by running `sudo docker compose -f dev.yml run --rm api pytest --fixtures`.
We provide utilities and fixtures to make writing tests as easy as possible. You can see the list of available fixtures by running `docker compose run --rm api pytest --fixtures`.
### Factories
@ -134,17 +134,17 @@ def test_downgrade_not_superuser_skips_email(factories, mocker):
You can run all tests in the pytest suite with the following command:
```sh
sudo docker compose -f dev.yml run --rm api pytest
docker compose run --rm api pytest
```
Run a specific test file by calling pytest against it:
```sh
sudo docker compose -f dev.yml run --rm api pytest tests/music/test_models.py
docker compose run --rm api pytest tests/music/test_models.py
```
You can check the full list of options by passing the `-h` flag:
```sh
sudo docker compose -f dev.yml run --rm api pytest -h
docker compose run --rm api pytest -h
```

View File

@ -26,11 +26,11 @@ The Funkwhale frontend contains some tests to catch errors before changes go liv
To run the test suite, run the following command:
```sh
sudo docker compose -f dev.yml run --rm front yarn test:unit
docker compose run --rm front yarn test:unit
```
To run tests as you make changes, launch the test suite with the `-w` flag:
```sh
sudo docker compose -f dev.yml run --rm front yarn test:unit -w
docker compose run --rm front yarn test:unit -w
```

View File

@ -40,7 +40,7 @@ You can install third-party plugins using the `funkwhale-manage` command line in
:::{tab-item} Docker
```{code-block} shell
sudo docker compose run --rm api funkwhale-manage fw plugins install https://plugin_url.zip
docker compose run --rm api funkwhale-manage fw plugins install https://plugin_url.zip
```
:::

View File

@ -2,9 +2,13 @@
Funkwhale can be run in Docker containers for local development. You can work on any part of the Funkwhale codebase and run the container setup to test your changes. To work with Docker:
## Prerequisites
1. [Install Docker](https://docs.docker.com/install)
2. [Install docker compose](https://docs.docker.com/compose/install)
3. Clone the Funkwhale repository to your system. The `develop` branch is checked out by default
2. [Install Docker Compose](https://docs.docker.com/compose/install)
3. [Install mkcert](https://github.com/FiloSottile/mkcert#installation)
4. [Install pre-commit](https://pre-commit.com/#install)
5. Clone the Funkwhale repository to your system. The `develop` branch is checked out by default.
::::{tab-set}
@ -28,167 +32,373 @@ Funkwhale can be run in Docker containers for local development. You can work on
::::
## Set up your Docker environment
6. Activate the pre-commit hook:
```sh
pre-commit install
```
7. Finally, initialise the environment:
```sh
cp .env.example .env
```
````{note}
## Set up the Docker environment
Funkwhale provides a `dev.yml` file that contains the required docker compose setup. You need to pass the `-f dev.yml` flag when you run docker compose commands to ensure it uses this file. If you don't want to add this each time, you can export it as a `COMPOSE_FILE` variable:
```sh
export COMPOSE_FILE=dev.yml
```{note}
Funkwhale provides a `compose.yml` file following the default file naming convention of a Docker Compose manifest. For Linux users, we assume that you finished the post-install steps to [manage Docker as a non-root user](https://docs.docker.com/engine/install/linux-postinstall/#manage-docker-as-a-non-root-user).
```
````
To set up your Docker environment:
1. Create a `.env` file to enable customization of your setup.
1. Create a network for federation support via the web proxy:
```sh
docker network create web
```
2. Then build the application containers. Run this command any time there are upstream changes or dependency changes to ensure you're up-to-date.
```sh
touch .env
docker compose build
```
2. Add the following variables to load images and enable access to Django admin pages:
## Set up auxiliary services
```text
MEDIA_URL=http://localhost:8000/media/
STATIC_URL=http://localhost:8000/staticfiles/
To support ActivityPub in the local development environment, we use a
combination of auxiliary services that provide DNS-based discovery, local email delivery and web/TLS termination. This also has the benefit that we can talk to
our development instance(s) using regular domain names.
The needed certificate is generated and installed to system and
browser with `mkcert`. Dynamic DNS resolution of local domain names in the
`funkwhale.test` zone is provided by dnsmasq. Proxying secure web traffic
between the containers and between the host and the containers is provided by
Træfik.
The services bind to the following ports on the default Docker bridge network:
| Service | Description | Protocol | Port(s) |
| ---------------------------------------------------- | ------------------------------------- | -------- | --------------- |
| [dnsmasq](https://dnsmasq.org/docs/dnsmasq-man.html) | Name server and recursive resolver | DNS | 172.17.0.1:53 |
| [Træfik v2](https://doc.traefik.io/traefik/v2.11/) | TLS offloader and web proxy | HTTP | 172.17.0.1:80 |
| &nbsp; | &nbsp; | HTTPS | 172.17.0.1:443 |
| &nbsp; | API dashboard | HTTP | 172.17.0.1:8008 |
| [Mailpit](https://mailpit.axllent.org/docs/) | Mail-delivery agent (MDA), Nullmailer | SMTP | 172.17.0.1:1025 |
| &nbsp; | &nbsp; | HTTP | 172.17.0.1:8025 |
1. Create a wildcard certificate for the Common Name (CN) `funkwhale.test` and
the Subject Alternative Name (SAN) `*.funkwhale.test` which will be
installed into your system and browser trust stores with:
```sh
mkcert -install -cert-file compose/var/test.crt -key-file compose/var/test.key "funkwhale.test" "*.funkwhale.test"
```
It will be used by Træfik to secure connections, which is needed for
ActivityPub to work locally.
3. Create a network for federation support
Then run the network services used for convenient access to application
containers.
2. Launch the Træfik web proxy, the dnsmasq resolver and the nullmailer using
the `net` manifest:
```sh
sudo docker network create federation
docker compose -f compose.net.yml up -d
```
Once you've set everything up, you need to build the containers. Run this command any time there are upstream changes or dependency changes to ensure you're up-to-date.
<details><summary>Manage the networking services with regular <a href="https://docs.docker.com/reference/cli/docker/compose/" target="_blank">Compose life cycle commands</a>.</summary>
:::{hint}
```sh
docker compose -f compose.net.yml config
docker compose -f compose.net.yml ps
docker compose -f compose.net.yml stop
docker compose -f compose.net.yml rm
docker compose -f compose.net.yml down
docker compose -f compose.net.yml …
```
:::
</details>
3. Add the DNS search domain for `~funkwhale.test` to your system. This allows your system to resolve our domain names `funkwhale.funkwhale.test`, `node1.funkwhale.test`, `node2.…`, `…` to the IP address of the Træfik reverse proxy listening at `172.17.0.1`.
::::{tab-set}
:::{tab-item} Linux
Considering your Linux uses systemd-resolved for local DNS resolution, apply:
```sh
sudo resolvectl dns docker0 172.17.0.1
sudo resolvectl domain docker0 '~funkwhale.test.'
```
This is a temporary setting that will be lost after a reboot.
A superuser of the system can persist this setting by providing a
systemd service that `BindsTo=` the `docker0` device. This requires `sudo`
privilege.
```sh
sudo sh -c "umask 133; tee /etc/systemd/system/funkwhale-dns-docker0.service" <<< "[Unit]
Description=Funkwhale per-link DNS configuration for docker0
BindsTo=sys-subsystem-net-devices-docker0.device
After=sys-subsystem-net-devices-docker0.device
[Service]
Type=oneshot
ExecStart=/usr/bin/resolvectl dns docker0 172.17.0.1
ExecStart=/usr/bin/resolvectl domain docker0 '~funkwhale.test.'
ExecStopPost=/usr/bin/resolvectl revert docker0
RemainAfterExit=yes
[Install]
WantedBy=sys-subsystem-net-devices-docker0.device
"
sudo systemctl daemon-reload
sudo systemctl enable --now funkwhale-dns-docker0.service
```
This gives you a systemd unit, whose life cycle is bound to the `docker0`
network device.
```sh
systemctl status \
funkwhale-dns-docker0.service \
sys-subsystem-net-devices-docker0.device
```
Please refer to the manual of your distribution for other configurations,
e.g. with system installations of netplan, systemd-networkd, NetworkManager, resolvconf or dnsmasq. Ensure the search domain is set to `funkwhale.test.`
and the nameserver address is set to `172.17.0.1`.
:::
:::{tab-item} Mac
To set up `172.17.0.1` as the DNS resolver for the `funkwhale.test` zone on
your macOS system, please follow the instructions at
[macOS: Using Custom DNS Resolvers](https://vninja.net/2020/02/06/macos-custom-dns-resolvers/).
For [Docker on Mac](https://docs.docker.com/desktop/install/mac-install/) you
will also need to install and use [recap/docker-mac-routes](https://github.com/recap/docker-mac-routes)
each time the Docker VM is restarted.
For [OrbStack](https://orbstack.dev/) make sure:
- to configure the
[Container IP ranges](https://docs.orbstack.dev/docker/network#container-ip-ranges)
of the Docker daemon to resemble the default Docker configuration. This
helps to recreate the expected environment for DNS + HTTPS networking.
E.g.:
```json
{
"bip": "172.17.0.1/23",
"default-address-pools": [
{ "base": "172.17.0.0/19", "size": 23 },
{ "base": "172.18.0.0/19", "size": 23 }
]
}
```
- to disable its
[HTTPS for containers](https://docs.orbstack.dev/features/https)
function, as we are supplying our own Træfik instance.
:::
::::
:::{hint}
You can now reach your Træfik dashboard at
[http://172.17.0.1:8008/dashboard/](http://172.17.0.1:8008/dashboard/). The DNS
server will answer requests to `172.17.0.1:53`. The SMTP MDA listens on
`172.17.0.1:1025` and has a web interface at
[http://172.17.0.1:8025/](http://172.17.0.1:8025/)
When all works as expected, you can also access
[https://traefik.funkwhale.test/dashboard/](https://traefik.funkwhale.test/dashboard/)
and [https://mailpit.funkwhale.test/](https://mailpit.funkwhale.test/).
:::
:::{note}
If your `docker0` network has running containers not belonging to Funkwhale
already attached to it, comment out the `net.helpers.docker0.yml` rule in
`compose.net.yml`. Then restart the networking stack with
`docker compose -f compose.net.yml up -d`.
:::
## Set up application services
Once you have set up the dependencies, launch all services to start developing:
```sh
sudo docker compose -f dev.yml build
docker compose up -d
```
## Set up the database
Funkwhale relies on a postgresql database to store information. To set this up, you need to run the `funkwhale-manage migrate` command:
```sh
sudo docker compose -f dev.yml run --rm api funkwhale-manage migrate
```
This command creates all the required tables. You need to run this whenever there are changes to the API schema. You can run this at any time without causing issues.
## Set up local data
You need to create some local data to mimic a production environment.
1. Create a superuser so you can log in to your local app:
```sh
sudo docker compose -f dev.yml run --rm api funkwhale-manage fw users create --superuser
```
2. Add some fake data to populate the database. The following command creates 25 artists with random albums, tracks, and metadata.
```sh
artists=25 # Adds 25 fake artists
command="from funkwhale_api.music import fake_data; fake_data.create_data($artists)"
echo $command | sudo docker compose -f dev.yml run --rm -T api funkwhale-manage shell -i python
```
## Manage services
Once you have set up your containers, launch all services to start working on them:
```sh
sudo docker compose -f dev.yml up front api nginx celeryworker
```
Find instructions for [starting multiple instances for federation](#running-multiple-instances) further below.
:::{tip}
This gives you access to the following:
- The Funkwhale webapp on `http://localhost:8000`
- The Funkwhale API on `http://localhost:8000/api/v1`
- The Django admin interface on `http://localhost:8000/api/admin`
- The Funkwhale web app on [https://funkwhale.funkwhale.test](https://funkwhale.funkwhale.test)
- The Funkwhale API on [https://funkwhale.funkwhale.test/api/v1](https://funkwhale.funkwhale.test/api/v1)
- The Django admin interface on [https://funkwhale.funkwhale.test/api/admin](https://funkwhale.funkwhale.test/api/admin)
:::
Create a local superuser to be able to login and to manage the service:
```sh
docker compose run --rm api funkwhale-manage fw users create --superuser
```
Review the configuration:
```sh
docker compose config
```
### Lifecycle
Recycle individual containers:
```sh
docker compose rm -sf api celeryworker; docker compose up -d api celeryworker
```
Once you're done with the containers, you can stop them all:
```sh
sudo docker compose -f dev.yml stop
docker compose stop
```
If you want to destroy your containers, run the following:
```sh
sudo docker compose -f dev.yml down -v
docker compose down
```
## Set up federation support
Destroy all state of your containers:
Working on federation features requires some additional setup. You need to do the following:
```sh
docker compose down --volumes
```
1. Update your DNS resolver to resolve all your .dev hostnames locally
2. Set up a reverse proxy (such as traefik) to catch .dev requests with a TLS certificate
3. Set up two or more local instances
Remove all state of all Funkwhale-related containers, incl. from additional
instances:
To resolve hostnames locally, run the following:
```sh
rm -rf .state/
```
### Running multiple instances
Set up as many different projects as you need. Make sure the
`COMPOSE_PROJECT_NAME` and `VUE_PORT` variables are unique per instance.
```sh
export COMPOSE_PROJECT_NAME=node2
# VUE_PORT this has to be unique for each instance
export VUE_PORT=1234
docker compose run --rm api funkwhale-manage fw users create --superuser
docker compose up -d
```
You can access your project at `https://{COMPOSE_PROJECT_NAME}.funkwhale.test`.
:::{admonition} Running and accessing multiple instances in parallel
:class: hint
You may as well address the different Compose projects by using ad hoc
environment variables:
```
COMPOSE_PROJECT_NAME=node1 VUE_PORT=1234 docker compose run --rm api funkwhale-manage fw users create --superuser
COMPOSE_PROJECT_NAME=node1 VUE_PORT=1234 docker compose up -d
```
The `node1` instance will be available at [https://node1.funkwhale.test](https://node1.funkwhale.test).
```
COMPOSE_PROJECT_NAME=node2 VUE_PORT=1235 docker compose run --rm api funkwhale-manage fw users create --superuser
COMPOSE_PROJECT_NAME=node2 VUE_PORT=1235 docker compose up -d
```
The `node2` instance will be available at [https://node2.funkwhale.test](https://node2.funkwhale.test).
Proceed freely with different sets of values for `COMPOSE_PROJECT_NAME` and
`VUE_PORT`.
:::
::::{tab-set}
:::{tab-item} dnsmasq
:::{tab-item} Stop everything
In case you wanted to stop everything after a day's work, you can remove all
running containers:
```sh
echo "address=/test/172.17.0.1" | sudo tee /etc/dnsmasq.d/test.conf
sudo systemctl restart dnsmasq
docker compose -f compose.docs.yml rm -sf
docker compose -f compose.net.yml rm -sf
docker compose rm -sf
```
Repeat these steps for every additional instance:
```sh
COMPOSE_PROJECT_NAME=node1 docker compose rm -sf
COMPOSE_PROJECT_NAME=node2 docker compose rm -sf
COMPOSE_PROJECT_NAME=… docker compose rm -sf
```
:::
:::{tab-item} NetworkManager
:::{tab-item} Discover projects and containers
List all currently running Compose projects to get an overview:
```sh
echo "address=/test/172.17.0.1" | sudo tee /etc/NetworkManager/dnsmasq.d/test.conf
sudo systemctl restart NetworkManager
docker compose ls
```
Show all projects for which containers exist, including stopped ones:
```sh
docker compose ls -a
```
Ultimately Docker gives you an overview of what is running:
```sh
docker ps
```
And also which containers exist but are not running:
```sh
docker ps -a
```
Refer to the [Docker CLI documentation](https://docs.docker.com/reference/cli/docker/)
to learn how else you may interact directly with containers, when needed.
:::
::::
To add a wildcard certificate, copy the test certificate from the `docker/ssl` folder. This certificate is a wildcard for `*.funkwhale.test`
## Local documentation
To build the documentation locally run:
```sh
sudo cp docker/ssl/test.crt /usr/local/share/ca-certificates/
sudo update-ca-certificates
docker compose -f compose.docs.yml up -d
```
To run a reverse proxy for your app:
The documentation is then accessible at [https://docs.funkwhale.test](https://docs.funkwhale.test). The OpenAPI schema is available at [https://openapi.funkwhale.test](https://openapi.funkwhale.test).
1. Add the following configuration to your `.env` file:
Fallback ports are available for the documentation at
[http://localhost:8001/](http://localhost:8001/) and for the OpenAPI schema at
[http://localhost:8002/](http://localhost:8002/).
```text
# Remove any port binding so you can specify this per-instance
VUE_PORT_BINDING=
# Disable certificate validation
EXTERNAL_REQUESTS_VERIFY_SSL=false
# Ensure all links use https
FUNKWHALE_PROTOCOL=https
# Disable host ports binding for the nginx container so that traefik handles everything
NGINX_PORTS_MAPPING=80
```
2. Launch traefik using the bundled configuration:
```sh
sudo docker compose -f docker/traefik.yml up -d
```
3. Set up as many different projects as you need. Make sure the `COMPOSE_PROJECT_NAME` and `VUE_PORT` variables are unique per instance
```sh
export COMPOSE_PROJECT_NAME=node2
export VUE_PORT=1234 # this has to be unique for each instance
sudo docker compose -f dev.yml run --rm api funkwhale-manage migrate
sudo docker compose -f dev.yml run --rm api funkwhale-manage fw users create --superuser
sudo docker compose -f dev.yml up nginx api front nginx api celeryworker
```
You can access your project at `https://{COMPOSE_PROJECT_NAME}.funkwhale.test`.
Maintain their life cycle with similar commands to those used to
[set up auxiliary services (point 2.)](#set-up-auxiliary-services).

View File

@ -9171,10 +9171,10 @@ msgstr ""
#~ "using-the-repository>`_"
#~ msgstr ""
#~ msgid "```sh sudo docker-compose down ```"
#~ msgid "```docker compose down ```"
#~ msgstr ""
#~ msgid "```sh sudo docker compose up -d ```"
#~ msgid "```docker compose up -d ```"
#~ msgstr ""
#~ msgid ""
@ -10079,4 +10079,3 @@ msgstr ""
#~ "shortcut to toggle the currently playing"
#~ " track as a favorite (#53)"
#~ msgstr ""

View File

@ -144,7 +144,7 @@ msgstr ""
msgid ""
"We provide utilities and fixtures to make writing tests as easy as "
"possible. You can see the list of available fixtures by running `sudo "
"docker compose -f dev.yml run --rm api pytest --fixtures`."
"docker compose run --rm api pytest --fixtures`."
msgstr ""
#: ../../developer_documentation/contribute/api.md:41
@ -183,4 +183,3 @@ msgstr ""
#: ../../developer_documentation/contribute/api.md:146
msgid "You can check the full list of options by passing the `-h` flag:"
msgstr ""

View File

@ -9171,10 +9171,10 @@ msgstr ""
#~ "using-the-repository>`_"
#~ msgstr ""
#~ msgid "```sh sudo docker-compose down ```"
#~ msgid "```docker compose down ```"
#~ msgstr ""
#~ msgid "```sh sudo docker compose up -d ```"
#~ msgid "```docker compose up -d ```"
#~ msgstr ""
#~ msgid ""
@ -10079,4 +10079,3 @@ msgstr ""
#~ "shortcut to toggle the currently playing"
#~ " track as a favorite (#53)"
#~ msgstr ""

View File

@ -144,7 +144,7 @@ msgstr ""
msgid ""
"We provide utilities and fixtures to make writing tests as easy as "
"possible. You can see the list of available fixtures by running `sudo "
"docker compose -f dev.yml run --rm api pytest --fixtures`."
"docker compose run --rm api pytest --fixtures`."
msgstr ""
#: ../../developer_documentation/contribute/api.md:41
@ -183,4 +183,3 @@ msgstr ""
#: ../../developer_documentation/contribute/api.md:146
msgid "You can check the full list of options by passing the `-h` flag:"
msgstr ""

View File

@ -9178,10 +9178,10 @@ msgstr ""
#~ "using-the-repository>`_"
#~ msgstr ""
#~ msgid "```sh sudo docker-compose down ```"
#~ msgid "```docker compose down ```"
#~ msgstr ""
#~ msgid "```sh sudo docker compose up -d ```"
#~ msgid "```docker compose up -d ```"
#~ msgstr ""
#~ msgid ""

View File

@ -144,7 +144,7 @@ msgstr ""
msgid ""
"We provide utilities and fixtures to make writing tests as easy as "
"possible. You can see the list of available fixtures by running `sudo "
"docker compose -f dev.yml run --rm api pytest --fixtures`."
"docker compose run --rm api pytest --fixtures`."
msgstr ""
#: ../../developer_documentation/contribute/api.md:41
@ -183,4 +183,3 @@ msgstr ""
#: ../../developer_documentation/contribute/api.md:146
msgid "You can check the full list of options by passing the `-h` flag:"
msgstr ""

View File

@ -109,7 +109,7 @@ msgid "Test files must target a module and follow the `funkwhale_api` directory
msgstr ""
#: ../../developer/contribute/api.md:39
msgid "We provide utilities and fixtures to make writing tests as easy as possible. You can see the list of available fixtures by running `sudo docker compose -f dev.yml run --rm api pytest --fixtures`."
msgid "We provide utilities and fixtures to make writing tests as easy as possible. You can see the list of available fixtures by running `docker compose run --rm api pytest --fixtures`."
msgstr ""
#: ../../developer/contribute/api.md:41

344
docs/poetry.lock generated
View File

@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "alabaster"
@ -13,13 +13,13 @@ files = [
[[package]]
name = "asgiref"
version = "3.7.2"
version = "3.8.1"
description = "ASGI specs, helper code, and adapters"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "asgiref-3.7.2-py3-none-any.whl", hash = "sha256:89b2ef2247e3b562a16eef663bc0e2e703ec6468e2fa8a5cd61cd449786d4f6e"},
{file = "asgiref-3.7.2.tar.gz", hash = "sha256:9e0ce3aa93a819ba5b45120216b23878cf6e8525eb3848653452b4192b92afed"},
{file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"},
{file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"},
]
[package.dependencies]
@ -30,13 +30,13 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"]
[[package]]
name = "babel"
version = "2.14.0"
version = "2.16.0"
description = "Internationalization utilities"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"},
{file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"},
{file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
{file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
]
[package.extras]
@ -44,13 +44,13 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "certifi"
version = "2024.2.2"
version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"},
{file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"},
{file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
{file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
[[package]]
@ -226,15 +226,18 @@ files = [
[[package]]
name = "idna"
version = "3.6"
version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
python-versions = ">=3.6"
files = [
{file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"},
{file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"},
{file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
{file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
[package.extras]
all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
[[package]]
name = "imagesize"
version = "1.4.1"
@ -248,32 +251,36 @@ files = [
[[package]]
name = "importlib-metadata"
version = "7.0.1"
version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"},
{file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"},
{file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
{file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
]
[package.dependencies]
zipp = ">=0.5"
zipp = ">=3.20"
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
type = ["pytest-mypy"]
[[package]]
name = "jinja2"
version = "3.1.3"
version = "3.1.4"
description = "A very fast and expressive template engine."
optional = false
python-versions = ">=3.7"
files = [
{file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"},
{file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"},
{file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"},
{file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"},
]
[package.dependencies]
@ -284,18 +291,17 @@ i18n = ["Babel (>=2.7)"]
[[package]]
name = "livereload"
version = "2.6.3"
version = "2.7.0"
description = "Python LiveReload is an awesome tool for web developers"
optional = false
python-versions = "*"
python-versions = ">=3.7"
files = [
{file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"},
{file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"},
{file = "livereload-2.7.0-py3-none-any.whl", hash = "sha256:19bee55aff51d5ade6ede0dc709189a0f904d3b906d3ea71641ed548acff3246"},
{file = "livereload-2.7.0.tar.gz", hash = "sha256:f4ba199ef93248902841e298670eebfe1aa9e148e19b343bc57dbf1b74de0513"},
]
[package.dependencies]
six = "*"
tornado = {version = "*", markers = "python_version > \"2.7\""}
tornado = "*"
[[package]]
name = "markdown-it-py"
@ -392,13 +398,13 @@ files = [
[[package]]
name = "mdit-py-plugins"
version = "0.4.0"
version = "0.4.2"
description = "Collection of plugins for markdown-it-py"
optional = false
python-versions = ">=3.8"
files = [
{file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"},
{file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"},
{file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"},
{file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"},
]
[package.dependencies]
@ -448,109 +454,111 @@ testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,
[[package]]
name = "packaging"
version = "23.2"
version = "24.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"},
{file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"},
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
]
[[package]]
name = "pygments"
version = "2.17.2"
version = "2.18.0"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"},
{file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"},
{file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
{file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
[package.extras]
plugins = ["importlib-metadata"]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pytz"
version = "2024.1"
version = "2024.2"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
{file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
{file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
]
[[package]]
name = "pyyaml"
version = "6.0.1"
version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
python-versions = ">=3.6"
python-versions = ">=3.8"
files = [
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
{file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
{file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
{file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
{file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
{file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
{file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
{file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
{file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
{file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
{file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
{file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
{file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
{file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
{file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
{file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
{file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
{file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
{file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
{file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
{file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
{file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
{file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
{file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
{file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
{file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
{file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
{file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
{file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
{file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
{file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
{file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
{file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
{file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
{file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
{file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
{file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
{file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
{file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
{file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
{file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
]
[[package]]
name = "requests"
version = "2.31.0"
version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
python-versions = ">=3.8"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
@ -565,30 +573,23 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
version = "69.0.3"
version = "75.1.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
{file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"},
{file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"},
{file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"},
{file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"]
core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"]
[[package]]
name = "snowballstemmer"
@ -737,49 +738,49 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"]
[[package]]
name = "sphinxcontrib-applehelp"
version = "1.0.8"
version = "2.0.0"
description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books"
optional = false
python-versions = ">=3.9"
files = [
{file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"},
{file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"},
{file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"},
{file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"},
]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
standalone = ["Sphinx (>=5)"]
test = ["pytest"]
[[package]]
name = "sphinxcontrib-devhelp"
version = "1.0.6"
version = "2.0.0"
description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents"
optional = false
python-versions = ">=3.9"
files = [
{file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"},
{file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"},
{file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"},
{file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"},
]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
standalone = ["Sphinx (>=5)"]
test = ["pytest"]
[[package]]
name = "sphinxcontrib-htmlhelp"
version = "2.0.5"
version = "2.1.0"
description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files"
optional = false
python-versions = ">=3.9"
files = [
{file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"},
{file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"},
{file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"},
{file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"},
]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
standalone = ["Sphinx (>=5)"]
test = ["html5lib", "pytest"]
@ -824,92 +825,91 @@ files = [
[[package]]
name = "sphinxcontrib-qthelp"
version = "1.0.7"
version = "2.0.0"
description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents"
optional = false
python-versions = ">=3.9"
files = [
{file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"},
{file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"},
{file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"},
{file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"},
]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
standalone = ["Sphinx (>=5)"]
test = ["pytest"]
test = ["defusedxml (>=0.7.1)", "pytest"]
[[package]]
name = "sphinxcontrib-serializinghtml"
version = "1.1.10"
version = "2.0.0"
description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)"
optional = false
python-versions = ">=3.9"
files = [
{file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"},
{file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"},
{file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"},
{file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"},
]
[package.extras]
lint = ["docutils-stubs", "flake8", "mypy"]
lint = ["mypy", "ruff (==0.5.5)", "types-docutils"]
standalone = ["Sphinx (>=5)"]
test = ["pytest"]
[[package]]
name = "sqlparse"
version = "0.4.4"
version = "0.5.1"
description = "A non-validating SQL parser."
optional = false
python-versions = ">=3.5"
python-versions = ">=3.8"
files = [
{file = "sqlparse-0.4.4-py3-none-any.whl", hash = "sha256:5430a4fe2ac7d0f93e66f1efc6e1338a41884b7ddf2a350cedd20ccc4d9d28f3"},
{file = "sqlparse-0.4.4.tar.gz", hash = "sha256:d446183e84b8349fa3061f0fe7f06ca94ba65b426946ffebe6e3e8295332420c"},
{file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"},
{file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"},
]
[package.extras]
dev = ["build", "flake8"]
dev = ["build", "hatch"]
doc = ["sphinx"]
test = ["pytest", "pytest-cov"]
[[package]]
name = "tornado"
version = "6.4"
version = "6.4.1"
description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
optional = false
python-versions = ">= 3.8"
python-versions = ">=3.8"
files = [
{file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"},
{file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"},
{file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"},
{file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"},
{file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"},
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"},
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"},
{file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"},
{file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"},
{file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"},
{file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"},
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
{file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"},
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"},
{file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"},
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"},
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"},
{file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"},
{file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"},
{file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"},
{file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
]
[[package]]
name = "typing-extensions"
version = "4.9.0"
version = "4.12.2"
description = "Backported and Experimental Type Hints for Python 3.8+"
optional = false
python-versions = ">=3.8"
files = [
{file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"},
{file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"},
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
]
[[package]]
name = "urllib3"
version = "2.2.0"
version = "2.2.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
{file = "urllib3-2.2.0-py3-none-any.whl", hash = "sha256:ce3711610ddce217e6d113a2732fafad960a03fd0318c91faa79481e35c11224"},
{file = "urllib3-2.2.0.tar.gz", hash = "sha256:051d961ad0c62a94e50ecf1af379c3aba230c66c710493493560c0c223c49f20"},
{file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
{file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
]
[package.extras]
@ -920,18 +920,22 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "zipp"
version = "3.17.0"
version = "3.20.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
{file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"},
{file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"},
{file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
{file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"]
testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
enabler = ["pytest-enabler (>=2.2)"]
test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"

1
docs/schema.yml Symbolic link
View File

@ -0,0 +1 @@
specs/nodeinfo21/schema.yml

View File

@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM node:18-alpine as builder
FROM --platform=$BUILDPLATFORM node:18-alpine AS builder
RUN apk add --no-cache jq bash coreutils python3 build-base
@ -13,7 +13,7 @@ COPY src /app/src
COPY index.html pwa-manifest.json tsconfig.json vite.config.ts /app/
RUN yarn build:deployment
FROM nginx:1.23.4-alpine as production
FROM nginx:1.23.4-alpine AS production
COPY --from=builder /app/dist /usr/share/nginx/html
COPY docker/funkwhale.conf.template /etc/nginx/templates/default.conf.template

View File

@ -7,7 +7,7 @@ env = Environment(
files = [
{
"output": "docker/nginx/conf.dev",
"output": "docker/etc/nginx/conf.dev",
"config": {"proxy_frontend": True, "inside_docker": True},
},
{