Compare commits

..

No commits in common. "develop" and "1.4.0-rc2" have entirely different histories.

629 changed files with 60475 additions and 131388 deletions


@ -7,7 +7,6 @@ nd
readby
serie
upto
afterall
# Names
nin


@ -67,6 +67,3 @@ mailhog
*.sqlite3
api/music
api/media
# Docker state
.state

.env.dev Normal file

@ -0,0 +1,23 @@
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
DJANGO_SETTINGS_MODULE=config.settings.local
DJANGO_SECRET_KEY=dev
C_FORCE_ROOT=true
FUNKWHALE_HOSTNAME=localhost
FUNKWHALE_PROTOCOL=http
PYTHONDONTWRITEBYTECODE=true
VUE_PORT=8080
MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
PYTHONTRACEMALLOC=0
MEDIA_ROOT=/data/media
# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True
# Customize to your needs
POSTGRES_VERSION=11
DEBUG=true
TYPESENSE_API_KEY="apikey"


@ -1,58 +0,0 @@
# api + celeryworker
DEBUG=True
DEFAULT_FROM_EMAIL=hello@funkwhale.test
FUNKWHALE_DOMAIN=funkwhale.test
FUNKWHALE_PROTOCOL=https
DJANGO_SECRET_KEY=dev
DJANGO_ALLOWED_HOSTS=.funkwhale.test,nginx
DJANGO_SETTINGS_MODULE=config.settings.local
DATABASE_URL=postgresql://postgres@postgres/postgres
CACHE_URL=redis://redis:6379/0
EMAIL_CONFIG=smtp://mailpit.funkwhale.test:1025
FORCE_HTTPS_URLS=True
EXTERNAL_REQUESTS_VERIFY_SSL=false
C_FORCE_ROOT=true
PYTHONDONTWRITEBYTECODE=true
PYTHONTRACEMALLOC=0
# api
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
LDAP_ENABLED=False
BROWSABLE_API_ENABLED=True
# celeryworker
CELERYD_CONCURRENCY=0
# api + nginx
STATIC_ROOT=/staticfiles
MEDIA_ROOT=/data/media
# api + Typesense
TYPESENSE_API_KEY=apikey
# front
HOST=0.0.0.0
VUE_PORT=8080
# nginx
NGINX_MAX_BODY_SIZE=10G
FUNKWHALE_API_HOST=api
FUNKWHALE_API_PORT=5000
FUNKWHALE_FRONT_IP=front
FUNKWHALE_FRONT_PORT=${VUE_PORT}
# postgres
POSTGRES_HOST_AUTH_METHOD=trust

.gitignore vendored

@ -1,5 +1,3 @@
/dist
### OSX ###
.DS_Store
.AppleDouble
@ -85,15 +83,10 @@ front/yarn-debug.log*
front/yarn-error.log*
front/tests/unit/coverage
front/tests/e2e/reports
front/test_results.xml
front/coverage/
front/selenium-debug.log
docs/_build
#Tauri
front/tauri/gen
/data/
.state
.env
po/*.po
@ -104,20 +97,10 @@ _build
# Docker
docker-bake.*.json
metadata.json
compose/var/test.*
# Linting
.eslintcache
tsconfig.tsbuildinfo
# Nix
.direnv/
.envrc
flake.nix
flake.lock
# Vscode
.vscode/
# Zed
.zed/


@ -144,13 +144,13 @@ find_broken_links:
--cache
--no-progress
--exclude-all-private
--exclude-mail
--exclude 'demo\.funkwhale\.audio'
--exclude 'nginx\.com'
--exclude-path 'docs/_templates/'
-- . || exit $?
require_changelog:
allow_failure: false
stage: lint
rules:
# Don't run on merge request that mention NOCHANGELOG or renovate bot commits
@ -175,8 +175,7 @@ lint_api:
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
- changes: [api/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
cache: *api_cache
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
before_script:
- cd api
- make install
@ -232,7 +231,7 @@ test_api:
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:$PYTHON_VERSION
parallel:
matrix:
- PYTHON_VERSION: ["3.10", "3.11", "3.12", "3.13"]
- PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11"]
services:
- name: postgres:15-alpine
command:
@ -249,7 +248,7 @@ test_api:
CACHE_URL: "redis://redis:6379/0"
before_script:
- cd api
- make install
- poetry install --all-extras
script:
- >
poetry run pytest
@ -289,7 +288,6 @@ test_front:
coverage_report:
coverage_format: cobertura
path: front/coverage/cobertura-coverage.xml
coverage: '/All files\s+(?:\|\s+((?:\d+\.)?\d+)\s+){4}.*/'
build_metadata:
stage: build
@ -315,7 +313,7 @@ test_integration:
interruptible: true
image:
name: cypress/included:13.6.4
name: cypress/included:12.14.0
entrypoint: [""]
cache:
- *front_cache
@ -339,7 +337,7 @@ build_api_schema:
# Add build_docs rules because it depends on the build_api_schema artifact
- changes: [docs/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
services:
- postgres:15-alpine
- redis:7-alpine
@ -353,15 +351,10 @@ build_api_schema:
API_TYPE: "v1"
before_script:
- cd api
- make install
- poetry install --all-extras
- poetry run funkwhale-manage migrate
script:
- poetry run funkwhale-manage spectacular --file ../docs/schema.yml
- diff ../docs/schema.yml ./funkwhale_api/common/schema.yml || (
echo "Schema files do not match! run sudo docker compose run --rm
api funkwhale-manage spectacular > ./api/funkwhale_api/common/schema.yml" &&
exit 1
)
artifacts:
expire_in: 2 weeks
paths:
@ -437,25 +430,6 @@ build_api:
paths:
- api
# build_tauri:
# stage: build
# rules:
# - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
# - changes: [front/**/*]
# image: $CI_REGISTRY/funkwhale/ci/node-tauri:18
# variables:
# <<: *keep_git_files_permissions
# before_script:
# - source /root/.cargo/env
# - yarn install
# script:
# - yarn tauri build --verbose
# artifacts:
# name: desktop_${CI_COMMIT_REF_NAME}
# paths:
# - front/tauri/target/release/bundle/appimage/*.AppImage
deploy_docs:
interruptible: false
extends: .ssh-agent
@ -499,8 +473,7 @@ docker:
--set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max,oci-mediatypes=false
--push
- if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_NAMESPACE == "funkwhale"
# We don't provide privileged runners to everyone, so we can only build docker images in the funkwhale group
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
variables:
BUILD_ARGS: >
--set *.platform=linux/amd64
@ -535,24 +508,3 @@ docker:
name: docker_metadata_${CI_COMMIT_REF_NAME}
paths:
- metadata.json
package:
stage: publish
needs:
- job: build_metadata
artifacts: true
- job: build_api
artifacts: true
- job: build_front
artifacts: true
# - job: build_tauri
# artifacts: true
rules:
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
image: $CI_REGISTRY/funkwhale/ci/python:3.11
variables:
<<: *keep_git_files_permissions
script:
- make package
- scripts/ci-upload-packages.sh


@ -16,7 +16,7 @@
"ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"],
"packageRules": [
{
"matchFileNames": ["api/*", "front/*", "docs/*"],
"matchPaths": ["api/*", "front/*", "docs/*"],
"additionalBranchPrefix": "{{parentDir}}-",
"semanticCommitScope": "{{parentDir}}"
},
@ -25,16 +25,6 @@
"branchConcurrentLimit": 0,
"prConcurrentLimit": 0
},
{
"matchBaseBranches": ["develop"],
"matchUpdateTypes": ["major"],
"prPriority": 2
},
{
"matchBaseBranches": ["develop"],
"matchUpdateTypes": ["minor"],
"prPriority": 1
},
{
"matchUpdateTypes": ["major", "minor"],
"matchBaseBranches": ["stable"],
@ -45,6 +35,12 @@
"matchBaseBranches": ["stable"],
"enabled": false
},
{
"matchUpdateTypes": ["patch", "pin", "digest"],
"matchBaseBranches": ["develop"],
"automerge": true,
"automergeType": "branch"
},
{
"matchManagers": ["npm"],
"addLabels": ["Area::Frontend"]
@ -54,20 +50,20 @@
"addLabels": ["Area::Backend"]
},
{
"groupName": "vueuse",
"matchDepNames": ["/^@vueuse/.*/"]
"matchPackagePatterns": ["^@vueuse/.*"],
"groupName": "vueuse"
},
{
"matchDepNames": ["channels", "channels-redis", "daphne"],
"matchPackageNames": ["channels", "channels-redis", "daphne"],
"groupName": "channels"
},
{
"matchDepNames": ["node"],
"matchPackageNames": ["node"],
"allowedVersions": "/\\d+[02468]$/"
},
{
"matchFileNames": ["deploy/docker-compose.yml"],
"matchDepNames": ["postgres"],
"matchFiles": ["deploy/docker-compose.yml"],
"matchPackageNames": ["postgres"],
"postUpgradeTasks": {
"commands": [
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
@ -76,7 +72,7 @@
}
},
{
"matchDepNames": ["python"],
"matchPackageNames": ["python"],
"rangeStrategy": "widen"
}
]


@ -14,7 +14,7 @@ tasks:
docker-compose up -d
poetry env use python
make install
poetry install
gp ports await 5432


@ -6,8 +6,6 @@ RUN sudo apt update -y \
RUN pyenv install 3.11 && pyenv global 3.11
RUN brew install neovim
RUN pip install poetry pre-commit jinja2 towncrier \
RUN pip install poetry pre-commit \
&& poetry config virtualenvs.create true \
&& poetry config virtualenvs.in-project true


@ -28,16 +28,15 @@ services:
environment:
- "NGINX_MAX_BODY_SIZE=100M"
- "FUNKWHALE_API_IP=host.docker.internal"
- "FUNKWHALE_API_HOST=host.docker.internal"
- "FUNKWHALE_API_PORT=5000"
- "FUNKWHALE_FRONT_IP=host.docker.internal"
- "FUNKWHALE_FRONT_PORT=8080"
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
- "FUNKWHALE_PROTOCOL=https"
volumes:
- ../data/media:/workspace/funkwhale/data/media:ro
- ../data/media:/protected/media:ro
- ../data/music:/music:ro
- ../data/staticfiles:/usr/share/nginx/html/staticfiles/:ro
- ../data/staticfiles:/staticfiles:ro
- ../deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
- ../docker/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
- ../front:/frontend:ro


@ -6,7 +6,6 @@ repos:
rev: v4.4.0
hooks:
- id: check-added-large-files
exclude: "api/funkwhale_api/common/schema.yml"
- id: check-case-conflict
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
@ -54,7 +53,7 @@ repos:
- id: isort
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
rev: 6.0.0
hooks:
- id: flake8
@ -63,7 +62,6 @@ repos:
hooks:
- id: prettier
files: \.(md|yml|yaml|json)$
exclude: "api/funkwhale_api/common/schema.yml"
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6


@ -9,13 +9,23 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.
<!-- towncrier -->
## 1.4.0 (2023-12-12)
## 1.4.0-rc2 (2023-11-30)
Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
Changes since 1.4.0-rc1:
Bugfixes:
- Fix broken nginx templates for docker setup (#2252)
- Fix docker builds in CI by using correct flag to disable cache
## 1.4.0-rc1 (2023-11-28)
Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
Features:
- Add a management command to generate dummy notifications for testing
- Add atom1.0 to node info services (#2085)
- Add basic cypress testing
- Add NodeInfo 2.1 (#2085)
@ -26,14 +36,14 @@ Features:
- Cache radio queryset into redis. New radio track endpoint for api v2 is /api/v2/radios/sessions/{radiosessionid}/tracks (#2135)
- Create a testing environment in production for ListenBrainz recommendation engine (troi-recommendation-playground) (#1861)
- Generate all nginx configurations from one template
- New management command to update Uploads which have been imported using --in-place and are now
stored in s3 (#2156)
- Only allow MusicBrainz tagged file on a pod (#2083)
- New management command to update Uploads which have been imported using --in-place and are now stored in s3 (#2156)
- Add option to only allow MusicBrainz tagged file on a pod (#2083)
- Prohibit the creation of new users using django's `createsuperuser` command in favor of our own CLI
entry point. Run `funkwhale-manage fw users create --superuser` instead. (#1288)
Enhancements:
- Add a management command to generate dummy notifications for testing
- Add custom logging functionality (#2155)
- Adding typesense container and api client (2104)
- Cache pip package in api docker builds (#2193)
@ -51,12 +61,9 @@ Bugfixes:
- `postgres > db_dump.sql` cannot be used if the postgres container is stopped. Update command.
- Avoid troi radio to give duplicates (#2231)
- Do not cache all requests to avoid missing updates #2258
- Fix broken nginx templates for docker setup (#2252)
- Fix help messages for running scripts using funkwhale-manage
- Fix missing og meta tags (#2208)
- Fix multiarch docker builds #2211
- Fix regression that prevents static files from being served in non-docker deployments (#2256)
- Fixed an issue where the copy button didn't copy the Embed code in the embed modal.
- Fixed an issue with the nginx templates that caused issues when connecting to websockets.
- Fixed development docker setup (2102)
@ -100,79 +107,6 @@ Other:
Removal:
- Drop support for python3.7
- This release doesn't support Debian 10 anymore. If you are still on Debian 10, we recommend
updating to a later version. Alternatively, install a supported Python version (>= Python 3.8). Python 3.11 is recommended.
Contributors to our Issues:
- AMoonRabbit
- Alexandra Parker
- ChengChung
- Ciarán Ainsworth
- Georg Krause
- Ghost User
- Johann Queuniet
- JuniorJPDJ
- Kasper Seweryn
- Kay Borowski
- Marcos Peña
- Mathieu Jourdan
- Nicolas Derive
- Virgile Robles
- jooola
- petitminion
- theit8514
Contributors to our Merge Requests:
- AMoonRabbit
- Alexander Dunkel
- Alexander Torre
- Ciarán Ainsworth
- Georg Krause
- JuniorJPDJ
- Kasper Seweryn
- Kay Borowski
- Marcos Peña
- Mathieu Jourdan
- Philipp Wolfer
- Virgile Robles
- interfect
- jooola
- petitminion
Committers:
- Aitor
- Alexander Dunkel
- alextprog
- Aznörth Niryn
- Ciarán Ainsworth
- dignny
- drakonicguy
- Fun.k.whale Trad
- Georg krause
- Georg Krause
- Jérémie Lorente
- jo
- jooola
- josé m
- Julian-Samuel Gebühr
- JuniorJPDJ
- Kasper Seweryn
- Marcos Peña
- Mathieu Jourdan
- Matteo Piovanelli
- Matyáš Caras
- MhP
- omarmaciasmolina
- petitminion
- Philipp Wolfer
- ppom
- Quentin PAGÈS
- rinenweb
- Thomas
- Transcriber allium
## 1.3.4 (2023-11-16)
@ -402,13 +336,13 @@ Update instructions:
2. Stop your containers using the **docker-compose** syntax.
```sh
docker compose down
sudo docker-compose down
```
3. Bring the containers back up using the **docker compose** syntax.
```sh
docker compose up -d
sudo docker compose up -d
```
After this you can continue to use the **docker compose** syntax for all Docker management tasks.


@ -17,41 +17,3 @@ docker-build: docker-metadata
build-metadata:
./scripts/build_metadata.py --format env | tee build_metadata.env
BUILD_DIR = dist
package:
rm -Rf $(BUILD_DIR)
mkdir -p $(BUILD_DIR)
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-api.tar.gz' \
--owner='root' \
--group='root' \
--exclude-vcs \
api/config \
api/funkwhale_api \
api/install_os_dependencies.sh \
api/manage.py \
api/poetry.lock \
api/pyproject.toml \
api/Readme.md
cd '$(BUILD_DIR)' && \
tar --extract --gunzip --file='funkwhale-api.tar.gz' && \
zip -q 'funkwhale-api.zip' -r api && \
rm -Rf api
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-front.tar.gz' \
--owner='root' \
--group='root' \
--exclude-vcs \
--transform='s/^front\/dist/front/' \
front/dist
cd '$(BUILD_DIR)' && \
tar --extract --gunzip --file='funkwhale-front.tar.gz' && \
zip -q 'funkwhale-front.zip' -r front && \
rm -Rf front
# cd '$(BUILD_DIR)' && \
# cp ../front/tauri/target/release/bundle/appimage/funkwhale_*.AppImage FunkwhaleDesktop.AppImage
cd '$(BUILD_DIR)' && sha256sum * > SHA256SUMS


@ -1 +0,0 @@
Dockerfile.alpine

api/Dockerfile Normal file

@ -0,0 +1,124 @@
FROM alpine:3.17 as requirements
# We need this additional step to avoid having poetry's deps interacting with our
# dependencies. This is only required until alpine 3.16 is released, since this
# allows us to install poetry as a package.
RUN set -eux; \
apk add --no-cache \
poetry \
py3-cryptography \
py3-pip \
python3
COPY pyproject.toml poetry.lock /
RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.17 as builder
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN set -eux; \
apk add --no-cache \
cargo \
curl \
gcc \
g++ \
git \
jpeg-dev \
libffi-dev \
libldap \
libxml2-dev \
libxslt-dev \
make \
musl-dev \
openldap-dev \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography=38.0.3-r1 \
py3-lxml=4.9.3-r1 \
py3-pillow=9.3.0-r0 \
py3-psycopg2=2.9.5-r0 \
py3-watchfiles=0.18.1-r0 \
python3-dev
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip; \
pip3 install setuptools wheel; \
# Currently we are unable to reliably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==38.0.3 \
lxml==4.9.3 \
pillow==9.3.0 \
psycopg2==2.9.5 \
watchfiles==0.18.1
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==38.0.3 \
lxml==4.9.3 \
pillow==9.3.0 \
psycopg2==2.9.5 \
watchfiles==0.18.1; \
fi
FROM alpine:3.17 as production
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
RUN set -eux; \
apk add --no-cache \
bash \
ffmpeg \
gettext \
jpeg-dev \
libldap \
libmagic \
libpq \
libxml2 \
libxslt \
py3-cryptography=38.0.3-r1 \
py3-lxml=4.9.3-r1 \
py3-pillow=9.3.0-r0 \
py3-psycopg2=2.9.5-r0 \
py3-watchfiles=0.18.1-r0 \
python3 \
tzdata
COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
ENV IS_DOCKER_SETUP=true
CMD ["./docker/server.sh"]


@ -1,137 +0,0 @@
FROM alpine:3.21 AS requirements
RUN set -eux; \
apk add --no-cache \
poetry \
py3-cryptography \
py3-pip \
python3
COPY pyproject.toml poetry.lock /
RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.21 AS builder
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN set -eux; \
apk add --no-cache \
cargo \
curl \
gcc \
g++ \
git \
jpeg-dev \
libffi-dev \
libldap \
libxml2-dev \
libxslt-dev \
make \
musl-dev \
openldap-dev \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
python3-dev \
gfortran \
libgfortran \
openblas-dev \
py3-scipy \
py3-scikit-learn;
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install setuptools wheel;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
# Currently we are unable to reliably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles|scipy|scikit-learn' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles \
scipy \
scikit-learn;
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles; \
fi
FROM alpine:3.21 AS production
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
RUN set -eux; \
apk add --no-cache \
bash \
ffmpeg \
gettext \
jpeg-dev \
libldap \
libmagic \
libpq \
libxml2 \
libxslt \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
py3-scipy \
py3-scikit-learn \
python3 \
tzdata
COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN apk add --no-cache gfortran
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
ENV IS_DOCKER_SETUP=true
CMD ["./docker/server.sh"]


@ -1,71 +0,0 @@
FROM python:3.13-slim AS builder
ARG POETRY_VERSION=1.8
ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=1
ENV POETRY_VIRTUALENVS_CREATE=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Tell Poetry where to place its cache and virtual environment
ENV POETRY_CACHE_DIR=/opt/.cache
RUN pip install "poetry==${POETRY_VERSION}"
RUN --mount=type=cache,target=/var/lib/apt/lists \
apt update; \
apt install -y \
build-essential \
python3-dev \
libldap-dev \
libsasl2-dev \
slapd \
ldap-utils \
tox \
lcov \
valgrind
WORKDIR /app
COPY pyproject.toml .
RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bin/activate
RUN --mount=type=cache,target=/opt/.cache \
poetry install --no-root --extras typesense
FROM python:3.13-slim AS runtime
ARG POETRY_VERSION=1.8
ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
RUN --mount=type=cache,target=/var/lib/apt/lists \
apt update; \
apt install -y \
ffmpeg \
gettext \
libjpeg-dev \
libldap-2.5-0 \
libmagic1 \
libpq5 \
libxml2 \
libxslt1.1
RUN pip install "poetry==${POETRY_VERSION}"
COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
WORKDIR /app
COPY . /app
RUN poetry install --extras typesense
CMD ["./docker/server.sh"]


@ -4,12 +4,11 @@ CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))
.PHONY: install lint
install:
poetry install --all-extras
poetry install
lint:
poetry run pylint \
--jobs=$(CPU_CORES) \
--output-format=colorized \
--recursive=true \
--disable=C,R,W,I \
config funkwhale_api tests


@ -299,31 +299,10 @@ def background_task(name):
# HOOKS
TRIGGER_THIRD_PARTY_UPLOAD = "third_party_upload"
"""
Called when a track is being listened to
"""
LISTENING_CREATED = "listening_created"
"""
Called when a track is being listened to
"""
LISTENING_SYNC = "listening_sync"
"""
Called by the task manager to trigger listening sync
"""
FAVORITE_CREATED = "favorite_created"
"""
Called when a track is being favorited
"""
FAVORITE_DELETED = "favorite_deleted"
"""
Called when a favorited track is being unfavorited
"""
FAVORITE_SYNC = "favorite_sync"
"""
Called by the task manager to trigger favorite sync
"""
SCAN = "scan"
"""


@ -1,7 +1,7 @@
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url
from django.core.asgi import get_asgi_application
from django.urls import re_path
from funkwhale_api.instance import consumers
@ -10,12 +10,7 @@ application = ProtocolTypeRouter(
# Empty for now (http->django views is added by default)
"websocket": AuthMiddlewareStack(
URLRouter(
[
re_path(
"^api/v1/activity$",
consumers.InstanceActivityConsumer.as_asgi(),
)
]
[url("^api/v1/activity$", consumers.InstanceActivityConsumer.as_asgi())]
)
),
"http": get_asgi_application(),


@ -1,3 +1,5 @@
import os
from drf_spectacular.contrib.django_oauth_toolkit import OpenApiAuthenticationExtension
from drf_spectacular.plumbing import build_bearer_security_scheme_object
@ -42,6 +44,7 @@ def custom_preprocessing_hook(endpoints):
filtered = []
# your modifications to the list of operations that are exposed in the schema
api_type = os.environ.get("API_TYPE", "v1")
for path, path_regex, method, callback in endpoints:
if path.startswith("/api/v1/providers"):
@ -53,7 +56,7 @@ def custom_preprocessing_hook(endpoints):
if path.startswith("/api/v1/oauth/authorize"):
continue
if path.startswith("/api/v1") or path.startswith("/api/v2"):
if path.startswith(f"/api/{api_type}"):
filtered.append((path, path_regex, method, callback))
return filtered
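For orientation: the `custom_preprocessing_hook` above filters which endpoints drf-spectacular exposes, based on the `API_TYPE` environment variable. A minimal sketch of how such a hook is registered, assuming it lives at `config.schema.custom_preprocessing_hook` (the module path is not shown in this diff):

```python
# Hypothetical registration sketch: drf-spectacular calls every hook listed in
# PREPROCESSING_HOOKS with the collected endpoints before generating the schema.
SPECTACULAR_SETTINGS = {
    "TITLE": "Funkwhale API",
    "PREPROCESSING_HOOKS": ["config.schema.custom_preprocessing_hook"],
}
```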


@ -2,7 +2,7 @@ import logging.config
import sys
import warnings
from collections import OrderedDict
from urllib.parse import urlparse, urlsplit
from urllib.parse import urlsplit
import environ
from celery.schedules import crontab
@ -114,7 +114,6 @@ else:
logger.info("Loaded env file at %s/.env", path)
break
FUNKWHALE_PLUGINS = env("FUNKWHALE_PLUGINS", default="")
FUNKWHALE_PLUGINS_PATH = env(
"FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/"
)
@ -225,16 +224,6 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) + [FUNKWHALE_HOSTNA
List of allowed hostnames for which the Funkwhale server will answer.
"""
CSRF_TRUSTED_ORIGINS = [
urlparse("//" + o, FUNKWHALE_PROTOCOL).geturl() for o in ALLOWED_HOSTS
]
"""
List of origins that are trusted for unsafe requests
We simply consider all allowed hosts to be trusted origins
See DJANGO_ALLOWED_HOSTS in .env.example for details
See https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
"""
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
@ -280,7 +269,6 @@ LOCAL_APPS = (
# Your stuff: custom apps go here
"funkwhale_api.instance",
"funkwhale_api.audio",
"funkwhale_api.contrib.listenbrainz",
"funkwhale_api.music",
"funkwhale_api.requests",
"funkwhale_api.favorites",
@ -315,7 +303,6 @@ MIDDLEWARE = (
tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True))
+ tuple(ADDITIONAL_MIDDLEWARES_BEFORE)
+ (
"allauth.account.middleware.AccountMiddleware",
"django.middleware.security.SecurityMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"corsheaders.middleware.CorsMiddleware",
@ -614,20 +601,7 @@ if AWS_ACCESS_KEY_ID:
"""
AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN", default=None)
"""
Custom domain for serving your S3 files.
Useful if your provider offers a CDN-like service for your bucket.
.. important::
The URL must not contain a scheme (:attr:`AWS_S3_URL_PROTOCOL` is
automatically prepended) nor a trailing slash.
"""
AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", default="https:")
"""
Protocol to use when constructing the custom domain (see :attr:`AWS_S3_CUSTOM_DOMAIN`)
.. important::
It must end with a `:`, remove `//`.
Custom domain to use for your S3 storage.
"""
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
"""
@ -856,7 +830,7 @@ If you're using password auth (the extra slash is important)
.. note::
If you want to use Redis over unix sockets, you also need to update
:attr:`CELERY_BROKER_URL`, because the scheme differs from the one used by
:attr:`CELERY_BROKER_URL`, because the scheme differ from the one used by
:attr:`CACHE_URL`.
"""
@ -907,7 +881,7 @@ to use a different server or use Redis sockets to connect.
Example:
- ``unix://127.0.0.1:6379/0``
- ``redis://127.0.0.1:6379/0``
- ``redis+socket:///run/redis/redis.sock?virtual_host=0``
"""
@ -968,29 +942,12 @@ CELERY_BEAT_SCHEDULE = {
),
"options": {"expires": 60 * 60},
},
"listenbrainz.trigger_listening_sync_with_listenbrainz": {
"task": "listenbrainz.trigger_listening_sync_with_listenbrainz",
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
"listenbrainz.trigger_favorite_sync_with_listenbrainz": {
"task": "listenbrainz.trigger_favorite_sync_with_listenbrainz",
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
"tags.update_musicbrainz_genre": {
"task": "tags.update_musicbrainz_genre",
"schedule": crontab(day_of_month="2", minute="30", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
}
if env.str("TYPESENSE_API_KEY", default=None):
CELERY_BEAT_SCHEDULE["typesense.build_canonical_index"] = {
"typesense.build_canonical_index": {
"task": "typesense.build_canonical_index",
"schedule": crontab(day_of_week="*/2", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
}
},
}
if env.bool("ADD_ALBUM_TAGS_FROM_TRACKS", default=True):
CELERY_BEAT_SCHEDULE["music.albums_set_tags_from_tracks"] = {
@ -1236,7 +1193,7 @@ if BROWSABLE_API_ENABLED:
"rest_framework.renderers.BrowsableAPIRenderer",
)
REST_AUTH = {
REST_AUTH_SERIALIZERS = {
"PASSWORD_RESET_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetSerializer", # noqa
"PASSWORD_RESET_CONFIRM_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetConfirmSerializer", # noqa
}
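Earlier in this settings diff, `CSRF_TRUSTED_ORIGINS` is derived by treating every allowed host as a trusted origin. A self-contained sketch of that construction with placeholder values (the hostnames below are illustrative, not taken from the diff):

```python
from urllib.parse import urlparse

# Placeholder values for illustration only.
FUNKWHALE_PROTOCOL = "https"
ALLOWED_HOSTS = ["funkwhale.test", "localhost"]

# Same construction as in the diff: prepend the pod protocol to each host.
CSRF_TRUSTED_ORIGINS = [
    urlparse("//" + host, FUNKWHALE_PROTOCOL).geturl() for host in ALLOWED_HOSTS
]
print(CSRF_TRUSTED_ORIGINS)  # ['https://funkwhale.test', 'https://localhost']
```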


@ -2,7 +2,8 @@
Local settings
- Run in Debug mode
- Add Django Debug Toolbar when INTERNAL_IPS are given and match the request
- Use console backend for e-mails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
@ -24,6 +25,11 @@ SECRET_KEY = env(
"DJANGO_SECRET_KEY", default="mc$&b=5j#6^bv7tld1gyjp2&+^-qrdy=0sw@r5sua*1zp4fmxc"
)
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = "localhost"
EMAIL_PORT = 1025
# django-debug-toolbar
# ------------------------------------------------------------------------------
@ -90,6 +96,8 @@ CELERY_TASK_ALWAYS_EAGER = False
# Your local stuff: Below this line define 3rd party library settings
CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
SPECTACULAR_SETTINGS = {
"TITLE": "Funkwhale API",
@ -142,16 +150,4 @@ MIDDLEWARE = (
"funkwhale_api.common.middleware.PymallocMiddleware",
) + MIDDLEWARE
REST_FRAMEWORK.update(
{
"TEST_REQUEST_RENDERER_CLASSES": [
"rest_framework.renderers.MultiPartRenderer",
"rest_framework.renderers.JSONRenderer",
"rest_framework.renderers.TemplateHTMLRenderer",
"funkwhale_api.playlists.renderers.PlaylistXspfRenderer",
],
}
)
# allows makemigrations and superuser creation
FORCE = env("FORCE", default=1)
TYPESENSE_API_KEY = "apikey"


@ -41,6 +41,14 @@ SECRET_KEY = env("DJANGO_SECRET_KEY")
# SESSION_COOKIE_HTTPONLY = True
# SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS
# END SITE CONFIGURATION
# Static Assets
# ------------------------
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"


@ -0,0 +1,9 @@
import os
os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")
from .common import * # noqa
DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"


@ -1,6 +1,7 @@
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from django.urls import include, path, re_path
from django.urls import include, path
from django.views import defaults as default_views
from config import plugins
@ -9,41 +10,34 @@ from funkwhale_api.common import admin
plugins_patterns = plugins.trigger_filter(plugins.URLS, [], enabled=True)
api_patterns = [
re_path("v1/", include("config.urls.api")),
re_path("v2/", include("config.urls.api_v2")),
re_path("subsonic/", include("config.urls.subsonic")),
url("v1/", include("config.urls.api")),
url("v2/", include("config.urls.api_v2")),
url("subsonic/", include("config.urls.subsonic")),
]
urlpatterns = [
# Django Admin, use {% url 'admin:index' %}
re_path(settings.ADMIN_URL, admin.site.urls),
re_path(r"^api/", include((api_patterns, "api"), namespace="api")),
re_path(
url(settings.ADMIN_URL, admin.site.urls),
url(r"^api/", include((api_patterns, "api"), namespace="api")),
url(
r"^",
include(
("funkwhale_api.federation.urls", "federation"), namespace="federation"
),
),
re_path(
r"^api/v1/auth/",
include("funkwhale_api.users.rest_auth_urls"),
),
re_path(
r"^api/v2/auth/",
include("funkwhale_api.users.rest_auth_urls"),
),
re_path(r"^accounts/", include("allauth.urls")),
url(r"^api/v1/auth/", include("funkwhale_api.users.rest_auth_urls")),
url(r"^accounts/", include("allauth.urls")),
] + plugins_patterns
if settings.DEBUG:
# This allows the error pages to be debugged during development: just visit
# these urls in the browser to see what these error pages look like.
urlpatterns += [
re_path(r"^400/$", default_views.bad_request),
re_path(r"^403/$", default_views.permission_denied),
re_path(r"^404/$", default_views.page_not_found),
re_path(r"^500/$", default_views.server_error),
url(r"^400/$", default_views.bad_request),
url(r"^403/$", default_views.permission_denied),
url(r"^404/$", default_views.page_not_found),
url(r"^500/$", default_views.server_error),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if "debug_toolbar" in settings.INSTALLED_APPS:
@ -55,5 +49,5 @@ if settings.DEBUG:
if "silk" in settings.INSTALLED_APPS:
urlpatterns = [
re_path(r"^api/silk/", include("silk.urls", namespace="silk"))
url(r"^api/silk/", include("silk.urls", namespace="silk"))
] + urlpatterns
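Most URL-related hunks in this comparison swap Django's deprecated `django.conf.urls.url` alias (removed in Django 4.0) for `django.urls.re_path`. The two take the same arguments, so the migration is mechanical; a minimal standalone sketch:

```python
from django.urls import re_path
from django.views import defaults as default_views

# Before (Django < 4.0): from django.conf.urls import url
#     url(r"^404/$", default_views.page_not_found)
# After: re_path is a drop-in replacement with an identical signature.
urlpatterns = [
    re_path(r"^404/$", default_views.page_not_found),
]
```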


@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from funkwhale_api.activity import views as activity_views
from funkwhale_api.audio import views as audio_views
@ -29,61 +28,61 @@ router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
v1_patterns = router.urls
v1_patterns += [
re_path(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
re_path(
url(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
url(
r"^instance/",
include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
),
re_path(
url(
r"^manage/",
include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
),
re_path(
url(
r"^moderation/",
include(
("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
),
),
re_path(
url(
r"^federation/",
include(
("funkwhale_api.federation.api_urls", "federation"), namespace="federation"
),
),
re_path(
url(
r"^providers/",
include(("funkwhale_api.providers.urls", "providers"), namespace="providers"),
),
re_path(
url(
r"^favorites/",
include(("funkwhale_api.favorites.urls", "favorites"), namespace="favorites"),
),
re_path(r"^search$", views.Search.as_view(), name="search"),
re_path(
url(r"^search$", views.Search.as_view(), name="search"),
url(
r"^radios/",
include(("funkwhale_api.radios.urls", "radios"), namespace="radios"),
),
re_path(
url(
r"^history/",
include(("funkwhale_api.history.urls", "history"), namespace="history"),
),
re_path(
url(
r"^",
include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
),
# XXX: remove if Funkwhale 1.1
re_path(
url(
r"^users/",
include(("funkwhale_api.users.api_urls", "users"), namespace="users-nested"),
),
re_path(
url(
r"^oauth/",
include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
),
re_path(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
re_path(
url(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
url(
r"^text-preview/?$", common_views.TextPreviewView.as_view(), name="text-preview"
),
]
urlpatterns = [re_path("", include((v1_patterns, "v1"), namespace="v1"))]
urlpatterns = [url("", include((v1_patterns, "v1"), namespace="v1"))]


@ -1,36 +1,19 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from funkwhale_api.common import routers as common_routers
from . import api
router = common_routers.OptionalSlashRouter()
v2_patterns = router.urls
v2_patterns += [
re_path(
url(
r"^instance/",
include(("funkwhale_api.instance.urls_v2", "instance"), namespace="instance"),
),
re_path(
url(
r"^radios/",
include(("funkwhale_api.radios.urls_v2", "radios"), namespace="radios"),
),
]
v2_paths = {
pattern.pattern.regex.pattern
for pattern in v2_patterns
if hasattr(pattern.pattern, "regex")
}
filtered_v1_patterns = [
pattern
for pattern in api.v1_patterns
if pattern.pattern.regex.pattern not in v2_paths
]
v2_patterns += filtered_v1_patterns
urlpatterns = [re_path("", include((v2_patterns, "v2"), namespace="v2"))]
urlpatterns = [url("", include((v2_patterns, "v2"), namespace="v2"))]


@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from rest_framework import routers
from rest_framework.urlpatterns import format_suffix_patterns
@ -9,9 +8,7 @@ subsonic_router = routers.SimpleRouter(trailing_slash=False)
subsonic_router.register(r"rest", SubsonicViewSet, basename="subsonic")
subsonic_patterns = format_suffix_patterns(subsonic_router.urls, allowed=["view"])
urlpatterns = [
re_path("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))
]
urlpatterns = [url("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))]
# urlpatterns = [
# url(


@ -9,5 +9,5 @@ funkwhale-manage migrate
exec gunicorn config.asgi:application \
--workers "${FUNKWHALE_WEB_WORKERS-1}" \
--worker-class uvicorn.workers.UvicornWorker \
--bind 0.0.0.0:"${FUNKWHALE_API_PORT}" \
--bind 0.0.0.0:5000 \
${GUNICORN_ARGS-}


@ -38,27 +38,13 @@ def combined_recent(limit, **kwargs):
def get_activity(user, limit=20):
query = fields.privacy_level_query(
user, "actor__user__privacy_level", "actor__user"
)
query = fields.privacy_level_query(user, lookup_field="user__privacy_level")
querysets = [
Listening.objects.filter(query)
.select_related(
"track",
"actor",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
Listening.objects.filter(query).select_related(
"track", "user", "track__artist", "track__album__artist"
),
TrackFavorite.objects.filter(query)
.select_related(
"track",
"actor",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
TrackFavorite.objects.filter(query).select_related(
"track", "user", "track__artist", "track__album__artist"
),
]
records = combined_recent(limit=limit, querysets=querysets)


@ -21,11 +21,7 @@ TAG_FILTER = common_filters.MultipleQueryFilter(method=filter_tags)
class ChannelFilter(moderation_filters.HiddenContentFilterSet):
q = fields.SearchFilter(
search_fields=[
"artist_credit__artist__name",
"actor__summary",
"actor__preferred_username",
]
search_fields=["artist__name", "actor__summary", "actor__preferred_username"]
)
tag = TAG_FILTER
scope = common_filters.ActorScopeFilter(actor_field="attributed_to", distinct=True)


@ -26,7 +26,6 @@ from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.music import tasks
from funkwhale_api.music.serializers import COVER_WRITE_FIELD, CoverField
from funkwhale_api.tags import models as tags_models
from funkwhale_api.tags import serializers as tags_serializers
@ -247,14 +246,11 @@ class SimpleChannelArtistSerializer(serializers.Serializer):
description = common_serializers.ContentSerializer(allow_null=True, required=False)
cover = CoverField(allow_null=True, required=False)
channel = serializers.UUIDField(allow_null=True, required=False)
tracks_count = serializers.SerializerMethodField(required=False)
tracks_count = serializers.IntegerField(source="_tracks_count", required=False)
tags = serializers.ListField(
child=serializers.CharField(), source="_prefetched_tagged_items", required=False
)
def get_tracks_count(self, o) -> int:
return getattr(o, "_tracks_count", 0)
class ChannelSerializer(serializers.ModelSerializer):
artist = SimpleChannelArtistSerializer()
@ -753,7 +749,7 @@ class RssFeedItemSerializer(serializers.Serializer):
else:
existing_track = (
music_models.Track.objects.filter(
uuid=expected_uuid, artist_credit__artist__channel=channel
uuid=expected_uuid, artist__channel=channel
)
.select_related("description", "attachment_cover")
.first()
@ -769,6 +765,7 @@ class RssFeedItemSerializer(serializers.Serializer):
"disc_number": validated_data.get("itunes_season", 1) or 1,
"position": validated_data.get("itunes_episode", 1) or 1,
"title": validated_data["title"],
"artist": channel.artist,
}
)
if "rights" in validated_data:
@ -804,21 +801,6 @@ class RssFeedItemSerializer(serializers.Serializer):
**track_kwargs,
defaults=track_defaults,
)
# channel only have one artist so we can safely update artist_credit
defaults = {
"artist": channel.artist,
"credit": channel.artist.name,
"joinphrase": "",
}
query = (
Q(artist=channel.artist) & Q(credit=channel.artist.name) & Q(joinphrase="")
)
artist_credit = tasks.get_best_candidate_or_create(
music_models.ArtistCredit, query, defaults, ["artist", "joinphrase"]
)
track.artist_credit.set([artist_credit[0]])
# optimisation for reducing SQL queries, because we cannot use select_related with
# update or create, so we restore the cache by hand
if existing_track:


@ -27,7 +27,7 @@ ARTIST_PREFETCH_QS = (
"attachment_cover",
)
.prefetch_related(music_views.TAG_PREFETCH)
.annotate(_tracks_count=Count("artist_credit__tracks"))
.annotate(_tracks_count=Count("tracks"))
)
@ -103,7 +103,7 @@ class ChannelViewSet(
queryset = super().get_queryset()
if self.action == "retrieve":
queryset = queryset.annotate(
_downloads_count=Sum("artist__artist_credit__tracks__downloads_count")
_downloads_count=Sum("artist__tracks__downloads_count")
)
return queryset
@ -192,6 +192,7 @@ class ChannelViewSet(
if object.attributed_to == actors.get_service_actor():
# external feed, we redirect to the canonical one
return http.HttpResponseRedirect(object.rss_url)
uploads = (
object.library.uploads.playable_by(None)
.prefetch_related(


@ -49,7 +49,6 @@ def handler_create_user(
utils.logger.warn("Unknown permission %s", permission)
utils.logger.debug("Creating actor…")
user.actor = models.create_actor(user)
models.create_user_libraries(user)
user.save()
return user


@ -1,6 +1,6 @@
from allauth.account.models import EmailAddress
from allauth.account.utils import send_email_confirmation
from django.core.cache import cache
from django.utils.translation import gettext as _
from django.utils.translation import ugettext as _
from oauth2_provider.contrib.rest_framework.authentication import (
OAuth2Authentication as BaseOAuth2Authentication,
)
@ -20,13 +20,9 @@ def resend_confirmation_email(request, user):
if cache.get(cache_key):
return False
# We do the sending of the confirmation by hand because we don't want to pass the request down
# to the email rendering, which would cause another UnverifiedEmail Exception and restart the sending
# again and again
email = EmailAddress.objects.get_for_user(user, user.email)
email.send_confirmation()
done = send_email_confirmation(request, user)
cache.set(cache_key, True, THROTTLE_DELAY)
return True
return done
class OAuth2Authentication(BaseOAuth2Authentication):


@ -24,20 +24,8 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
if user.is_anonymous:
return models.Q(**{lookup_field: "everyone"})
followers_query = models.Q(
**{
f"{lookup_field}": "followers",
f"{user_field}__actor__in": user.actor.get_approved_followings(),
}
)
# Federated TrackFavorite don't have an user associated with the trackfavorite.actor
no_user_query = models.Q(**{f"{user_field}__isnull": True})
return (
models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]})
| models.Q(**{lookup_field: "me", user_field: user})
| followers_query
| no_user_query
return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
**{lookup_field: "me", user_field: user}
)
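For context, this helper is consumed by `get_activity` in `funkwhale_api/activity/utils.py` earlier in this comparison; a shortened sketch of that call pattern (the import paths are assumptions, and the queryset optimisations from the diff are omitted):

```python
from funkwhale_api.common import fields
from funkwhale_api.history.models import Listening

def recent_listenings(user):
    # Build a Q object limiting rows to what `user` is allowed to see,
    # then filter the queryset with it.
    query = fields.privacy_level_query(
        user, "actor__user__privacy_level", "actor__user"
    )
    return Listening.objects.filter(query)
```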


@ -1,4 +1,5 @@
from django.conf import settings
import os
from django.contrib.auth.management.commands.createsuperuser import (
Command as BaseCommand,
)
@ -11,8 +12,7 @@ class Command(BaseCommand):
Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour.
We therefore prohibit the execution of the command.
"""
force = settings.FORCE
if not force == 1:
if not os.environ.get("FORCE") == "1":
raise CommandError(
"Running createsuperuser on your Funkwhale instance bypasses some of our checks "
"which can lead to unexpected behavior of your instance. We therefore suggest to "


@ -68,33 +68,22 @@ def create_taggable_items(dependency):
CONFIG = [
{
"id": "artist_credit",
"model": music_models.ArtistCredit,
"factory": "music.ArtistCredit",
"factory_kwargs": {"joinphrase": ""},
"depends_on": [
{"field": "artist", "id": "artists", "default_factor": 0.5},
],
},
{
"id": "tracks",
"model": music_models.Track,
"factory": "music.Track",
"factory_kwargs": {"album": None},
"factory_kwargs": {"artist": None, "album": None},
"depends_on": [
{"field": "album", "id": "albums", "default_factor": 0.1},
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.05},
{"field": "artist", "id": "artists", "default_factor": 0.05},
],
},
{
"id": "albums",
"model": music_models.Album,
"factory": "music.Album",
"factory_kwargs": {},
"depends_on": [
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.3}
],
"factory_kwargs": {"artist": None},
"depends_on": [{"field": "artist", "id": "artists", "default_factor": 0.3}],
},
{"id": "artists", "model": music_models.Artist, "factory": "music.Artist"},
{
@ -321,23 +310,12 @@ class Command(BaseCommand):
candidates = list(queryset.values_list("pk", flat=True))
picked_pks = [random.choice(candidates) for _ in objects]
picked_objects = {o.pk: o for o in queryset.filter(pk__in=picked_pks)}
saved_obj = []
for i, obj in enumerate(objects):
if create_dependencies:
value = random.choice(candidates)
else:
value = picked_objects[picked_pks[i]]
if dependency["field"] == "artist_credit":
obj.save()
obj.artist_credit.set([value])
saved_obj.append(obj)
else:
setattr(obj, dependency["field"], value)
if saved_obj:
return saved_obj
setattr(obj, dependency["field"], value)
if not handler:
objects = row["model"].objects.bulk_create(objects, batch_size=BATCH_SIZE)
results[row["id"]] = objects


@ -1,4 +1,5 @@
from django.conf import settings
import os
from django.core.management.base import CommandError
from django.core.management.commands.makemigrations import Command as BaseCommand
@ -10,8 +11,8 @@ class Command(BaseCommand):
We ensure the command is disabled, unless a specific env var is provided.
"""
force = settings.FORCE
if not force == 1:
force = os.environ.get("FORCE") == "1"
if not force:
raise CommandError(
"Running makemigrations on your Funkwhale instance can have desastrous"
" consequences. This command is disabled, and should only be run in "


@ -10,7 +10,7 @@ class Command(BaseCommand):
self.help = "Helper to generate randomized testdata"
self.type_choices = {"notifications": self.handle_notifications}
self.missing_args_message = f"Please specify one of the following sub-commands: {*self.type_choices.keys(), }"
self.missing_args_message = f"Please specify one of the following sub-commands: { *self.type_choices.keys(), }"
def add_arguments(self, parser):
subparsers = parser.add_subparsers(dest="subcommand")


@ -60,12 +60,12 @@ class NullsLastSQLCompiler(SQLCompiler):
class NullsLastQuery(models.sql.query.Query):
"""Use a custom compiler to inject 'NULLS LAST' (for PostgreSQL)."""
def get_compiler(self, using=None, connection=None, elide_empty=True):
def get_compiler(self, using=None, connection=None):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return NullsLastSQLCompiler(self, connection, using, elide_empty)
return NullsLastSQLCompiler(self, connection, using)
class NullsLastQuerySet(models.QuerySet):
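The `NullsLastSQLCompiler`/`NullsLastQuery` pair above injects `NULLS LAST` at the SQL-compiler level. For comparison only (this is not what the diff does), individual orderings can request the same behaviour through `F` expressions; a tiny sketch with placeholder names:

```python
from django.db.models import F

# `release_date` is a placeholder field name, not taken from this diff.
# On PostgreSQL this orders rows descending with NULL values sorted last.
ordering = F("release_date").desc(nulls_last=True)
# some_queryset.order_by(ordering)
```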


@ -56,59 +56,3 @@ class OwnerPermission(BasePermission):
if not owner or not request.user.is_authenticated or owner != request.user:
raise owner_exception
return True
class PrivacyLevelPermission(BasePermission):
"""
Ensure the requesting actor has access to the object, considering the privacy level configuration
of the user.
request.user is None if the request comes from an actor; otherwise it is anonymous if the user is not authenticated.
"""
def has_object_permission(self, request, view, obj):
if (
not hasattr(obj, "user")
and hasattr(obj, "actor")
and not obj.actor.is_local
):
# it's a remote actor object. It should be public.
# But we could trigger an update of the remote actor data
# to avoid leaking data (#2326)
return True
privacy_level = (
obj.actor.user.privacy_level
if hasattr(obj, "actor")
else obj.user.privacy_level
)
obj_actor = obj.actor if hasattr(obj, "actor") else obj.user.actor
if privacy_level == "everyone":
return True
# user is anonymous
if hasattr(request, "actor"):
request_actor = request.actor
elif request.user and request.user.is_authenticated:
request_actor = request.user.actor
else:
return False
if privacy_level == "instance":
# user is local
if request.user and hasattr(request.user, "actor"):
return True
elif hasattr(request, "actor") and request.actor and request.actor.is_local:
return True
else:
return False
elif privacy_level == "me" and obj_actor == request_actor:
return True
elif privacy_level == "followers" and (
request_actor in obj.user.actor.get_approved_followers()
):
return True
else:
return False


@ -2,7 +2,7 @@ import json
from django import forms
from django.conf import settings
from django.forms import JSONField
from django.contrib.postgres.forms import JSONField
from dynamic_preferences import serializers, types
from dynamic_preferences.registries import global_preferences_registry
@ -93,6 +93,7 @@ class SerializedPreference(types.BasePreferenceType):
serializer
"""
serializer = JSONSerializer
data_serializer_class = None
field_class = JSONField
widget = forms.Textarea

File diff suppressed because it is too large


@ -5,8 +5,8 @@ import os
import PIL
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.encoding import smart_str
from django.utils.translation import gettext_lazy as _
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
@ -52,7 +52,7 @@ class RelatedField(serializers.RelatedField):
self.fail(
"does_not_exist",
related_field_name=self.related_field_name,
value=smart_str(data),
value=smart_text(data),
)
except (TypeError, ValueError):
self.fail("invalid")
@ -293,17 +293,7 @@ class AttachmentSerializer(serializers.Serializer):
file = StripExifImageField(write_only=True)
urls = serializers.SerializerMethodField()
@extend_schema_field(
{
"type": "object",
"properties": {
"original": {"type": "string"},
"small_square_crop": {"type": "string"},
"medium_square_crop": {"type": "string"},
"large_square_crop": {"type": "string"},
},
}
)
@extend_schema_field(OpenApiTypes.OBJECT)
def get_urls(self, o):
urls = {}
urls["source"] = o.url


@ -1,6 +1,6 @@
import django.dispatch
""" Required args: mutation """
mutation_created = django.dispatch.Signal()
""" Required args: mutation, old_is_approved, new_is_approved """
mutation_updated = django.dispatch.Signal()
mutation_created = django.dispatch.Signal(providing_args=["mutation"])
mutation_updated = django.dispatch.Signal(
providing_args=["mutation", "old_is_approved", "new_is_approved"]
)
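This hunk tracks Django 4.0's removal of `Signal(providing_args=...)`: the expected keyword arguments now live in comments and are simply passed to `send()`. A small self-contained sketch (the receiver and the payload are illustrative, not from this diff):

```python
import django.dispatch

# Required args: mutation
mutation_created = django.dispatch.Signal()

def on_mutation_created(sender, mutation, **kwargs):
    # Receivers still get the documented keyword arguments at send() time.
    print("mutation created:", mutation)

mutation_created.connect(on_mutation_created)
mutation_created.send(sender=None, mutation={"type": "update"})
```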


@ -6,7 +6,7 @@ from django.core.exceptions import ValidationError
from django.core.files.images import get_image_dimensions
from django.template.defaultfilters import filesizeformat
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ugettext_lazy as _
@deconstructible


@ -1,13 +0,0 @@
import logging
from config import plugins
from funkwhale_api.contrib.archivedl import tasks
from .funkwhale_startup import PLUGIN
logger = logging.getLogger(__name__)
@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def lauch_download(track, conf={}):
tasks.archive_download.delay(track_id=track.pk, conf=conf)


@ -1,10 +0,0 @@
from config import plugins
PLUGIN = plugins.get_plugin_config(
name="archivedl",
label="Archive-dl",
description="",
version="0.1",
user=False,
conf=[],
)


@ -1,148 +0,0 @@
import asyncio
import hashlib
import logging
import os
import tempfile
import urllib.parse
import requests
from django.core.files import File
from django.utils import timezone
from funkwhale_api.federation import actors
from funkwhale_api.music import models, utils
from funkwhale_api.taskapp import celery
logger = logging.getLogger(__name__)
def create_upload(url, track, files_data):
mimetype = f"audio/{files_data.get('format', 'unknown')}"
duration = files_data.get("mtime", 0)
filesize = files_data.get("size", 0)
bitrate = files_data.get("bitrate", 0)
service_library = models.Library.objects.create(
privacy_level="everyone",
actor=actors.get_service_actor(),
)
return models.Upload.objects.create(
mimetype=mimetype,
source=url,
third_party_provider="archive-dl",
creation_date=timezone.now(),
track=track,
duration=duration,
size=filesize,
bitrate=bitrate,
library=service_library,
from_activity=None,
import_status="finished",
)
@celery.app.task(name="archivedl.archive_download")
@celery.require_instance(models.Track.objects.select_related(), "track")
def archive_download(track, conf):
artist_name = utils.get_artist_credit_string(track)
query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
with requests.Session() as session:
url = get_search_url(query, page_size=1, page=1)
page_data = fetch_json(url, session)
for obj in page_data["response"]["docs"]:
logger.info(f"launching download item for {str(obj)}")
download_item(
item_data=obj,
session=session,
allowed_extensions=utils.SUPPORTED_EXTENSIONS,
track=track,
)
def fetch_json(url, session):
logger.info(f"Fetching {url}...")
with session.get(url) as response:
return response.json()
def download_item(
item_data,
session,
allowed_extensions,
track,
):
files_data = get_files_data(item_data["identifier"], session)
to_download = list(
filter_files(
files_data["result"],
allowed_extensions=allowed_extensions,
)
)
url = f"https://archive.org/download/{item_data['identifier']}/{to_download[0]['name']}"
upload = create_upload(url, track, to_download[0])
try:
with tempfile.TemporaryDirectory() as temp_dir:
path = os.path.join(temp_dir, to_download[0]["name"])
download_file(
path,
url=url,
session=session,
checksum=to_download[0]["sha1"],
upload=upload,
to_download=to_download,
)
logger.info(f"Finished to download item {item_data['identifier']}...")
except Exception as e:
upload.delete()
raise e
def check_integrity(path, expected_checksum):
with open(path, mode="rb") as f:
hash = hashlib.sha1()
hash.update(f.read())
return expected_checksum == hash.hexdigest()
def get_files_data(identifier, session):
url = f"https://archive.org/metadata/{identifier}/files"
logger.info(f"Fetching files data at {url}...")
with session.get(url) as response:
return response.json()
def download_file(path, url, session, checksum, upload, to_download):
if os.path.exists(path) and check_integrity(path, checksum):
logger.info(f"Skipping already downloaded file at {path}")
return
logger.info(f"Downloading file {url}...")
with open(path, mode="wb") as f:
try:
with session.get(url) as response:
f.write(response.content)
except asyncio.TimeoutError as e:
logger.error(f"Timeout error while downloading {url}: {e}")
with open(path, "rb") as f:
upload.audio_file.save(f"{to_download['name']}", File(f))
upload.import_status = "finished"
upload.url = url
upload.save()
return upload
def filter_files(files, allowed_extensions):
for f in files:
if allowed_extensions:
extension = os.path.splitext(f["name"])[-1][1:]
if extension not in allowed_extensions:
continue
yield f
def get_search_url(query, page_size, page):
q = urllib.parse.urlencode({"q": query})
return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}&page={page}&output=json"


@ -0,0 +1,168 @@
# Copyright (c) 2018 Philipp Wolfer <ph.wolfer@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import json
import logging
import ssl
import time
from http.client import HTTPSConnection
HOST_NAME = "api.listenbrainz.org"
PATH_SUBMIT = "/1/submit-listens"
SSL_CONTEXT = ssl.create_default_context()
class Track:
"""
Represents a single track to submit.
See https://listenbrainz.readthedocs.io/en/latest/dev/json.html
"""
def __init__(self, artist_name, track_name, release_name=None, additional_info=None):
"""
Create a new Track instance
@param artist_name as str
@param track_name as str
@param release_name as str
@param additional_info as dict
"""
self.artist_name = artist_name
self.track_name = track_name
self.release_name = release_name
self.additional_info = additional_info or {}
@staticmethod
def from_dict(data):
return Track(
data["artist_name"],
data["track_name"],
data.get("release_name", None),
data.get("additional_info", {}),
)
def to_dict(self):
return {
"artist_name": self.artist_name,
"track_name": self.track_name,
"release_name": self.release_name,
"additional_info": self.additional_info,
}
def __repr__(self):
return f"Track({self.artist_name}, {self.track_name})"
class ListenBrainzClient:
"""
Submit listens to ListenBrainz.org.
See https://listenbrainz.readthedocs.io/en/latest/dev/api.html
"""
def __init__(self, user_token, logger=logging.getLogger(__name__)):
self.__next_request_time = 0
self.user_token = user_token
self.logger = logger
def listen(self, listened_at, track):
"""
Submit a listen for a track
@param listened_at as int
@param entry as Track
"""
payload = _get_payload(track, listened_at)
return self._submit("single", [payload])
def playing_now(self, track):
"""
Submit a playing now notification for a track
@param track as Track
"""
payload = _get_payload(track)
return self._submit("playing_now", [payload])
def import_tracks(self, tracks):
"""
Import a list of tracks as (listened_at, Track) pairs
@param track as [(int, Track)]
"""
payload = _get_payload_many(tracks)
return self._submit("import", payload)
def _submit(self, listen_type, payload, retry=0):
self._wait_for_ratelimit()
self.logger.debug("ListenBrainz %s: %r", listen_type, payload)
data = {"listen_type": listen_type, "payload": payload}
headers = {
"Authorization": "Token %s" % self.user_token,
"Content-Type": "application/json",
}
body = json.dumps(data)
conn = HTTPSConnection(HOST_NAME, context=SSL_CONTEXT)
conn.request("POST", PATH_SUBMIT, body, headers)
response = conn.getresponse()
response_text = response.read()
try:
response_data = json.loads(response_text)
except json.decoder.JSONDecodeError:
response_data = response_text
self._handle_ratelimit(response)
log_msg = f"Response {response.status}: {response_data!r}"
if response.status == 429 and retry < 5: # Too Many Requests
self.logger.warning(log_msg)
return self._submit(listen_type, payload, retry + 1)
elif response.status == 200:
self.logger.debug(log_msg)
else:
self.logger.error(log_msg)
return response
def _wait_for_ratelimit(self):
now = time.time()
if self.__next_request_time > now:
delay = self.__next_request_time - now
self.logger.debug("Rate limit applies, delay %d", delay)
time.sleep(delay)
def _handle_ratelimit(self, response):
remaining = int(response.getheader("X-RateLimit-Remaining", 0))
reset_in = int(response.getheader("X-RateLimit-Reset-In", 0))
self.logger.debug("X-RateLimit-Remaining: %i", remaining)
self.logger.debug("X-RateLimit-Reset-In: %i", reset_in)
if remaining == 0:
self.__next_request_time = time.time() + reset_in
def _get_payload_many(tracks):
payload = []
for listened_at, track in tracks:
data = _get_payload(track, listened_at)
payload.append(data)
return payload
def _get_payload(track, listened_at=None):
data = {"track_metadata": track.to_dict()}
if listened_at is not None:
data["listened_at"] = listened_at
return data
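A hypothetical usage sketch for the client above; the token is a placeholder and the calls would hit api.listenbrainz.org:

import time

client = ListenBrainzClient(user_token="00000000-0000-0000-0000-000000000000")  # placeholder token
track = Track("Example Artist", "Example Song", release_name="Example Album")
client.playing_now(track)               # "playing now" notification
client.listen(int(time.time()), track)  # submit a single listen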

View File

@ -1,31 +1,27 @@
import liblistenbrainz
import funkwhale_api
from config import plugins
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from . import tasks
from .client import ListenBrainzClient, Track
from .funkwhale_startup import PLUGIN
@plugins.register_hook(plugins.LISTENING_CREATED, PLUGIN)
def submit_listen(listening, conf, **kwargs):
user_token = conf["user_token"]
if not user_token and not conf["submit_listenings"]:
if not user_token:
return
logger = PLUGIN["logger"]
logger.info("Submitting listen to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
client.set_auth_token(user_token)
listen = get_lb_listen(listening)
client.submit_single_listen(listen)
client = ListenBrainzClient(user_token=user_token, logger=logger)
track = get_track(listening.track)
client.listen(int(listening.creation_date.timestamp()), track)
def get_lb_listen(listening):
track = listening.track
def get_track(track):
artist = track.artist.name
title = track.title
album = None
additional_info = {
"media_player": "Funkwhale",
"media_player_version": funkwhale_api.__version__,
@ -40,97 +36,15 @@ def get_lb_listen(listening):
if track.album:
if track.album.title:
release_name = track.album.title
album = track.album.title
if track.album.mbid:
additional_info["release_mbid"] = str(track.album.mbid)
mbids = [ac.artist.mbid for ac in track.artist_credit.all() if ac.artist.mbid]
if mbids:
additional_info["artist_mbids"] = mbids
if track.artist.mbid:
additional_info["artist_mbids"] = [str(track.artist.mbid)]
upload = track.uploads.filter(duration__gte=0).first()
if upload:
additional_info["duration"] = upload.duration
return liblistenbrainz.Listen(
track_name=track.title,
listened_at=listening.creation_date.timestamp(),
artist_name=track.get_artist_credit_string,
release_name=release_name,
additional_info=additional_info,
)
@plugins.register_hook(plugins.FAVORITE_CREATED, PLUGIN)
def submit_favorite_creation(track_favorite, conf, **kwargs):
user_token = conf["user_token"]
if not user_token or not conf["submit_favorites"]:
return
logger = PLUGIN["logger"]
logger.info("Submitting favorite to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
track = track_favorite.track
if not track.mbid:
logger.warning(
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
)
return
client.submit_user_feedback(1, track.mbid)
@plugins.register_hook(plugins.FAVORITE_DELETED, PLUGIN)
def submit_favorite_deletion(track_favorite, conf, **kwargs):
user_token = conf["user_token"]
if not user_token or not conf["submit_favorites"]:
return
logger = PLUGIN["logger"]
logger.info("Submitting favorite deletion to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
track = track_favorite.track
if not track.mbid:
logger.warning(
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
)
return
client.submit_user_feedback(0, track.mbid)
@plugins.register_hook(plugins.LISTENING_SYNC, PLUGIN)
def sync_listenings_from_listenbrainz(user, conf):
user_name = conf["user_name"]
if not user_name or not conf["sync_listenings"]:
return
logger = PLUGIN["logger"]
logger.info("Getting listenings from ListenBrainz")
try:
last_ts = (
history_models.Listening.objects.filter(actor=user.actor)
.filter(source="Listenbrainz")
.latest("creation_date")
.values_list("creation_date", flat=True)
).timestamp()
except funkwhale_api.history.models.Listening.DoesNotExist:
tasks.import_listenbrainz_listenings(user, user_name, 0)
return
tasks.import_listenbrainz_listenings(user, user_name, last_ts)
@plugins.register_hook(plugins.FAVORITE_SYNC, PLUGIN)
def sync_favorites_from_listenbrainz(user, conf):
user_name = conf["user_name"]
if not user_name or not conf["sync_favorites"]:
return
try:
last_ts = (
favorites_models.TrackFavorite.objects.filter(actor=user.actor)
.filter(source="Listenbrainz")
.latest("creation_date")
.creation_date.timestamp()
)
except favorites_models.TrackFavorite.DoesNotExist:
tasks.import_listenbrainz_favorites(user, user_name, 0)
return
tasks.import_listenbrainz_favorites(user, user_name, last_ts)
return Track(artist, title, album, additional_info)
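For context, the JSON body the bundled client submits for such a single listen looks roughly like this (all values are illustrative):

{
    "listen_type": "single",
    "payload": [
        {
            "listened_at": 1700000000,
            "track_metadata": {
                "artist_name": "Example Artist",
                "track_name": "Example Song",
                "release_name": "Example Album",
                "additional_info": {"media_player": "Funkwhale"},
            },
        }
    ],
}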

View File

@ -3,7 +3,7 @@ from config import plugins
PLUGIN = plugins.get_plugin_config(
name="listenbrainz",
label="ListenBrainz",
description="A plugin that allows you to submit or sync your listens and favorites to ListenBrainz.",
description="A plugin that allows you to submit your listens to ListenBrainz.",
homepage="https://docs.funkwhale.audio/users/builtinplugins.html#listenbrainz-plugin", # noqa
version="0.3",
user=True,
@ -13,45 +13,6 @@ PLUGIN = plugins.get_plugin_config(
"type": "text",
"label": "Your ListenBrainz user token",
"help": "You can find your user token in your ListenBrainz profile at https://listenbrainz.org/profile/",
},
{
"name": "user_name",
"type": "text",
"required": False,
"label": "Your ListenBrainz user name.",
"help": "Required for importing listenings and favorites with ListenBrainz \
but not to send activities",
},
{
"name": "submit_listenings",
"type": "boolean",
"default": True,
"label": "Enable listening submission to ListenBrainz",
"help": "If enabled, your listenings from Funkwhale will be imported into ListenBrainz.",
},
{
"name": "sync_listenings",
"type": "boolean",
"default": False,
"label": "Enable listenings sync",
"help": "If enabled, your listening from ListenBrainz will be imported into Funkwhale. This means they \
will be used along with Funkwhale listenings to filter out recently listened content or \
generate recommendations",
},
{
"name": "sync_favorites",
"type": "boolean",
"default": False,
"label": "Enable favorite sync",
"help": "If enabled, your favorites from ListenBrainz will be imported into Funkwhale. This means they \
will be used along with Funkwhale favorites (UI display, federation activity)",
},
{
"name": "submit_favorites",
"type": "boolean",
"default": False,
"label": "Enable favorite submission to ListenBrainz services",
"help": "If enabled, your favorites from Funkwhale will be submitted to ListenBrainz",
},
}
],
)

View File

@ -1,165 +0,0 @@
import datetime
import liblistenbrainz
from django.utils import timezone
from config import plugins
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from funkwhale_api.music import models as music_models
from funkwhale_api.taskapp import celery
from funkwhale_api.users import models
from .funkwhale_startup import PLUGIN
@celery.app.task(name="listenbrainz.trigger_listening_sync_with_listenbrainz")
def trigger_listening_sync_with_listenbrainz():
now = timezone.now()
active_month = now - datetime.timedelta(days=30)
users = (
models.User.objects.filter(plugins__code="listenbrainz")
.filter(plugins__conf__sync_listenings=True)
.filter(last_activity__gte=active_month)
)
for user in users:
plugins.trigger_hook(
plugins.LISTENING_SYNC,
user=user,
confs=plugins.get_confs(user),
)
@celery.app.task(name="listenbrainz.trigger_favorite_sync_with_listenbrainz")
def trigger_favorite_sync_with_listenbrainz():
now = timezone.now()
active_month = now - datetime.timedelta(days=30)
users = (
models.User.objects.filter(plugins__code="listenbrainz")
.filter(plugins__conf__sync_listenings=True)
.filter(last_activity__gte=active_month)
)
for user in users:
plugins.trigger_hook(
plugins.FAVORITE_SYNC,
user=user,
confs=plugins.get_confs(user),
)
@celery.app.task(name="listenbrainz.import_listenbrainz_listenings")
def import_listenbrainz_listenings(user, user_name, since):
client = liblistenbrainz.ListenBrainz()
response = client.get_listens(username=user_name, min_ts=since, count=100)
listens = response["payload"]["listens"]
while listens:
add_lb_listenings_to_db(listens, user)
new_ts = max(listen.listened_at for listen in listens)
response = client.get_listens(username=user_name, min_ts=new_ts, count=100)
listens = response["payload"]["listens"]
def add_lb_listenings_to_db(listens, user):
logger = PLUGIN["logger"]
fw_listens = []
for listen in listens:
if (
listen.additional_info.get("submission_client")
and listen.additional_info.get("submission_client")
== "Funkwhale ListenBrainz plugin"
and history_models.Listening.objects.filter(
creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, datetime.timezone.utc
)
).exists()
):
logger.info(
f"Listen with ts {listen.listened_at} skipped because already in db"
)
continue
mbid = (
listen.mbid_mapping
if hasattr(listen, "mbid_mapping")
else listen.recording_mbid
)
if not mbid:
logger.info("Received listening that doesn't have a mbid. Skipping...")
try:
track = music_models.Track.objects.get(mbid=mbid)
except music_models.Track.DoesNotExist:
logger.info(
"Received listening that doesn't exist in fw database. Skipping..."
)
continue
user = user
fw_listen = history_models.Listening(
creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, datetime.timezone.utc
),
track=track,
actor=user.actor,
source="Listenbrainz",
)
fw_listens.append(fw_listen)
history_models.Listening.objects.bulk_create(fw_listens)
@celery.app.task(name="listenbrainz.import_listenbrainz_favorites")
def import_listenbrainz_favorites(user, user_name, since):
client = liblistenbrainz.ListenBrainz()
response = client.get_user_feedback(username=user_name)
offset = 0
while response["feedback"]:
count = response["count"]
offset = offset + count
last_sync = min(
response["feedback"],
key=lambda obj: datetime.datetime.fromtimestamp(
obj["created"], datetime.timezone.utc
),
)["created"]
add_lb_feedback_to_db(response["feedback"], user)
if last_sync <= since or count == 0:
return
response = client.get_user_feedback(username=user_name, offset=offset)
def add_lb_feedback_to_db(feedbacks, user):
logger = PLUGIN["logger"]
for feedback in feedbacks:
try:
track = music_models.Track.objects.get(mbid=feedback["recording_mbid"])
except music_models.Track.DoesNotExist:
logger.info(
"Received feedback track that doesn't exist in fw database. Skipping..."
)
continue
if feedback["score"] == 1:
favorites_models.TrackFavorite.objects.get_or_create(
actor=user.actor,
creation_date=datetime.datetime.fromtimestamp(
feedback["created"], datetime.timezone.utc
),
track=track,
source="Listenbrainz",
)
elif feedback["score"] == 0:
try:
favorites_models.TrackFavorite.objects.get(
actor=user.actor, track=track
).delete()
except favorites_models.TrackFavorite.DoesNotExist:
continue
elif feedback["score"] == -1:
logger.info("Funkwhale doesn't support disliked tracks")

View File

@ -37,7 +37,7 @@ def get_payload(listening, api_key, conf):
# See https://github.com/krateng/maloja/blob/master/API.md
payload = {
"key": api_key,
"artists": [artist.name for artist in track.artist_credit.get_artists_list()],
"artists": [track.artist.name],
"title": track.title,
"time": int(listening.creation_date.timestamp()),
"nofix": bool(conf.get("nofix")),
@ -46,10 +46,8 @@ def get_payload(listening, api_key, conf):
if track.album:
if track.album.title:
payload["album"] = track.album.title
if track.album.artist_credit.all():
payload["albumartists"] = [
artist.name for artist in track.album.artist_credit.get_artists_list()
]
if track.album.artist:
payload["albumartists"] = [track.album.artist.name]
upload = track.uploads.filter(duration__gte=0).first()
if upload:
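An illustrative payload produced by get_payload above for Maloja (values are made up; whatever the truncated `if upload:` branch adds is omitted here):

{
    "key": "maloja-api-key",
    "artists": ["Example Artist"],
    "title": "Example Song",
    "album": "Example Album",
    "albumartists": ["Example Artist"],
    "time": 1700000000,
    "nofix": False,
}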

View File

@ -29,7 +29,7 @@ def forward_to_scrobblers(listening, conf, **kwargs):
(username + " " + password).encode("utf-8")
).hexdigest()
cache_key = "lastfm:sessionkey:{}".format(
":".join([str(listening.actor.pk), hashed_auth])
":".join([str(listening.user.pk), hashed_auth])
)
PLUGIN["logger"].info("Forwarding scrobble to %s", LASTFM_SCROBBLER_URL)
session_key = PLUGIN["cache"].get(cache_key)

View File

@ -84,7 +84,7 @@ def get_scrobble_payload(track, date, suffix="[0]"):
"""
upload = track.uploads.filter(duration__gte=0).first()
data = {
f"a{suffix}": track.get_artist_credit_string,
f"a{suffix}": track.artist.name,
f"t{suffix}": track.title,
f"l{suffix}": upload.duration if upload else 0,
f"b{suffix}": (track.album.title if track.album else "") or "",
@ -103,7 +103,7 @@ def get_scrobble2_payload(track, date, suffix="[0]"):
"""
upload = track.uploads.filter(duration__gte=0).first()
data = {
"artist": track.get_artist_credit_string,
"artist": track.artist.name,
"track": track.title,
"chosenByUser": 1,
}
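For comparison, a sketch of what the two helpers return for the same hypothetical track; only the keys visible in this hunk are shown, the rest are elided:

# get_scrobble_payload(track, date, suffix="[0]") ->
#   {"a[0]": "Example Artist", "t[0]": "Example Song", "l[0]": 180, "b[0]": "Example Album", ...}
# get_scrobble2_payload(track, date) ->
#   {"artist": "Example Artist", "track": "Example Song", "chosenByUser": 1, ...}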

View File

@ -314,12 +314,9 @@ class FunkwhaleProvider(internet_provider.Provider):
not random enough
"""
def federation_url(self, prefix="", obj_uuid=None, local=False):
if not obj_uuid:
obj_uuid = uuid.uuid4()
def federation_url(self, prefix="", local=False):
def path_generator():
return f"{prefix}/{obj_uuid}"
return f"{prefix}/{uuid.uuid4()}"
domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
protocol = "https"

View File

@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.TrackFavoriteActivitySerializer)
@record.registry.register_consumer("favorites.TrackFavorite")
def broadcast_track_favorite_to_instance_activity(data, obj):
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
if obj.user.privacy_level not in ["instance", "everyone"]:
return
channels.group_send(

View File

@ -5,5 +5,5 @@ from . import models
@admin.register(models.TrackFavorite)
class TrackFavoriteAdmin(admin.ModelAdmin):
list_display = ["actor", "track", "creation_date"]
list_select_related = ["actor", "track"]
list_display = ["user", "track", "creation_date"]
list_select_related = ["user", "track"]

View File

@ -1,28 +1,14 @@
import factory
from django.conf import settings
from funkwhale_api.factories import NoUpdateOnCreate, registry
from funkwhale_api.federation import models
from funkwhale_api.federation.factories import ActorFactory
from funkwhale_api.music.factories import TrackFactory
from funkwhale_api.users.factories import UserFactory
@registry.register
class TrackFavorite(NoUpdateOnCreate, factory.django.DjangoModelFactory):
track = factory.SubFactory(TrackFactory)
actor = factory.SubFactory(ActorFactory)
fid = factory.Faker("federation_url")
uuid = factory.Faker("uuid4")
user = factory.SubFactory(UserFactory)
class Meta:
model = "favorites.TrackFavorite"
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
return
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
0
]
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
self.save(update_fields=["fid"])

View File

@ -9,7 +9,7 @@ class TrackFavoriteFilter(moderation_filters.HiddenContentFilterSet):
q = fields.SearchFilter(
search_fields=["track__title", "track__artist__name", "track__album__title"]
)
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
class Meta:
model = models.TrackFavorite

View File

@ -1,18 +0,0 @@
# Generated by Django 3.2.20 on 2023-12-09 14:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('favorites', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='trackfavorite',
name='source',
field=models.CharField(blank=True, max_length=100, null=True),
),
]

View File

@ -1,102 +0,0 @@
# Generated by Django 4.2.9 on 2024-03-28 23:32
import uuid
from django.db import migrations, models, transaction
import django.db.models.deletion
from django.conf import settings
from funkwhale_api.federation import utils
from django.urls import reverse
def gen_uuid(apps, schema_editor):
MyModel = apps.get_model("favorites", "TrackFavorite")
for row in MyModel.objects.all():
unique_uuid = uuid.uuid4()
while MyModel.objects.filter(uuid=unique_uuid).exists():
unique_uuid = uuid.uuid4()
fid = utils.full_url(
reverse("federation:music:likes-detail", kwargs={"uuid": unique_uuid})
)
row.uuid = unique_uuid
row.fid = fid
row.save(update_fields=["uuid", "fid"])
def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("favorites", "TrackFavorite")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])
class Migration(migrations.Migration):
dependencies = [
("favorites", "0002_trackfavorite_source"),
]
operations = [
migrations.AddField(
model_name="trackfavorite",
name="actor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="track_favorites",
to="federation.actor",
),
),
migrations.AddField(
model_name="trackfavorite",
name="fid",
field=models.URLField(
db_index=True,
default="https://default.fid",
max_length=500,
unique=True,
),
preserve_default=False,
),
migrations.AddField(
model_name="trackfavorite",
name="url",
field=models.URLField(blank=True, max_length=500, null=True),
),
migrations.AddField(
model_name="trackfavorite",
name="uuid",
field=models.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="trackfavorite",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True, null=False),
),
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="trackfavorite",
name="actor",
field=models.ForeignKey(
blank=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
related_name="track_favorites",
to="federation.actor",
),
),
migrations.AlterUniqueTogether(
name="trackfavorite",
unique_together={("track", "actor")},
),
migrations.RemoveField(
model_name="trackfavorite",
name="user",
),
]

View File

@ -1,91 +1,26 @@
import uuid
from django.db import models
from django.urls import reverse
from django.utils import timezone
from funkwhale_api.common import fields
from funkwhale_api.common import models as common_models
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music.models import Track
FAVORITE_PRIVACY_LEVEL_CHOICES = [
(k, l) for k, l in fields.PRIVACY_LEVEL_CHOICES if k != "followers"
]
class TrackFavoriteQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
def viewable_by(self, actor):
if actor is None:
return self.filter(actor__user__privacy_level="everyone")
if hasattr(actor, "user"):
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
instance_query = models.Q(
actor__user__privacy_level="instance", actor__domain=actor.domain
)
instance_actor_query = models.Q(
actor__user__privacy_level="instance", actor__domain=actor.domain
)
return self.filter(
me_query
| instance_query
| instance_actor_query
| models.Q(actor__user__privacy_level="everyone")
)
class TrackFavorite(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
class TrackFavorite(models.Model):
creation_date = models.DateTimeField(default=timezone.now)
actor = models.ForeignKey(
"federation.Actor",
related_name="track_favorites",
on_delete=models.CASCADE,
null=False,
blank=False,
user = models.ForeignKey(
"users.User", related_name="track_favorites", on_delete=models.CASCADE
)
track = models.ForeignKey(
Track, related_name="track_favorites", on_delete=models.CASCADE
)
source = models.CharField(max_length=100, null=True, blank=True)
federation_namespace = "likes"
objects = TrackFavoriteQuerySet.as_manager()
class Meta:
unique_together = ("track", "actor")
unique_together = ("track", "user")
ordering = ("-creation_date",)
@classmethod
def add(cls, track, actor):
favorite, created = cls.objects.get_or_create(actor=actor, track=track)
def add(cls, track, user):
favorite, created = cls.objects.get_or_create(user=user, track=track)
return favorite
def get_activity_url(self):
return f"{self.actor.get_absolute_url()}/favorites/tracks/{self.pk}"
def get_absolute_url(self):
return f"/library/tracks/{self.track.pk}"
def get_federation_id(self):
if self.fid:
return self.fid
return federation_utils.full_url(
reverse(
f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid},
)
)
def save(self, **kwargs):
if not self.pk and not self.fid:
self.fid = self.get_federation_id()
return super().save(**kwargs)
return f"{self.user.get_activity_url()}/favorites/tracks/{self.pk}"

View File

@ -1,8 +1,10 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.activity import serializers as activity_serializers
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer
from . import models
@ -10,24 +12,35 @@ from . import models
class TrackFavoriteActivitySerializer(activity_serializers.ModelSerializer):
type = serializers.SerializerMethodField()
object = TrackActivitySerializer(source="track")
actor = federation_serializers.APIActorSerializer(read_only=True)
actor = UserActivitySerializer(source="user")
published = serializers.DateTimeField(source="creation_date")
class Meta:
model = models.TrackFavorite
fields = ["id", "local_id", "object", "type", "actor", "published"]
def get_actor(self, obj):
return UserActivitySerializer(obj.user).data
def get_type(self, obj):
return "Like"
class UserTrackFavoriteSerializer(serializers.ModelSerializer):
track = TrackSerializer(read_only=True)
actor = federation_serializers.APIActorSerializer(read_only=True)
user = UserBasicSerializer(read_only=True)
actor = serializers.SerializerMethodField()
class Meta:
model = models.TrackFavorite
fields = ("id", "actor", "track", "creation_date", "actor")
fields = ("id", "user", "track", "creation_date", "actor")
actor = serializers.SerializerMethodField()
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
actor = obj.user.actor
if actor:
return federation_serializers.APIActorSerializer(actor).data
class UserTrackFavoriteWriteSerializer(serializers.ModelSerializer):

View File

@ -4,10 +4,8 @@ from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from config import plugins
from funkwhale_api.activity import record
from funkwhale_api.common import fields, permissions
from funkwhale_api.federation import routes
from funkwhale_api.music import utils as music_utils
from funkwhale_api.music.models import Track
from funkwhale_api.users.oauth import permissions as oauth_permissions
@ -24,7 +22,7 @@ class TrackFavoriteViewSet(
filterset_class = filters.TrackFavoriteFilter
serializer_class = serializers.UserTrackFavoriteSerializer
queryset = models.TrackFavorite.objects.all().select_related(
"actor__attachment_icon"
"user__actor__attachment_icon"
)
permission_classes = [
oauth_permissions.ScopePermission,
@ -33,7 +31,6 @@ class TrackFavoriteViewSet(
required_scope = "favorites"
anonymous_policy = "setting"
owner_checks = ["write"]
owner_field = "actor.user"
def get_serializer_class(self):
if self.request.method.lower() in ["head", "get", "options"]:
@ -47,20 +44,7 @@ class TrackFavoriteViewSet(
instance = self.perform_create(serializer)
serializer = self.get_serializer(instance=instance)
headers = self.get_success_headers(serializer.data)
plugins.trigger_hook(
plugins.FAVORITE_CREATED,
track_favorite=serializer.instance,
confs=plugins.get_confs(self.request.user),
)
record.send(instance)
routes.outbox.dispatch(
{"type": "Like", "object": {"type": "Track"}},
context={
"track": instance.track,
"actor": instance.actor,
"id": instance.fid,
},
)
return Response(
serializer.data, status=status.HTTP_201_CREATED, headers=headers
)
@ -68,30 +52,19 @@ class TrackFavoriteViewSet(
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(
fields.privacy_level_query(
self.request.user, "actor__user__privacy_level", "actor__user"
)
fields.privacy_level_query(self.request.user, "user__privacy_level")
)
tracks = (
Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
)
.prefetch_related(
"artist_credit__artist",
"album__artist_credit__artist",
)
.select_related(
"attributed_to",
"album__attachment_cover",
)
tracks = Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
).select_related(
"artist", "album__artist", "attributed_to", "album__attachment_cover"
)
queryset = queryset.prefetch_related(Prefetch("track", queryset=tracks))
return queryset
def perform_create(self, serializer):
track = Track.objects.get(pk=serializer.data["track"])
favorite = models.TrackFavorite.add(track=track, actor=self.request.user.actor)
favorite = models.TrackFavorite.add(track=track, user=self.request.user)
return favorite
@extend_schema(operation_id="unfavorite_track")
@ -99,19 +72,10 @@ class TrackFavoriteViewSet(
def remove(self, request, *args, **kwargs):
try:
pk = int(request.data["track"])
favorite = request.user.actor.track_favorites.get(track__pk=pk)
favorite = request.user.track_favorites.get(track__pk=pk)
except (AttributeError, ValueError, models.TrackFavorite.DoesNotExist):
return Response({}, status=400)
routes.outbox.dispatch(
{"type": "Dislike", "object": {"type": "Track"}},
context={"favorite": favorite},
)
favorite.delete()
plugins.trigger_hook(
plugins.FAVORITE_DELETED,
track_favorite=favorite,
confs=plugins.get_confs(self.request.user),
)
return Response([], status=status.HTTP_204_NO_CONTENT)
@extend_schema(
@ -128,9 +92,7 @@ class TrackFavoriteViewSet(
if not request.user.is_authenticated:
return Response({"results": [], "count": 0}, status=401)
favorites = request.user.actor.track_favorites.values("id", "track").order_by(
"id"
)
favorites = request.user.track_favorites.values("id", "track").order_by("id")
payload = serializers.AllFavoriteSerializer(favorites).data
return Response(payload, status=200)

View File

@ -119,9 +119,6 @@ def should_reject(fid, actor_id=None, payload={}):
@transaction.atomic
def receive(activity, on_behalf_of, inbox_actor=None):
"""
Receive an activity, find its recipients and save it to the database before dispatching it
"""
from funkwhale_api.moderation import mrf
from . import models, serializers, tasks
@ -226,9 +223,6 @@ class InboxRouter(Router):
"""
from . import api_serializers, models
logger.debug(
f"[federation] Inbox dispatch payload : {payload} with context : {context}"
)
handlers = self.get_matching_handlers(payload)
for handler in handlers:
if call_handlers:
@ -299,59 +293,6 @@ def schedule_key_rotation(actor_id, delay):
tasks.rotate_actor_key.apply_async(kwargs={"actor_id": actor_id}, countdown=delay)
def activity_pass_user_privacy_level(context, routing):
TYPE_FOLLOW_USER_PRIVACY_LEVEL = ["Listen", "Like", "Create"]
TYPE_IGNORE_USER_PRIVACY_LEVEL = ["Delete", "Accept", "Follow"]
MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]
actor = context.get("actor", False)
type = routing.get("type", False)
object_type = routing.get("object", {}).get("type", None)
if not actor:
logger.warning(
"No actor provided in activity context : \
we cannot follow actor.privacy_level, activity will be sent by default."
)
# We do not consider music metadata as private
if object_type in MUSIC_OBJECT_TYPE:
return True
if type:
if type in TYPE_IGNORE_USER_PRIVACY_LEVEL:
return True
if type in TYPE_FOLLOW_USER_PRIVACY_LEVEL and actor and actor.is_local:
if actor.user.privacy_level in [
"me",
"instance",
]:
return False
return True
return True
def activity_pass_object_privacy_level(context, routing):
MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]
# we only support playlist federation for now
object = context.get("playlist", False)
obj_privacy_level = object.privacy_level if object else None
object_type = routing.get("object", {}).get("type", None)
# We do not consider music metadata as private
if object_type in MUSIC_OBJECT_TYPE:
return True
if object and obj_privacy_level and obj_privacy_level in ["me", "instance"]:
return False
return True
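As I read the two gates above, a few representative contexts behave roughly as follows (the actor and playlist objects are stand-ins, not real model instances):

# An Accept is never filtered, whatever the actor's privacy level:
#   activity_pass_user_privacy_level({"actor": remote_actor}, {"type": "Accept"})  # -> True
# A Listen by a local actor whose user privacy_level is "me" stays local:
#   activity_pass_user_privacy_level({"actor": local_actor_me}, {"type": "Listen", "object": {"type": "Track"}})  # -> False
# A playlist with privacy_level "instance" is not federated:
#   activity_pass_object_privacy_level({"playlist": instance_playlist}, {"type": "Create", "object": {"type": "Playlist"}})  # -> False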
class OutboxRouter(Router):
@transaction.atomic
def dispatch(self, routing, context):
@ -364,7 +305,6 @@ class OutboxRouter(Router):
from . import models, tasks
logger.debug(f"[federation] Outbox dispatch context : {context}")
allow_list_enabled = preferences.get("moderation__allow_list_enabled")
allowed_domains = None
if allow_list_enabled:
@ -374,18 +314,6 @@ class OutboxRouter(Router):
)
)
if activity_pass_user_privacy_level(context, routing) is False:
logger.info(
"[federation] Discarding outbox dispatch due to user privacy_level"
)
return
if activity_pass_object_privacy_level(context, routing) is False:
logger.info(
"[federation] Discarding outbox dispatch due to object privacy_level"
)
return
for route, handler in self.routes:
if not match_route(route, routing):
continue
@ -469,7 +397,6 @@ class OutboxRouter(Router):
)
for a in activities:
logger.info(f"[federation] OUtbox sending activity : {a.pk}")
funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
return activities
@ -627,6 +554,12 @@ def get_actors_from_audience(urls):
final_query, Q(pk__in=actor_follows.values_list("actor", flat=True))
)
library_follows = models.LibraryFollow.objects.filter(
queries["followed"], approved=True
)
final_query = funkwhale_utils.join_queries_or(
final_query, Q(pk__in=library_follows.values_list("actor", flat=True))
)
if not final_query:
return models.Actor.objects.none()
return models.Actor.objects.filter(final_query)

View File

@ -1,5 +1,4 @@
import datetime
from urllib.parse import urlparse
from django.conf import settings
from django.core import validators
@ -56,6 +55,7 @@ class LibrarySerializer(serializers.ModelSerializer):
"uuid",
"actor",
"name",
"description",
"creation_date",
"uploads_count",
"privacy_level",
@ -97,30 +97,6 @@ class LibraryFollowSerializer(serializers.ModelSerializer):
return federation_serializers.APIActorSerializer(o.actor).data
class FollowSerializer(serializers.ModelSerializer):
target = common_serializers.RelatedField(
"fid", federation_serializers.APIActorSerializer(), required=True
)
actor = serializers.SerializerMethodField()
class Meta:
model = models.Follow
fields = ["creation_date", "actor", "uuid", "target", "approved"]
read_only_fields = ["uuid", "actor", "approved", "creation_date"]
def validate_target(self, v):
request_actor = self.context["actor"]
if v == request_actor:
raise serializers.ValidationError("You cannot follow yourself")
if v.received_follows.filter(actor=request_actor).exists():
raise serializers.ValidationError("You are already following this user")
return v
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, o):
return federation_serializers.APIActorSerializer(o.actor).data
def serialize_generic_relation(activity, obj):
data = {"type": obj._meta.label}
if data["type"] == "federation.Actor":
@ -130,11 +106,9 @@ def serialize_generic_relation(activity, obj):
if data["type"] == "music.Library":
data["name"] = obj.name
if (
data["type"] == "federation.LibraryFollow"
or data["type"] == "federation.Follow"
):
if data["type"] == "federation.LibraryFollow":
data["approved"] = obj.approved
return data
@ -204,17 +178,6 @@ FETCH_OBJECT_CONFIG = {
FETCH_OBJECT_FIELD = common_fields.GenericRelation(FETCH_OBJECT_CONFIG)
def convert_url_to_webginfer(url):
parsed_url = urlparse(url)
domain = parsed_url.netloc # e.g., "node1.funkwhale.test"
path_parts = parsed_url.path.strip("/").split("/")
# Ensure the path is in the expected format
if len(path_parts) > 0 and path_parts[0].startswith("@"):
username = path_parts[0][1:] # Remove the '@'
return f"{username}@{domain}"
return None
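An illustrative input/output pair for this helper (hostname and username are made up):

# convert_url_to_webginfer("https://node1.funkwhale.test/@alice")
#   -> "alice@node1.funkwhale.test"
# URLs whose path does not start with "@username" return None and are used as-is.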
class FetchSerializer(serializers.ModelSerializer):
actor = federation_serializers.APIActorSerializer(read_only=True)
object = serializers.CharField(write_only=True)
@ -244,10 +207,6 @@ class FetchSerializer(serializers.ModelSerializer):
]
def validate_object(self, value):
if value.startswith("https://"):
converted = convert_url_to_webginfer(value)
if converted:
value = converted
# if value is a webfinger lookup, we craft a special url
if value.startswith("@"):
value = value.lstrip("@")

View File

@ -5,7 +5,6 @@ from . import api_views
router = routers.OptionalSlashRouter()
router.register(r"fetches", api_views.FetchViewSet, "fetches")
router.register(r"follows/library", api_views.LibraryFollowViewSet, "library-follows")
router.register(r"follows/user", api_views.UserFollowViewSet, "user-follows")
router.register(r"inbox", api_views.InboxItemViewSet, "inbox")
router.register(r"libraries", api_views.LibraryViewSet, "libraries")
router.register(r"domains", api_views.DomainViewSet, "domains")

View File

@ -311,106 +311,3 @@ class ActorViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
filter_uploads=lambda o, uploads: uploads.filter(library__actor=o)
)
)
@extend_schema_view(
list=extend_schema(operation_id="get_federation_received_follows"),
create=extend_schema(operation_id="create_federation_user_follow"),
)
class UserFollowViewSet(
mixins.CreateModelMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
lookup_field = "uuid"
queryset = (
models.Follow.objects.all()
.order_by("-creation_date")
.select_related("actor", "target")
.filter(actor__type="Person")
)
serializer_class = api_serializers.FollowSerializer
permission_classes = [oauth_permissions.ScopePermission]
required_scope = "follows"
ordering_fields = ("creation_date",)
@extend_schema(operation_id="get_federation_user_follow")
def retrieve(self, request, *args, **kwargs):
return super().retrieve(request, *args, **kwargs)
@extend_schema(operation_id="delete_federation_user_follow")
def destroy(self, request, uuid=None):
return super().destroy(request, uuid)
def get_queryset(self):
qs = super().get_queryset()
return qs.filter(
Q(target=self.request.user.actor) | Q(actor=self.request.user.actor)
).exclude(approved=False)
def perform_create(self, serializer):
follow = serializer.save(actor=self.request.user.actor)
routes.outbox.dispatch({"type": "Follow"}, context={"follow": follow})
@transaction.atomic
def perform_destroy(self, instance):
routes.outbox.dispatch(
{"type": "Undo", "object": {"type": "Follow"}}, context={"follow": instance}
)
instance.delete()
def get_serializer_context(self):
context = super().get_serializer_context()
context["actor"] = self.request.user.actor
return context
@extend_schema(
operation_id="accept_federation_user_follow",
responses={404: None, 204: None},
)
@decorators.action(methods=["post"], detail=True)
def accept(self, request, *args, **kwargs):
try:
follow = self.queryset.get(
target=self.request.user.actor, uuid=kwargs["uuid"]
)
except models.Follow.DoesNotExist:
return response.Response({}, status=404)
update_follow(follow, approved=True)
return response.Response(status=204)
@extend_schema(operation_id="reject_federation_user_follow")
@decorators.action(methods=["post"], detail=True)
def reject(self, request, *args, **kwargs):
try:
follow = self.queryset.get(
target=self.request.user.actor, uuid=kwargs["uuid"]
)
except models.Follow.DoesNotExist:
return response.Response({}, status=404)
update_follow(follow, approved=False)
return response.Response(status=204)
@extend_schema(operation_id="get_all_federation_library_follows")
@decorators.action(methods=["get"], detail=False)
def all(self, request, *args, **kwargs):
"""
Return all follows of the current user, with only limited data,
to keep the endpoint fast and avoid lots of queries just to display
follow status in the UI
"""
follows = list(
self.get_queryset().values_list("uuid", "target__fid", "approved")
)
payload = {
"results": [
{"uuid": str(u[0]), "actor": str(u[1]), "approved": u[2]}
for u in follows
],
"count": len(follows),
}
return response.Response(payload, status=200)

View File

@ -81,12 +81,11 @@ class SignatureAuthentication(authentication.BaseAuthentication):
fetch_delay = 24 * 3600
now = timezone.now()
last_fetch = actor.domain.nodeinfo_fetch_date
if not actor.domain.is_local:
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
return actor
def authenticate(self, request):

View File

@ -293,10 +293,7 @@ CONTEXTS = [
"Album": "fw:Album",
"Track": "fw:Track",
"Artist": "fw:Artist",
"ArtistCredit": "fw:ArtistCredit",
"Library": "fw:Library",
"Playlist": "fw:Playlist",
"PlaylistTrack": "fw:PlaylistTrack",
"bitrate": {"@id": "fw:bitrate", "@type": "xsd:nonNegativeInteger"},
"size": {"@id": "fw:size", "@type": "xsd:nonNegativeInteger"},
"position": {"@id": "fw:position", "@type": "xsd:nonNegativeInteger"},
@ -305,23 +302,13 @@ CONTEXTS = [
"track": {"@id": "fw:track", "@type": "@id"},
"cover": {"@id": "fw:cover", "@type": "as:Link"},
"album": {"@id": "fw:album", "@type": "@id"},
"artist": {"@id": "fw:artist", "@type": "@id"},
"artists": {"@id": "fw:artists", "@type": "@id", "@container": "@list"},
"artist_credit": {
"@id": "fw:artist_credit",
"@type": "@id",
"@container": "@list",
},
"joinphrase": {"@id": "fw:joinphrase", "@type": "xsd:string"},
"credit": {"@id": "fw:credit", "@type": "xsd:string"},
"index": {"@id": "fw:index", "@type": "xsd:nonNegativeInteger"},
"released": {"@id": "fw:released", "@type": "xsd:date"},
"musicbrainzId": "fw:musicbrainzId",
"license": {"@id": "fw:license", "@type": "@id"},
"copyright": "fw:copyright",
"category": "schema:category",
"language": "schema:inLanguage",
"playlist": {"@id": "fw:playlist", "@type": "@id"},
}
},
},
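A hypothetical JSON-LD fragment using the artist_credit terms declared above (the shape is illustrative, not the serializer's exact output; URLs are made up):

{
    "type": "Track",
    "name": "Example Song",
    "artist_credit": [
        {
            "type": "ArtistCredit",
            "artist": "https://pod.example/federation/music/artists/3fa85f64-5717-4562-b3fc-2c963f66afa6",
            "credit": "Example Artist",
            "joinphrase": " feat. ",
            "index": 0
        }
    ]
}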

View File

@ -128,6 +128,11 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
class Meta:
model = models.Actor
class Params:
with_real_keys = factory.Trait(
keys=factory.LazyFunction(keys.get_key_pair),
)
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
@ -148,26 +153,6 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
extracted.actor = self
extracted.save(update_fields=["user"])
else:
user = UserFactory(actor=self, **kwargs)
user.actor = self
user.save()
@factory.post_generation
def user(self, create, extracted, **kwargs):
"""
Handle the creation or assignment of the related user instance.
If `actor__user` is passed, it will be linked; otherwise, no user is created.
"""
if not create:
return
if extracted: # If a User instance is provided
extracted.actor = self
extracted.save(update_fields=["actor"])
elif kwargs:
from funkwhale_api.users.factories import UserFactory
# Create a User linked to this Actor
self.user = UserFactory(actor=self, **kwargs)
@ -185,25 +170,22 @@ class FollowFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
@registry.register
class MusicLibraryFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
uuid = factory.Faker("uuid4")
actor = factory.SubFactory(ActorFactory)
privacy_level = "me"
name = privacy_level
name = factory.Faker("sentence")
description = factory.Faker("sentence")
uploads_count = 0
fid = factory.Faker("federation_url")
followers_url = factory.LazyAttribute(
lambda o: o.fid + "/followers" if o.fid else None
)
class Meta:
model = "music.Library"
class Params:
local = factory.Trait(
fid=factory.Faker(
"federation_url",
local=True,
prefix="federation/music/libraries",
obj_uuid=factory.SelfAttribute("..uuid"),
),
actor=factory.SubFactory(ActorFactory, local=True),
fid=None, actor=factory.SubFactory(ActorFactory, local=True)
)

View File

@ -191,6 +191,7 @@ def prepare_for_serializer(payload, config, fallbacks={}):
value = noop
if not aliases:
continue
for a in aliases:
try:
value = get_value(
@ -278,6 +279,7 @@ class JsonLdSerializer(serializers.Serializer):
for field in dereferenced_fields:
for i in get_ids(data[field]):
dereferenced_ids.add(i)
if dereferenced_ids:
try:
loop = asyncio.get_event_loop()

View File

@ -9,7 +9,7 @@ MODELS = [
(music_models.Album, ["fid"]),
(music_models.Track, ["fid"]),
(music_models.Upload, ["fid"]),
(music_models.Library, ["fid"]),
(music_models.Library, ["fid", "followers_url"]),
(
federation_models.Actor,
[

View File

@ -218,6 +218,7 @@ class Actor(models.Model):
on_delete=models.SET_NULL,
related_name="iconed_actor",
)
objects = ActorQuerySet.as_manager()
class Meta:
@ -250,15 +251,9 @@ class Actor(models.Model):
follows = self.received_follows.filter(approved=True)
return self.followers.filter(pk__in=follows.values_list("actor", flat=True))
def get_approved_followings(self):
follows = self.emitted_follows.filter(approved=True)
return Actor.objects.filter(pk__in=follows.values_list("target", flat=True))
def should_autoapprove_follow(self, actor):
if self.get_channel():
return True
if self.user.privacy_level == "public":
return True
return False
def get_user(self):

View File

@ -3,10 +3,7 @@ import uuid
from django.db.models import Q
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlist_models
from . import activity, actors, models, serializers
@ -166,7 +163,7 @@ def outbox_follow(context):
def outbox_create_audio(context):
upload = context["upload"]
channel = upload.library.get_channel()
followers_target = channel.actor if channel else upload.library.actor
followers_target = channel.actor if channel else upload.library
actor = channel.actor if channel else upload.library.actor
if channel:
serializer = serializers.ChannelCreateUploadSerializer(upload)
@ -296,7 +293,7 @@ def inbox_delete_audio(payload, context):
upload_fids = [payload["object"]["id"]]
query = Q(fid__in=upload_fids) & (
Q(library__actor=actor) | Q(track__artist_credit__artist__channel__actor=actor)
Q(library__actor=actor) | Q(track__artist__channel__actor=actor)
)
candidates = music_models.Upload.objects.filter(query)
@ -310,8 +307,8 @@ def outbox_delete_audio(context):
uploads = context["uploads"]
library = uploads[0].library
channel = library.get_channel()
followers_target = channel.actor if channel else library
actor = channel.actor if channel else library.actor
followers_target = channel.actor if channel else actor
serializer = serializers.ActivitySerializer(
{
"type": "Delete",
@ -580,9 +577,7 @@ def inbox_delete_album(payload, context):
logger.debug("Discarding deletion of empty library")
return
query = Q(fid=album_id) & (
Q(attributed_to=actor) | Q(artist_credit__artist__channel__actor=actor)
)
query = Q(fid=album_id) & (Q(attributed_to=actor) | Q(artist__channel__actor=actor))
try:
album = music_models.Album.objects.get(query)
except music_models.Album.DoesNotExist:
@ -595,10 +590,9 @@ def inbox_delete_album(payload, context):
@outbox.register({"type": "Delete", "object.type": "Album"})
def outbox_delete_album(context):
album = context["album"]
album_artist = album.artist_credit.all()[0].artist
actor = (
album_artist.channel.actor
if album_artist.get_channel()
album.artist.channel.actor
if album.artist.get_channel()
else album.attributed_to
)
actor = actor or actors.get_service_actor()
@ -614,231 +608,3 @@ def outbox_delete_album(context):
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
),
}
@outbox.register({"type": "Like", "object.type": "Track"})
def outbox_create_track_favorite(context):
track = context["track"]
actor = context["actor"]
serializer = serializers.ActivitySerializer(
{
"type": "Like",
"id": context["id"],
"object": {"type": "Track", "id": track.fid},
}
)
yield {
"type": "Like",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@outbox.register({"type": "Dislike", "object.type": "Track"})
def outbox_delete_favorite(context):
favorite = context["favorite"]
actor = favorite.actor
serializer = serializers.ActivitySerializer(
{"type": "Dislike", "object": {"type": "Track", "id": favorite.track.fid}}
)
yield {
"type": "Dislike",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@inbox.register({"type": "Like", "object.type": "Track"})
def inbox_create_favorite(payload, context):
serializer = serializers.TrackFavoriteSerializer(data=payload)
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Dislike", "object.type": "Track"})
def inbox_delete_favorite(payload, context):
actor = context["actor"]
track_id = payload["object"].get("id")
query = Q(track__fid=track_id) & Q(actor=actor)
try:
favorite = favorites_models.TrackFavorite.objects.get(query)
except favorites_models.TrackFavorite.DoesNotExist:
logger.debug(
"Discarding deletion of unkwnown favorite with track : %s", track_id
)
return
favorite.delete()
# TODO: test listening routes and broadcast
@outbox.register({"type": "Listen", "object.type": "Track"})
def outbox_create_listening(context):
track = context["track"]
actor = context["actor"]
serializer = serializers.ActivitySerializer(
{
"type": "Listen",
"id": context["id"],
"object": {"type": "Track", "id": track.fid},
}
)
yield {
"type": "Listen",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@outbox.register({"type": "Delete", "object.type": "Listen"})
def outbox_delete_listening(context):
listening = context["listening"]
actor = listening.actor
serializer = serializers.ActivitySerializer(
{"type": "Delete", "object": {"type": "Listen", "id": listening.fid}}
)
yield {
"type": "Delete",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@inbox.register({"type": "Listen", "object.type": "Track"})
def inbox_create_listening(payload, context):
serializer = serializers.ListeningSerializer(data=payload)
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Delete", "object.type": "Listen"})
def inbox_delete_listening(payload, context):
actor = context["actor"]
listening_id = payload["object"].get("id")
query = Q(fid=listening_id) & Q(actor=actor)
try:
favorite = history_models.Listening.objects.get(query)
except history_models.Listening.DoesNotExist:
logger.debug("Discarding deletion of unkwnown listening %s", listening_id)
return
favorite.delete()
@outbox.register({"type": "Create", "object.type": "Playlist"})
def outbox_create_playlist(context):
playlist = context["playlist"]
serializer = serializers.ActivitySerializer(
{
"type": "Create",
"actor": playlist.actor,
"id": playlist.fid,
"object": serializers.PlaylistSerializer(playlist).data,
}
)
yield {
"type": "Create",
"actor": playlist.actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": playlist.actor}],
),
}
@outbox.register({"type": "Delete", "object.type": "Playlist"})
def outbox_delete_playlist(context):
playlist = context["playlist"]
actor = playlist.actor
serializer = serializers.ActivitySerializer(
{"type": "Delete", "object": {"type": "Playlist", "id": playlist.fid}}
)
yield {
"type": "Delete",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
),
}
@inbox.register({"type": "Create", "object.type": "Playlist"})
def inbox_create_playlist(payload, context):
serializer = serializers.PlaylistSerializer(data=payload["object"])
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Delete", "object.type": "Playlist"})
def inbox_delete_playlist(payload, context):
actor = context["actor"]
playlist_id = payload["object"].get("id")
query = Q(fid=playlist_id) & Q(actor=actor)
try:
playlist = playlist_models.Playlist.objects.get(query)
except playlist_models.Playlist.DoesNotExist:
logger.debug("Discarding deletion of unkwnown listening %s", playlist_id)
return
playlist.playlist_tracks.all().delete()
playlist.delete()
@inbox.register({"type": "Update", "object.type": "Playlist"})
def inbox_update_playlist(payload, context):
actor = context["actor"]
playlist_id = payload["object"].get("id")
if not actor.playlists.filter(fid=playlist_id).exists():
logger.debug("Discarding update of unkwnown playlist_id %s", playlist_id)
return
serializer = serializers.PlaylistSerializer(data=payload["object"])
if serializer.is_valid(raise_exception=True):
playlist = serializer.save()
# trigger a scan: this single Update activity is used instead of sending many PlaylistTrack activities
playlist.schedule_scan(actors.get_service_actor())
return
else:
logger.debug(
"Discarding update of playlist_id %s because of payload errors: %s",
playlist_id,
serializer.errors,
)
@outbox.register({"type": "Update", "object.type": "Playlist"})
def outbox_update_playlist(context):
playlist = context["playlist"]
serializer = serializers.ActivitySerializer(
{"type": "Update", "object": serializers.PlaylistSerializer(playlist).data}
)
yield {
"type": "Update",
"actor": playlist.actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": playlist.actor}],
),
}

View File

@ -1,31 +1,27 @@
import logging
import os
import re
import urllib.parse
import uuid
from django.core.exceptions import ObjectDoesNotExist
from django.core.paginator import Paginator
from django.db import transaction
from django.db.models import Q
from django.urls import reverse
from django.utils import timezone
from rest_framework import serializers
from funkwhale_api.common import models as common_models
from funkwhale_api.common import utils as common_utils
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.federation import activity, actors, contexts, jsonld, models, utils
from funkwhale_api.history import models as history_models
from funkwhale_api.moderation import models as moderation_models
from funkwhale_api.moderation import serializers as moderation_serializers
from funkwhale_api.moderation import signals as moderation_signals
from funkwhale_api.music import licenses
from funkwhale_api.music import models as music_models
from funkwhale_api.music import tasks as music_tasks
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.tags import models as tags_models
from . import activity, actors, contexts, jsonld, models, utils
logger = logging.getLogger(__name__)
@ -344,11 +340,9 @@ class ActorSerializer(jsonld.JsonLdSerializer):
ret["url"] = [
{
"type": "Link",
"href": (
instance.channel.get_absolute_url()
if instance.channel.artist.is_local
else instance.get_absolute_url()
),
"href": instance.channel.get_absolute_url()
if instance.channel.artist.is_local
else instance.get_absolute_url(),
"mediaType": "text/html",
},
{
@ -442,11 +436,9 @@ class ActorSerializer(jsonld.JsonLdSerializer):
common_utils.attach_file(
actor,
"attachment_icon",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
rss_url = get_by_media_type(
@ -499,11 +491,9 @@ def create_or_update_channel(actor, rss_url, attributed_to_fid, **validated_data
common_utils.attach_file(
artist,
"attachment_cover",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
tags = [t["name"] for t in validated_data.get("tags", []) or []]
tags_models.set_tags(artist, *tags)
@ -654,6 +644,7 @@ class FollowSerializer(serializers.Serializer):
def save(self, **kwargs):
target = self.validated_data["object"]
if target._meta.label == "music.Library":
follow_class = models.LibraryFollow
else:
@ -821,9 +812,7 @@ class UndoFollowSerializer(serializers.Serializer):
actor=validated_data["actor"], target=target
).get()
except follow_class.DoesNotExist:
raise serializers.ValidationError(
f"No follow to remove follow_class = {follow_class}"
)
raise serializers.ValidationError("No follow to remove")
return validated_data
def to_representation(self, instance):
@ -890,6 +879,7 @@ class ActivitySerializer(serializers.Serializer):
object_serializer = OBJECT_SERIALIZERS[type]
except KeyError:
raise serializers.ValidationError(f"Unsupported type {type}")
serializer = object_serializer(data=value)
serializer.is_valid(raise_exception=True)
return serializer.data
@ -974,7 +964,7 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
first = common_utils.set_query_parameter(conf["id"], page=1)
current = first
last = common_utils.set_query_parameter(conf["id"], page=paginator.num_pages)
data = {
d = {
"id": conf["id"],
"attributedTo": conf["actor"].fid,
"totalItems": paginator.count,
@ -983,10 +973,10 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
"first": first,
"last": last,
}
data.update(get_additional_fields(conf))
d.update(get_additional_fields(conf))
if self.context.get("include_ap_context", True):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
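Aside: the collection representation above exposes first/current/last page links built from the collection id, a page query parameter and Django's Paginator.num_pages. The standalone sketch below reimplements that URL arithmetic with the standard library for illustration (set_query_parameter here is an approximation of the helper referenced in the diff; the URL is made up).

import math
from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse


def set_query_parameter(url, **params):
    # Replace or add query parameters on a URL, keeping everything else intact.
    parts = urlparse(url)
    query = dict(parse_qsl(parts.query))
    query.update({key: str(value) for key, value in params.items()})
    return urlunparse(parts._replace(query=urlencode(query)))


collection_id = "https://pod.example/federation/music/libraries/42"
total_items, page_size = 1234, 100
num_pages = max(1, math.ceil(total_items / page_size))  # what Paginator.num_pages computes

first = set_query_parameter(collection_id, page=1)
last = set_query_parameter(collection_id, page=num_pages)
print(first)  # https://pod.example/federation/music/libraries/42?page=1
print(last)   # https://pod.example/federation/music/libraries/42?page=13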
class LibrarySerializer(PaginatedCollectionSerializer):
@ -996,6 +986,8 @@ class LibrarySerializer(PaginatedCollectionSerializer):
actor = serializers.URLField(max_length=500, required=False)
attributedTo = serializers.URLField(max_length=500, required=False)
name = serializers.CharField()
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
followers = serializers.URLField(max_length=500)
audience = serializers.ChoiceField(
choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"],
required=False,
@ -1012,7 +1004,9 @@ class LibrarySerializer(PaginatedCollectionSerializer):
PAGINATED_COLLECTION_JSONLD_MAPPING,
{
"name": jsonld.first_val(contexts.AS.name),
"summary": jsonld.first_val(contexts.AS.summary),
"audience": jsonld.first_id(contexts.AS.audience),
"followers": jsonld.first_id(contexts.AS.followers),
"actor": jsonld.first_id(contexts.AS.actor),
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
},
@ -1034,6 +1028,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
conf = {
"id": library.fid,
"name": library.name,
"summary": library.description,
"page_size": 100,
"attributedTo": library.actor,
"actor": library.actor,
@ -1044,6 +1039,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
r["audience"] = (
contexts.AS.Public if library.privacy_level == "everyone" else ""
)
r["followers"] = library.followers_url
return r
def create(self, validated_data):
@ -1063,6 +1059,8 @@ class LibrarySerializer(PaginatedCollectionSerializer):
defaults={
"uploads_count": validated_data["totalItems"],
"name": validated_data["name"],
"description": validated_data.get("summary"),
"followers_url": validated_data["followers"],
"privacy_level": privacy[validated_data["audience"]],
},
)
@ -1223,22 +1221,12 @@ class MusicEntitySerializer(jsonld.JsonLdSerializer):
self.updateable_fields, validated_data, instance
)
updated_fields = self.validate_updated_data(instance, updated_fields)
set_ac = False
if "artist_credit" in updated_fields:
artist_credit = updated_fields.pop("artist_credit")
set_ac = True
if creating:
instance, created = self.Meta.model.objects.get_or_create(
fid=validated_data["id"], defaults=updated_fields
)
if set_ac:
instance.artist_credit.set(artist_credit)
else:
obj = music_tasks.update_library_entity(instance, updated_fields)
if set_ac:
obj.artist_credit.set(artist_credit)
music_tasks.update_library_entity(instance, updated_fields)
tags = [t["name"] for t in validated_data.get("tags", []) or []]
tags_models.set_tags(instance, *tags)
@ -1300,6 +1288,7 @@ class ArtistSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"released": jsonld.first_val(contexts.FW.released),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"image": jsonld.first_obj(contexts.AS.image),
},
)
@ -1311,9 +1300,9 @@ class ArtistSerializer(MusicEntitySerializer):
"name": instance.name,
"published": instance.creation_date.isoformat(),
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
include_content(d, instance.description)
@ -1325,53 +1314,12 @@ class ArtistSerializer(MusicEntitySerializer):
create = MusicEntitySerializer.update_or_create
class ArtistCreditSerializer(jsonld.JsonLdSerializer):
artist = ArtistSerializer()
joinphrase = serializers.CharField(
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
)
credit = serializers.CharField(
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
)
published = serializers.DateTimeField()
id = serializers.URLField(max_length=500)
updateable_fields = [
("credit", "credit"),
("artist", "artist"),
("joinphrase", "joinphrase"),
]
class Meta:
model = music_models.ArtistCredit
jsonld_mapping = {
"artist": jsonld.first_obj(contexts.FW.artist),
"credit": jsonld.first_val(contexts.FW.credit),
"index": jsonld.first_val(contexts.FW.index),
"joinphrase": jsonld.first_val(contexts.FW.joinphrase),
"published": jsonld.first_val(contexts.AS.published),
}
def to_representation(self, instance):
data = {
"type": "ArtistCredit",
"id": instance.fid,
"artist": ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data,
"joinphrase": instance.joinphrase,
"credit": instance.credit,
"index": instance.index,
"published": instance.creation_date.isoformat(),
}
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
class AlbumSerializer(MusicEntitySerializer):
released = serializers.DateField(allow_null=True, required=False)
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
artists = serializers.ListField(
child=MultipleSerializer(allowed=[BasicActorSerializer, ArtistSerializer]),
min_length=1,
)
image = ImageSerializer(
allowed_mimetypes=["image/*"],
allow_null=True,
@ -1384,7 +1332,7 @@ class AlbumSerializer(MusicEntitySerializer):
("musicbrainzId", "mbid"),
("attributedTo", "attributed_to"),
("released", "release_date"),
("artist_credit", "artist_credit"),
("_artist", "artist"),
]
class Meta:
@ -1393,60 +1341,62 @@ class AlbumSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"released": jsonld.first_val(contexts.FW.released),
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"image": jsonld.first_obj(contexts.AS.image),
},
)
def to_representation(self, instance):
data = {
d = {
"type": "Album",
"id": instance.fid,
"name": instance.title,
"published": instance.creation_date.isoformat(),
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"released": (
instance.release_date.isoformat() if instance.release_date else None
),
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"released": instance.release_date.isoformat()
if instance.release_date
else None,
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
data["artist_credit"] = ArtistCreditSerializer(
instance.artist_credit.all(),
context={"include_ap_context": False},
many=True,
).data
include_content(data, instance.description)
if instance.artist.get_channel():
d["artists"] = [
{
"type": instance.artist.channel.actor.type,
"id": instance.artist.channel.actor.fid,
}
]
else:
d["artists"] = [
ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data
]
include_content(d, instance.description)
if instance.attachment_cover:
include_image(data, instance.attachment_cover)
include_image(d, instance.attachment_cover)
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
def validate(self, data):
validated_data = super().validate(data)
if not self.parent:
artist_credit_data = validated_data["artist_credit"]
if artist_credit_data[0]["artist"].get("type", "Artist") == "Artist":
acs = []
for ac in validated_data["artist_credit"]:
acs.append(
utils.retrieve_ap_object(
ac["id"],
actor=self.context.get("fetch_actor"),
queryset=music_models.ArtistCredit,
serializer_class=ArtistCreditSerializer,
)
)
validated_data["artist_credit"] = acs
artist_data = validated_data["artists"][0]
if artist_data.get("type", "Artist") == "Artist":
validated_data["_artist"] = utils.retrieve_ap_object(
artist_data["id"],
actor=self.context.get("fetch_actor"),
queryset=music_models.Artist,
serializer_class=ArtistSerializer,
)
else:
# we have an actor as an artist, so it's a channel
actor = actors.get_actor(artist_credit_data[0]["artist"]["id"])
validated_data["artist_credit"] = [{"artist": actor.channel.artist}]
actor = actors.get_actor(artist_data["id"])
validated_data["_artist"] = actor.channel.artist
return validated_data
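Aside: the hunks in this file replace the single artist relation with an ordered artist_credit list whose entries carry a credit name and a joinphrase. The standalone sketch below shows how such a list collapses into a display string; the data shapes are illustrative only, not Funkwhale's models.

def artist_credit_string(credits):
    # Concatenate credited names with their join phrases, preserving list order.
    return "".join(f"{entry['credit']}{entry.get('joinphrase') or ''}" for entry in credits)


credits = [
    {"credit": "Nina Simone", "joinphrase": " feat. "},
    {"credit": "Al Schackman", "joinphrase": ""},
]
print(artist_credit_string(credits))  # Nina Simone feat. Al Schackman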
@ -1456,7 +1406,7 @@ class AlbumSerializer(MusicEntitySerializer):
class TrackSerializer(MusicEntitySerializer):
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
artists = serializers.ListField(child=ArtistSerializer(), min_length=1)
album = AlbumSerializer()
license = serializers.URLField(allow_null=True, required=False)
copyright = serializers.CharField(allow_null=True, required=False)
@ -1484,7 +1434,7 @@ class TrackSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"album": jsonld.first_obj(contexts.FW.album),
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"copyright": jsonld.first_val(contexts.FW.copyright),
"disc": jsonld.first_val(contexts.FW.disc),
"license": jsonld.first_id(contexts.FW.license),
@ -1494,7 +1444,7 @@ class TrackSerializer(MusicEntitySerializer):
)
def to_representation(self, instance):
data = {
d = {
"type": "Track",
"id": instance.fid,
"name": instance.title,
@ -1502,32 +1452,29 @@ class TrackSerializer(MusicEntitySerializer):
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"position": instance.position,
"disc": instance.disc_number,
"license": (
instance.local_license["identifiers"][0]
if instance.local_license
else None
),
"license": instance.local_license["identifiers"][0]
if instance.local_license
else None,
"copyright": instance.copyright if instance.copyright else None,
"artist_credit": ArtistCreditSerializer(
instance.artist_credit.all(),
context={"include_ap_context": False},
many=True,
).data,
"artists": [
ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data
],
"album": AlbumSerializer(
instance.album, context={"include_ap_context": False}
).data,
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
include_content(data, instance.description)
include_image(data, instance.attachment_cover)
include_content(d, instance.description)
include_image(d, instance.attachment_cover)
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
@transaction.atomic
def create(self, validated_data):
from funkwhale_api.music import tasks as music_tasks
@ -1543,21 +1490,18 @@ class TrackSerializer(MusicEntitySerializer):
validated_data, "album.attributedTo", permissive=True
)
)
artist_credit = (
artists = (
common_utils.recursive_getattr(validated_data, "artists", permissive=True)
or []
)
album_artists = (
common_utils.recursive_getattr(
validated_data, "artist_credit", permissive=True
validated_data, "album.artists", permissive=True
)
or []
)
album_artists_credit = (
common_utils.recursive_getattr(
validated_data, "album.artist_credit", permissive=True
)
or []
)
for ac in artist_credit + album_artists_credit:
actors_to_fetch.add(ac["artist"].get("attributedTo"))
for artist in artists + album_artists:
actors_to_fetch.add(artist.get("attributedTo"))
for url in actors_to_fetch:
if not url:
@ -1570,9 +1514,8 @@ class TrackSerializer(MusicEntitySerializer):
from_activity = self.context.get("activity")
if from_activity:
metadata["from_activity_id"] = from_activity.pk
track = music_tasks.get_track_from_import_metadata(
metadata, update_cover=True, query_mb=False
)
track = music_tasks.get_track_from_import_metadata(metadata, update_cover=True)
return track
def update(self, obj, validated_data):
@ -1581,50 +1524,6 @@ class TrackSerializer(MusicEntitySerializer):
return super().update(obj, validated_data)
def duration_int_to_xml(duration):
if not duration:
return None
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
ret = "P"
days, seconds = divmod(int(duration), multipliers["D"])
ret += f"{days:d}DT" if days > 0 else "T"
hours, seconds = divmod(seconds, multipliers["H"])
ret += f"{hours:d}H" if hours > 0 else ""
minutes, seconds = divmod(seconds, multipliers["M"])
ret += f"{minutes:d}M" if minutes > 0 else ""
ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
return ret
class DayTimeDurationSerializer(serializers.DurationField):
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
def to_internal_value(self, value):
if isinstance(value, float):
return value
parsed = re.match(
r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
)
if parsed is not None:
return int(
sum(
[
self.multipliers[s[-1]] * float("0" + s[:-1])
for s in parsed.groups()
if s is not None
]
)
)
self.fail(
"invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
)
def to_representation(self, value):
return duration_int_to_xml(value)
class UploadSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Audio])
id = serializers.URLField(max_length=500)
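Aside: the duration helpers shown in the hunk above convert an integer number of seconds to an xsd:dayTimeDuration string and back. The standalone round-trip sketch below is adapted from that logic so it can be run on its own.

import re

MULTIPLIERS = {"S": 1, "M": 60, "H": 3600, "D": 86400}


def seconds_to_daytime_duration(seconds):
    # 3723 -> "PT1H2M3S", 90061 -> "P1DT1H1M1S"
    days, rest = divmod(int(seconds), MULTIPLIERS["D"])
    hours, rest = divmod(rest, MULTIPLIERS["H"])
    minutes, secs = divmod(rest, MULTIPLIERS["M"])
    out = "P" + (f"{days}DT" if days else "T")
    out += f"{hours}H" if hours else ""
    out += f"{minutes}M" if minutes else ""
    out += f"{secs}S" if secs or out == "PT" else ""
    return out


def daytime_duration_to_seconds(value):
    parsed = re.match(r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value))
    if parsed is None:
        raise ValueError(f"not a dayTimeDuration: {value!r}")
    return int(sum(MULTIPLIERS[group[-1]] * float(group[:-1]) for group in parsed.groups() if group))


assert seconds_to_daytime_duration(90061) == "P1DT1H1M1S"
assert daytime_duration_to_seconds("P1DT1H1M1S") == 90061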
@ -1634,7 +1533,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
updated = serializers.DateTimeField(required=False, allow_null=True)
bitrate = serializers.IntegerField(min_value=0)
size = serializers.IntegerField(min_value=0)
duration = DayTimeDurationSerializer(min_value=0)
duration = serializers.IntegerField(min_value=0)
track = TrackSerializer(required=True)
@ -1746,7 +1645,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
"published": instance.creation_date.isoformat(),
"bitrate": instance.bitrate,
"size": instance.size,
"duration": duration_int_to_xml(instance.duration),
"duration": instance.duration,
"url": [
{
"href": utils.full_url(instance.listen_url_no_download),
@ -1760,11 +1659,9 @@ class UploadSerializer(jsonld.JsonLdSerializer):
},
],
"track": TrackSerializer(track, context={"include_ap_context": False}).data,
"to": (
contexts.AS.Public
if instance.library.privacy_level == "everyone"
else ""
),
"to": contexts.AS.Public
if instance.library.privacy_level == "everyone"
else "",
"attributedTo": instance.library.actor.fid,
}
if instance.modification_date:
@ -1883,7 +1780,7 @@ class ChannelOutboxSerializer(PaginatedCollectionSerializer):
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.filter(track__artist_credit__artist=channel.artist),
.filter(track__artist=channel.artist),
"type": "OrderedCollection",
}
r = super().to_representation(conf)
@ -1896,7 +1793,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
name = serializers.CharField()
published = serializers.DateTimeField(required=False)
duration = DayTimeDurationSerializer(required=False)
duration = serializers.IntegerField(min_value=0, required=False)
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
album = serializers.URLField(max_length=500, required=False)
@ -1953,7 +1850,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
actor=actors.get_service_actor(),
serializer_class=AlbumSerializer,
queryset=music_models.Album.objects.filter(
artist_credit__artist__channel=self.context["channel"]
artist__channel=self.context["channel"]
),
)
@ -1984,9 +1881,9 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
"name": upload.track.title,
"attributedTo": upload.library.channel.actor.fid,
"published": upload.creation_date.isoformat(),
"to": (
contexts.AS.Public if upload.library.privacy_level == "everyone" else ""
),
"to": contexts.AS.Public
if upload.library.privacy_level == "everyone"
else "",
"url": [
{
"type": "Link",
@ -2005,7 +1902,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
if upload.track.local_license:
data["license"] = upload.track.local_license["identifiers"][0]
include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
include_if_not_none(data, upload.duration, "duration")
include_if_not_none(data, upload.track.position, "position")
include_if_not_none(data, upload.track.disc_number, "disc")
include_if_not_none(data, upload.track.copyright, "copyright")
@ -2032,6 +1929,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
now = timezone.now()
track_defaults = {
"fid": validated_data["id"],
"artist": channel.artist,
"position": validated_data.get("position", 1),
"disc_number": validated_data.get("disc", 1),
"title": validated_data["name"],
@ -2044,42 +1942,17 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
track_defaults["license"] = licenses.match(validated_data["license"])
track, created = music_models.Track.objects.update_or_create(
fid=validated_data["id"],
defaults=track_defaults,
artist__channel=channel, fid=validated_data["id"], defaults=track_defaults
)
# only one artist_credit per channel
query = (
Q(
artist=channel.artist,
)
& Q(credit__iexact=channel.artist.name)
& Q(joinphrase="")
)
defaults = {
"artist": channel.artist,
"joinphrase": "",
"credit": channel.artist.name,
}
ac_obj = music_tasks.get_best_candidate_or_create(
music_models.ArtistCredit,
query,
defaults=defaults,
sort_fields=["mbid", "fid"],
)
track.artist_credit.set([ac_obj[0].id])
if "image" in validated_data:
new_value = self.validated_data["image"]
common_utils.attach_file(
track,
"attachment_cover",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
common_utils.attach_content(
@ -2203,254 +2076,3 @@ class IndexSerializer(jsonld.JsonLdSerializer):
if self.context.get("include_ap_context", True):
d["@context"] = jsonld.get_default_context()
return d
class TrackFavoriteSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Like])
id = serializers.URLField(max_length=500)
object = serializers.URLField(max_length=500)
actor = serializers.URLField(max_length=500)
class Meta:
jsonld_mapping = {
"object": jsonld.first_id(contexts.AS.object),
"actor": jsonld.first_id(contexts.AS.actor),
}
def to_representation(self, favorite):
payload = {
"type": "Like",
"id": favorite.fid,
"actor": favorite.actor.fid,
"object": favorite.track.fid,
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = actors.get_actor(validated_data["actor"])
track = utils.retrieve_ap_object(
validated_data["object"],
actor=actors.get_service_actor(),
serializer_class=TrackSerializer,
)
return favorites_models.TrackFavorite.objects.create(
fid=validated_data.get("id"),
uuid=uuid.uuid4(),
actor=actor,
track=track,
)
class ListeningSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Listen])
id = serializers.URLField(max_length=500)
object = serializers.URLField(max_length=500)
actor = serializers.URLField(max_length=500)
class Meta:
jsonld_mapping = {
"object": jsonld.first_id(contexts.AS.object),
"actor": jsonld.first_id(contexts.AS.actor),
}
def to_representation(self, listening):
payload = {
"type": "Listen",
"id": listening.fid,
"actor": listening.actor.fid,
"object": listening.track.fid,
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = actors.get_actor(validated_data["actor"])
track = utils.retrieve_ap_object(
validated_data["object"],
actor=actors.get_service_actor(),
serializer_class=TrackSerializer,
)
return history_models.Listening.objects.create(
fid=validated_data.get("id"),
uuid=validated_data["id"].rstrip("/").split("/")[-1],
actor=actor,
track=track,
)
class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.FW.PlaylistTrack])
id = serializers.URLField(max_length=500)
track = serializers.URLField(max_length=500)
index = serializers.IntegerField()
creation_date = serializers.DateTimeField()
playlist = serializers.URLField(max_length=500, required=False)
class Meta:
model = playlists_models.PlaylistTrack
jsonld_mapping = {
"track": jsonld.first_id(contexts.FW.track),
"playlist": jsonld.first_id(contexts.FW.playlist),
"index": jsonld.first_val(contexts.FW.index),
"creation_date": jsonld.first_val(contexts.AS.published),
}
def to_representation(self, plt):
payload = {
"type": "PlaylistTrack",
"id": plt.fid,
"track": plt.track.fid,
"index": plt.index,
"attributedTo": plt.playlist.actor.fid,
"published": plt.creation_date.isoformat(),
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
if self.context.get("include_playlist", True):
payload["playlist"] = plt.playlist.fid
return payload
def create(self, validated_data):
track = utils.retrieve_ap_object(
validated_data["track"],
actor=self.context.get("fetch_actor"),
queryset=music_models.Track,
serializer_class=TrackSerializer,
)
playlist = utils.retrieve_ap_object(
validated_data["playlist"],
actor=self.context.get("fetch_actor"),
queryset=playlists_models.Playlist,
serializer_class=PlaylistTrackSerializer,
)
defaults = {
"track": track,
"index": validated_data["index"],
"creation_date": validated_data["creation_date"],
"playlist": playlist,
}
plt, created = playlists_models.PlaylistTrack.objects.update_or_create(
defaults,
**{
"uuid": validated_data["id"].rstrip("/").split("/")[-1],
"fid": validated_data["id"],
},
)
return plt
class PlaylistSerializer(jsonld.JsonLdSerializer):
"""
Used for playlist activities
"""
type = serializers.ChoiceField(choices=[contexts.FW.Playlist, contexts.AS.Create])
id = serializers.URLField(max_length=500)
uuid = serializers.UUIDField(required=False)
name = serializers.CharField(required=False)
attributedTo = serializers.URLField(max_length=500, required=False)
published = serializers.DateTimeField(required=False)
updated = serializers.DateTimeField(required=False)
audience = serializers.ChoiceField(
choices=[None, "https://www.w3.org/ns/activitystreams#Public"],
required=False,
allow_null=True,
allow_blank=True,
)
updateable_fields = [
("name", "title"),
("attributedTo", "attributed_to"),
]
class Meta:
model = playlists_models.Playlist
jsonld_mapping = common_utils.concat_dicts(
MUSIC_ENTITY_JSONLD_MAPPING,
{
"updated": jsonld.first_val(contexts.AS.published),
"audience": jsonld.first_id(contexts.AS.audience),
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
},
)
def to_representation(self, playlist):
payload = {
"type": "Playlist",
"id": playlist.fid,
"name": playlist.name,
"attributedTo": playlist.actor.fid,
"published": playlist.creation_date.isoformat(),
"audience": playlist.privacy_level,
}
payload["audience"] = (
contexts.AS.Public if playlist.privacy_level == "everyone" else ""
)
if playlist.modification_date:
payload["updated"] = playlist.modification_date.isoformat()
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = utils.retrieve_ap_object(
validated_data["attributedTo"],
actor=self.context.get("fetch_actor"),
queryset=models.Actor,
serializer_class=ActorSerializer,
)
ap_to_fw_data = {
"actor": actor,
"name": validated_data["name"],
"creation_date": validated_data["published"],
"privacy_level": validated_data["audience"],
}
playlist, created = playlists_models.Playlist.objects.update_or_create(
defaults=ap_to_fw_data,
**{
"fid": validated_data["id"],
"uuid": validated_data.get(
"uuid", validated_data["id"].rstrip("/").split("/")[-1]
),
},
)
return playlist
def validate(self, data):
validated_data = super().validate(data)
if validated_data["audience"] not in [
"https://www.w3.org/ns/activitystreams#Public",
"everyone",
]:
raise serializers.ValidationError("Privacy_level must be everyone")
validated_data["audience"] = "everyone"
return validated_data
class PlaylistCollectionSerializer(PaginatedCollectionSerializer):
"""
Used for the federation view.
"""
type = serializers.ChoiceField(choices=[contexts.FW.Playlist])
def to_representation(self, playlist):
conf = {
"id": playlist.fid,
"name": playlist.name,
"page_size": 100,
"actor": playlist.actor,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"tracks",
),
"type": "Playlist",
}
r = super().to_representation(conf)
return r
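Aside: several create() methods above derive the local uuid from the trailing path segment of the federation id (validated_data["id"].rstrip("/").split("/")[-1]). A tiny standalone illustration of that extraction, with a made-up URL:

import uuid


def uuid_from_ap_id(ap_id):
    # "https://pod.example/federation/music/listenings/<uuid>/" -> uuid.UUID
    return uuid.UUID(ap_id.rstrip("/").split("/")[-1])


fid = "https://pod.example/federation/music/listenings/9a1f2a59-4c2e-4a6f-9d0a-1c7f2b3d4e5f/"
print(uuid_from_ap_id(fid))  # 9a1f2a59-4c2e-4a6f-9d0a-1c7f2b3d4e5f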

View File

@ -30,7 +30,7 @@ def verify_date(raw_date):
ts = parse_http_date(raw_date)
except ValueError as e:
raise forms.ValidationError(str(e))
dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
dt = datetime.datetime.utcfromtimestamp(ts)
dt = dt.replace(tzinfo=ZoneInfo("UTC"))
delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
now = timezone.now()
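Aside: the hunk above shows the two ways of turning the parsed HTTP Date timestamp into an aware UTC datetime: the timezone-aware fromtimestamp(ts, datetime.timezone.utc) call and the legacy utcfromtimestamp() plus a tzinfo replace (the latter is deprecated since Python 3.12). A standalone check that both spellings produce the same aware value:

import datetime
from zoneinfo import ZoneInfo

ts = 1700000000  # stand-in for the parsed Date header timestamp

aware = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
legacy = datetime.datetime.utcfromtimestamp(ts).replace(tzinfo=ZoneInfo("UTC"))

assert aware == legacy
print(aware.isoformat())  # 2023-11-14T22:13:20+00:00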

View File

@ -5,7 +5,6 @@ import os
import requests
from django.conf import settings
from django.core.cache import cache
from django.db import transaction
from django.db.models import F, Q
from django.db.models.deletion import Collector
@ -19,7 +18,6 @@ from funkwhale_api.common import preferences, session
from funkwhale_api.common import utils as common_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.taskapp import celery
from . import (
@ -667,14 +665,3 @@ def check_single_remote_instance_availability(domain):
domain.reachable = False
domain.save()
return domain.reachable
@celery.app.task(name="federation.trigger_playlist_ap_update")
def trigger_playlist_ap_update(playlist):
for playlist_uuid in cache.get("playlists_for_ap_update"):
routes.outbox.dispatch(
{"type": "Update", "object": {"type": "Playlist"}},
context={
"playlist": playlists_models.Playlist.objects.get(uuid=playlist_uuid)
},
)

View File

@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from rest_framework import routers
from . import views
@ -17,18 +16,13 @@ router.register(r".well-known", views.WellKnownViewSet, "well-known")
music_router.register(r"libraries", views.MusicLibraryViewSet, "libraries")
music_router.register(r"uploads", views.MusicUploadViewSet, "uploads")
music_router.register(r"artists", views.MusicArtistViewSet, "artists")
music_router.register(r"artistcredit", views.MusicArtistCreditViewSet, "artistcredit")
music_router.register(r"albums", views.MusicAlbumViewSet, "albums")
music_router.register(r"tracks", views.MusicTrackViewSet, "tracks")
music_router.register(r"likes", views.TrackFavoriteViewSet, "likes")
music_router.register(r"listenings", views.ListeningsViewSet, "listenings")
music_router.register(r"playlists", views.PlaylistViewSet, "playlists")
index_router.register(r"index", views.IndexViewSet, "index")
urlpatterns = router.urls + [
re_path(
"federation/music/", include((music_router.urls, "music"), namespace="music")
),
re_path("federation/", include((index_router.urls, "index"), namespace="index")),
url("federation/music/", include((music_router.urls, "music"), namespace="music")),
url("federation/", include((index_router.urls, "index"), namespace="index")),
]

View File

@ -7,16 +7,12 @@ from django.urls import reverse
from rest_framework import exceptions, mixins, permissions, response, viewsets
from rest_framework.decorators import action
from funkwhale_api.common import permissions as common_permissions
from funkwhale_api.common import preferences
from funkwhale_api.common import utils as common_utils
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.history import models as history_models
from funkwhale_api.moderation import models as moderation_models
from funkwhale_api.music import models as music_models
from funkwhale_api.music import utils as music_utils
from funkwhale_api.playlists import models as playlists_models
from . import (
activity,
@ -165,9 +161,7 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related(
"library__channel__actor", "track__artist_credit__artist"
),
.prefetch_related("library__channel__actor", "track__artist"),
"item_serializer": serializers.ChannelCreateUploadSerializer,
}
return get_collection_response(
@ -176,115 +170,17 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
collection_serializer=serializers.ChannelOutboxSerializer(channel),
)
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
@action(methods=["get"], detail=True)
def followers(self, request, *args, **kwargs):
actor = self.get_object()
followers = list(actor.get_approved_followers())
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-followers",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followers,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
self.get_object()
# XXX to implement
return response.Response({})
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
@action(methods=["get"], detail=True)
def following(self, request, *args, **kwargs):
actor = self.get_object()
followings = list(
actor.emitted_follows.filter(approved=True).values_list("target", flat=True)
)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-following",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followings,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def listens(self, request, *args, **kwargs):
actor = self.get_object()
listenings = history_models.Listening.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-listens",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": listenings,
"item_serializer": serializers.ListeningSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def likes(self, request, *args, **kwargs):
actor = self.get_object()
likes = favorites_models.TrackFavorite.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-likes",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": likes,
"item_serializer": serializers.TrackFavoriteSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
self.get_object()
# XXX to implement
return response.Response({})
class EditViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
@ -387,27 +283,28 @@ class MusicLibraryViewSet(
"id": lb.get_federation_id(),
"actor": lb.actor,
"name": lb.name,
"summary": lb.description,
"items": lb.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related(
Prefetch(
"track",
queryset=music_models.Track.objects.select_related(
"album__artist__attributed_to",
"artist__attributed_to",
"artist__attachment_cover",
"attachment_cover",
"album__attributed_to",
"attributed_to",
"album__attachment_cover",
"album__artist__attachment_cover",
"description",
).prefetch_related(
"album__artist_credit__artist__attributed_to",
"artist_credit__artist__attributed_to",
"artist_credit__artist__attachment_cover",
"tagged_items__tag",
"album__tagged_items__tag",
"album__artist_credit__artist__tagged_items__tag",
"album__artist_credit__artist__attachment_cover",
"artist_credit__artist__tagged_items__tag",
"artist_credit__artist__description",
"album__artist__tagged_items__tag",
"artist__tagged_items__tag",
"artist__description",
"album__description",
),
)
@ -434,20 +331,15 @@ class MusicUploadViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Upload.objects.local()
.select_related(
"library__actor",
"track__description",
"track__album__attachment_cover",
"track__attachment_cover",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
"track__album__artist_credit__artist__attachment_cover",
"track__artist_credit__artist__attachment_cover",
)
queryset = music_models.Upload.objects.local().select_related(
"library__actor",
"track__artist",
"track__album__artist",
"track__description",
"track__album__attachment_cover",
"track__album__artist__attachment_cover",
"track__artist__attachment_cover",
"track__attachment_cover",
)
serializer_class = serializers.UploadSerializer
lookup_field = "uuid"
@ -501,35 +393,13 @@ class MusicArtistViewSet(
return response.Response(serializer.data)
class MusicArtistCreditViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.ArtistCredit.objects.local().prefetch_related("artist")
serializer_class = serializers.ArtistCreditSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class MusicAlbumViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Album.objects.local()
.prefetch_related(
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
)
.select_related(
"description",
)
queryset = music_models.Album.objects.local().select_related(
"artist__description", "description", "artist__attachment_cover"
)
serializer_class = serializers.AlbumSerializer
lookup_field = "uuid"
@ -548,22 +418,16 @@ class MusicTrackViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Track.objects.local()
.select_related(
"album__description",
"description",
"attachment_cover",
"album__attachment_cover",
)
.prefetch_related(
"album__artist_credit__artist",
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
"album__artist_credit__artist__attachment_cover",
)
queryset = music_models.Track.objects.local().select_related(
"album__artist",
"album__description",
"artist__description",
"description",
"attachment_cover",
"album__artist__attachment_cover",
"album__attachment_cover",
"artist__attachment_cover",
)
serializer_class = serializers.TrackSerializer
lookup_field = "uuid"
@ -663,74 +527,3 @@ class IndexViewSet(FederationMixin, viewsets.GenericViewSet):
)
return response.Response({}, status=200)
class TrackFavoriteViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = favorites_models.TrackFavorite.objects.local().select_related(
"track", "actor"
)
serializer_class = serializers.TrackFavoriteSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class ListeningsViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = history_models.Listening.objects.local().select_related("track", "actor")
serializer_class = serializers.ListeningSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class PlaylistViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = playlists_models.Playlist.objects.local().select_related("actor")
serializer_class = serializers.PlaylistCollectionSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
playlist = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(playlist.get_absolute_url())
conf = {
"id": playlist.fid,
"actor": playlist.actor,
"name": playlist.name,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"track",
),
"item_serializer": serializers.PlaylistTrackSerializer,
}
return get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.PlaylistCollectionSerializer(playlist),
)
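Aside: the retrieve() methods above redirect to the HTML page when the Accept header does not ask for an ActivityPub representation. The standalone function below approximates that content negotiation; should_redirect_ap_to_html's real implementation is not part of this diff.

AP_CONTENT_TYPES = (
    "application/activity+json",
    "application/ld+json",
)


def prefers_html(accept_header):
    # Redirect to HTML unless the client explicitly asked for an ActivityPub media type.
    if not accept_header:
        return True
    accepted = {part.split(";")[0].strip().lower() for part in accept_header.split(",")}
    return not any(content_type in accepted for content_type in AP_CONTENT_TYPES)


print(prefers_html("text/html,application/xhtml+xml"))  # True  -> redirect_to_html(...)
print(prefers_html("application/activity+json"))        # False -> serve the AP payload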

View File

@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.ListeningActivitySerializer)
@record.registry.register_consumer("history.Listening")
def broadcast_listening_to_instance_activity(data, obj):
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
if obj.user.privacy_level not in ["instance", "everyone"]:
return
channels.group_send(

View File

@ -5,6 +5,6 @@ from . import models
@admin.register(models.Listening)
class ListeningAdmin(admin.ModelAdmin):
list_display = ["track", "creation_date", "actor", "session_key"]
search_fields = ["track__name", "actor__user__username"]
list_select_related = ["actor", "track"]
list_display = ["track", "creation_date", "user", "session_key"]
search_fields = ["track__name", "user__username"]
list_select_related = ["user", "track"]

View File

@ -1,28 +1,14 @@
import factory
from django.conf import settings
from funkwhale_api.factories import NoUpdateOnCreate, registry
from funkwhale_api.federation import models
from funkwhale_api.federation.factories import ActorFactory
from funkwhale_api.music import factories
from funkwhale_api.users.factories import UserFactory
@registry.register
class ListeningFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
actor = factory.SubFactory(ActorFactory)
user = factory.SubFactory(UserFactory)
track = factory.SubFactory(factories.TrackFactory)
fid = factory.Faker("federation_url")
uuid = factory.Faker("uuid4")
class Meta:
model = "history.Listening"
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
return
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
0
]
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
self.save(update_fields=["fid"])

View File

@ -7,9 +7,9 @@ from . import models
class ListeningFilter(moderation_filters.HiddenContentFilterSet):
username = django_filters.CharFilter("actor__user__username")
domain = django_filters.CharFilter("actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)
username = django_filters.CharFilter("user__username")
domain = django_filters.CharFilter("user__actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
class Meta:
model = models.Listening

View File

@ -1,18 +0,0 @@
# Generated by Django 3.2.20 on 2023-12-09 14:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('history', '0002_auto_20180325_1433'),
]
operations = [
migrations.AddField(
model_name='listening',
name='source',
field=models.CharField(blank=True, max_length=100, null=True),
),
]

View File

@ -1,107 +0,0 @@
import uuid
from django.db import migrations, models
from django.urls import reverse
from funkwhale_api.federation import utils
import django.db.models.deletion
def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])
def gen_uuid(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
unique_uuid = uuid.uuid4()
while MyModel.objects.filter(uuid=unique_uuid).exists():
unique_uuid = uuid.uuid4()
fid = utils.full_url(
reverse("federation:music:listenings-detail", kwargs={"uuid": unique_uuid})
)
row.uuid = unique_uuid
row.fid = fid
row.save(update_fields=["uuid", "fid"])
def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])
class Migration(migrations.Migration):
dependencies = [
("history", "0003_listening_source"),
("federation", "0028_auto_20221027_1141"),
]
operations = [
migrations.AddField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
migrations.AddField(
model_name="listening",
name="fid",
field=models.URLField(
max_length=500,
null=True,
),
),
migrations.AddField(
model_name="listening",
name="url",
field=models.URLField(blank=True, max_length=500, null=True),
),
migrations.AddField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, null=True),
),
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True),
),
migrations.AlterField(
model_name="listening",
name="fid",
field=models.URLField(
unique=True,
db_index=True,
max_length=500,
),
),
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
migrations.RemoveField(
model_name="listening",
name="user",
),
migrations.AlterField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
]
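Aside: the removed migration above follows the usual three-step shape for swapping a user foreign key for an actor foreign key and adding a federation id: add the new columns as nullable, backfill them with RunPython, then tighten the constraints. A condensed sketch of that shape, assuming a Django project (the backfill body is simplified):

import uuid

import django.db.models.deletion
from django.db import migrations, models


def backfill(apps, schema_editor):
    Listening = apps.get_model("history", "Listening")
    for row in Listening.objects.all():
        row.uuid = uuid.uuid4()     # give every existing row an identifier
        row.actor = row.user.actor  # copy the relation off the old foreign key
        row.save(update_fields=["uuid", "actor"])


class Migration(migrations.Migration):
    dependencies = [("history", "0003_listening_source")]

    operations = [
        # 1. add the new columns as nullable so existing rows stay valid
        migrations.AddField("listening", "uuid", models.UUIDField(default=uuid.uuid4, null=True)),
        migrations.AddField(
            "listening",
            "actor",
            models.ForeignKey(
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="federation.actor",
            ),
        ),
        # 2. backfill the data
        migrations.RunPython(backfill, reverse_code=migrations.RunPython.noop),
        # 3. tighten the constraints once every row is populated
        migrations.AlterField("listening", "uuid", models.UUIDField(default=uuid.uuid4, unique=True)),
    ]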

View File

@ -1,59 +1,25 @@
import uuid
from django.db import models
from django.urls import reverse
from django.utils import timezone
from funkwhale_api.common import models as common_models
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music.models import Track
class ListeningQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
pass
class Listening(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
class Listening(models.Model):
creation_date = models.DateTimeField(default=timezone.now, null=True, blank=True)
track = models.ForeignKey(
Track, related_name="listenings", on_delete=models.CASCADE
)
actor = models.ForeignKey(
"federation.Actor",
user = models.ForeignKey(
"users.User",
related_name="listenings",
null=True,
blank=True,
on_delete=models.CASCADE,
null=False,
blank=False,
)
session_key = models.CharField(max_length=100, null=True, blank=True)
source = models.CharField(max_length=100, null=True, blank=True)
federation_namespace = "listenings"
objects = ListeningQuerySet.as_manager()
class Meta:
ordering = ("-creation_date",)
def get_activity_url(self):
return f"{self.actor.get_absolute_url()}/listenings/tracks/{self.pk}"
def get_absolute_url(self):
return f"/library/tracks/{self.track.pk}"
def get_federation_id(self):
if self.fid:
return self.fid
return federation_utils.full_url(
reverse(
f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid},
)
)
def save(self, **kwargs):
if not self.pk and not self.fid:
self.fid = self.get_federation_id()
return super().save(**kwargs)
return f"{self.user.get_activity_url()}/listenings/tracks/{self.pk}"

View File

@ -1,8 +1,10 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.activity import serializers as activity_serializers
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer
from . import models
@ -10,39 +12,47 @@ from . import models
class ListeningActivitySerializer(activity_serializers.ModelSerializer):
type = serializers.SerializerMethodField()
object = TrackActivitySerializer(source="track")
actor = federation_serializers.APIActorSerializer()
actor = UserActivitySerializer(source="user")
published = serializers.DateTimeField(source="creation_date")
class Meta:
model = models.Listening
fields = ["id", "local_id", "object", "type", "actor", "published"]
def get_actor(self, obj):
return UserActivitySerializer(obj.user).data
def get_type(self, obj):
return "Listen"
class ListeningSerializer(serializers.ModelSerializer):
track = TrackSerializer(read_only=True)
actor = federation_serializers.APIActorSerializer(read_only=True)
user = UserBasicSerializer(read_only=True)
actor = serializers.SerializerMethodField()
class Meta:
model = models.Listening
fields = ("id", "actor", "track", "creation_date", "actor")
fields = ("id", "user", "track", "creation_date", "actor")
def create(self, validated_data):
validated_data["actor"] = self.context["user"].actor
validated_data["user"] = self.context["user"]
return super().create(validated_data)
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
actor = obj.user.actor
if actor:
return federation_serializers.APIActorSerializer(actor).data
class ListeningWriteSerializer(serializers.ModelSerializer):
actor = federation_serializers.APIActorSerializer(read_only=True, required=False)
class Meta:
model = models.Listening
fields = ("id", "actor", "track", "creation_date")
fields = ("id", "user", "track", "creation_date")
def create(self, validated_data):
validated_data["actor"] = self.context["user"].actor
validated_data["user"] = self.context["user"]
return super().create(validated_data)

View File

@ -4,7 +4,6 @@ from rest_framework import mixins, viewsets
from config import plugins
from funkwhale_api.activity import record
from funkwhale_api.common import fields, permissions
from funkwhale_api.federation import routes
from funkwhale_api.music import utils as music_utils
from funkwhale_api.music.models import Track
from funkwhale_api.users.oauth import permissions as oauth_permissions
@ -19,7 +18,9 @@ class ListeningViewSet(
viewsets.GenericViewSet,
):
serializer_class = serializers.ListeningSerializer
queryset = models.Listening.objects.all().select_related("actor__attachment_icon")
queryset = models.Listening.objects.all().select_related(
"user__actor__attachment_icon"
)
permission_classes = [
oauth_permissions.ScopePermission,
@ -28,7 +29,6 @@ class ListeningViewSet(
required_scope = "listenings"
anonymous_policy = "setting"
owner_checks = ["write"]
owner_field = "actor.user"
filterset_class = filters.ListeningFilter
def get_serializer_class(self):
@ -38,40 +38,23 @@ class ListeningViewSet(
def perform_create(self, serializer):
r = super().perform_create(serializer)
instance = serializer.instance
plugins.trigger_hook(
plugins.LISTENING_CREATED,
listening=instance,
listening=serializer.instance,
confs=plugins.get_confs(self.request.user),
)
routes.outbox.dispatch(
{"type": "Listen", "object": {"type": "Track"}},
context={
"track": instance.track,
"actor": instance.actor,
"id": instance.fid,
},
)
record.send(serializer.instance)
return r
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(
fields.privacy_level_query(
self.request.user, "actor__user__privacy_level", "actor__user"
)
fields.privacy_level_query(self.request.user, "user__privacy_level")
)
tracks = (
Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
)
.prefetch_related(
"artist_credit",
"album__artist_credit__artist",
"artist_credit__artist__attachment_cover",
)
.select_related("attributed_to")
tracks = Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
).select_related(
"artist", "album__artist", "attributed_to", "artist__attachment_cover"
)
return queryset.prefetch_related(Prefetch("track", queryset=tracks))
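Aside: get_queryset() above narrows the listening history with a privacy-level filter before prefetching playable tracks. The standalone predicate below approximates the visibility rule that privacy_level_query expresses as a Django Q object; the level names follow the diff, and the per-pod nuance of "instance" is glossed over.

def listening_is_visible(owner, privacy_level, request_user):
    # "me": only the owner, "instance": any authenticated user, "everyone": public.
    if request_user is not None and owner == request_user:
        return True
    if privacy_level == "everyone":
        return True
    if privacy_level == "instance":
        return request_user is not None
    return False


print(listening_is_visible("alice", "me", "bob"))        # False
print(listening_is_visible("alice", "instance", "bob"))  # True
print(listening_is_visible("alice", "everyone", None))   # True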

View File

@ -37,7 +37,7 @@ def get_content():
def get_top_music_categories():
return (
models.Track.objects.filter(artist_credit__artist__content_category="music")
models.Track.objects.filter(artist__content_category="music")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))
@ -47,7 +47,7 @@ def get_top_music_categories():
def get_top_podcast_categories():
return (
models.Track.objects.filter(artist_credit__artist__content_category="podcast")
models.Track.objects.filter(artist__content_category="podcast")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))
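Aside: get_top_music_categories() groups tagged tracks by tag name and counts them in the database via .values().annotate(Count(...)). The same aggregation over plain Python data, for illustration (the rows are made up):

from collections import Counter

# (content_category, tag name) pairs standing in for the Track/Tag rows queried above
tagged_tracks = [
    ("music", "jazz"),
    ("music", "jazz"),
    ("music", "ambient"),
    ("podcast", "history"),
]

top_music = Counter(tag for category, tag in tagged_tracks if category == "music")
print(top_music.most_common())  # [('jazz', 2), ('ambient', 1)]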

View File

@ -1,4 +1,4 @@
from django.urls import re_path
from django.conf.urls import url
from funkwhale_api.common import routers
@ -8,7 +8,7 @@ admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")
urlpatterns = [
re_path(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
url(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
url(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
url(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
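Aside: this file and several others in the diff differ only in whether routes are declared with django.urls.re_path or the legacy django.conf.urls.url; both accept the same regex, view and name arguments, and url() was removed in Django 4.0. A minimal sketch assuming Django is installed (the view is a hypothetical stand-in for views.NodeInfo20.as_view()):

from django.http import JsonResponse
from django.urls import re_path


def nodeinfo(request):
    # Hypothetical stand-in view, only here to make the sketch self-contained.
    return JsonResponse({"version": "2.0"})


urlpatterns = [
    # Same arguments as the legacy django.conf.urls.url() call it replaces.
    re_path(r"^nodeinfo/2.0/?$", nodeinfo, name="nodeinfo-2.0"),
]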

View File

@ -1,14 +1,7 @@
from django.urls import re_path
from funkwhale_api.common import routers
from django.conf.urls import url
from . import views
admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")
urlpatterns = [
re_path(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
url(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
]

View File

@ -171,9 +171,6 @@ class NodeInfo21(NodeInfo20):
if pref.get("federation__enabled"):
data["features"].append("federation")
if pref.get("music__only_allow_musicbrainz_tagged_files"):
data["features"].append("onlyMbidTaggedContent")
serializer = self.serializer_class(data)
return Response(
serializer.data, status=200, content_type=NODEINFO_2_CONTENT_TYPE

View File

@ -1,7 +1,6 @@
import django_filters
from django import forms
from django.db.models import Q
from django.db.models.functions import Collate
from django_filters import rest_framework as filters
from funkwhale_api.audio import models as audio_models
@ -97,15 +96,12 @@ class ManageAlbumFilterSet(filters.FilterSet):
search_fields={
"title": {"to": "title"},
"fid": {"to": "fid"},
"artist": {"to": "artist_credit__artist__name"},
"artist": {"to": "artist__name"},
"mbid": {"to": "mbid"},
},
filter_fields={
"uuid": {"to": "uuid"},
"artist_id": {
"to": "artist_credit__artist_id",
"field": forms.IntegerField(),
},
"artist_id": {"to": "artist_id", "field": forms.IntegerField()},
"domain": {
"handler": lambda v: federation_utils.get_domain_query_from_url(v)
},
@ -121,7 +117,7 @@ class ManageAlbumFilterSet(filters.FilterSet):
class Meta:
model = music_models.Album
fields = ["title", "mbid", "fid", "artist_credit"]
fields = ["title", "mbid", "fid", "artist"]
class ManageTrackFilterSet(filters.FilterSet):
@ -131,9 +127,9 @@ class ManageTrackFilterSet(filters.FilterSet):
"title": {"to": "title"},
"fid": {"to": "fid"},
"mbid": {"to": "mbid"},
"artist": {"to": "artist_credit__artist__name"},
"artist": {"to": "artist__name"},
"album": {"to": "album__title"},
"album_artist": {"to": "album__artist_credit__artist__name"},
"album_artist": {"to": "album__artist__name"},
"copyright": {"to": "copyright"},
},
filter_fields={
@ -160,7 +156,7 @@ class ManageTrackFilterSet(filters.FilterSet):
class Meta:
model = music_models.Track
fields = ["title", "mbid", "fid", "artist_credit", "album", "license"]
fields = ["title", "mbid", "fid", "artist", "album", "license"]
class ManageLibraryFilterSet(filters.FilterSet):
@ -374,13 +370,6 @@ class ManageTagFilterSet(filters.FilterSet):
model = tags_models.Tag
fields = []
def get_queryset(self, request):
return (
super()
.get_queryset(request)
.annotate(tag_deterministic=Collate("name", "und-x-icu"))
)
class ManageReportFilterSet(filters.FilterSet):
q = fields.SmartSearchFilter(

View File

@ -67,8 +67,8 @@ class ManageUserSerializer(serializers.ModelSerializer):
"date_joined",
"last_activity",
"permissions",
"upload_quota",
"privacy_level",
"upload_quota",
"full_username",
)
read_only_fields = [
@ -451,25 +451,17 @@ class ManageNestedArtistSerializer(ManageBaseArtistSerializer):
pass
class ManageNestedArtistCreditSerializer(ManageBaseArtistSerializer):
artist = ManageNestedArtistSerializer()
class Meta:
model = music_models.ArtistCredit
fields = ["artist"]
class ManageAlbumSerializer(
music_serializers.OptionalDescriptionMixin, ManageBaseAlbumSerializer
):
attributed_to = ManageBaseActorSerializer()
artist_credit = ManageNestedArtistCreditSerializer(many=True)
artist = ManageNestedArtistSerializer()
tags = serializers.SerializerMethodField()
class Meta:
model = music_models.Album
fields = ManageBaseAlbumSerializer.Meta.fields + [
"artist_credit",
"artist",
"attributed_to",
"tags",
"tracks_count",
@ -485,17 +477,17 @@ class ManageAlbumSerializer(
class ManageTrackAlbumSerializer(ManageBaseAlbumSerializer):
artist_credit = ManageNestedArtistCreditSerializer(many=True)
artist = ManageNestedArtistSerializer()
class Meta:
model = music_models.Album
fields = ManageBaseAlbumSerializer.Meta.fields + ["artist_credit"]
fields = ManageBaseAlbumSerializer.Meta.fields + ["artist"]
class ManageTrackSerializer(
music_serializers.OptionalDescriptionMixin, ManageNestedTrackSerializer
):
artist_credit = ManageNestedArtistCreditSerializer(many=True)
artist = ManageNestedArtistSerializer()
album = ManageTrackAlbumSerializer(allow_null=True)
attributed_to = ManageBaseActorSerializer(allow_null=True)
uploads_count = serializers.SerializerMethodField()
@ -505,7 +497,7 @@ class ManageTrackSerializer(
class Meta:
model = music_models.Track
fields = ManageNestedTrackSerializer.Meta.fields + [
"artist_credit",
"artist",
"album",
"attributed_to",
"uploads_count",
@ -572,6 +564,7 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name")
actor = ManageBaseActorSerializer()
uploads_count = serializers.SerializerMethodField()
followers_count = serializers.SerializerMethodField()
class Meta:
model = music_models.Library
@ -581,11 +574,14 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
"fid",
"url",
"name",
"description",
"domain",
"is_local",
"creation_date",
"privacy_level",
"uploads_count",
"followers_count",
"followers_url",
"actor",
]
read_only_fields = [
@ -601,6 +597,10 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
def get_uploads_count(self, obj) -> int:
return getattr(obj, "_uploads_count", int(obj.uploads_count))
@extend_schema_field(OpenApiTypes.INT)
def get_followers_count(self, obj):
return getattr(obj, "followers_count", None)
class ManageNestedLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name")
@ -614,10 +614,12 @@ class ManageNestedLibrarySerializer(serializers.ModelSerializer):
"fid",
"url",
"name",
"description",
"domain",
"is_local",
"creation_date",
"privacy_level",
"followers_url",
"actor",
]

View File

@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from funkwhale_api.common import routers
@ -33,16 +32,14 @@ other_router.register(r"channels", views.ManageChannelViewSet, "channels")
other_router.register(r"tags", views.ManageTagViewSet, "tags")
urlpatterns = [
re_path(
url(
r"^federation/",
include((federation_router.urls, "federation"), namespace="federation"),
),
re_path(
r"^library/", include((library_router.urls, "instance"), namespace="library")
),
re_path(
url(r"^library/", include((library_router.urls, "instance"), namespace="library")),
url(
r"^moderation/",
include((moderation_router.urls, "moderation"), namespace="moderation"),
),
re_path(r"^users/", include((users_router.urls, "instance"), namespace="users")),
url(r"^users/", include((users_router.urls, "instance"), namespace="users")),
] + other_router.urls

Some files were not shown because too many files have changed in this diff.