Compare commits

..

26 Commits

Author SHA1 Message Date
Petitminion 7c3206bf83 fix build_docs pipeline 2025-04-14 18:34:55 +02:00
petitminion 6fac158374 Update .gitlab-ci.yml file, disable linux/arm/v7 for 1.4.1 2025-04-14 15:29:35 +00:00
Petitminion 06494a538c Version bump and changelog for 1.4.1 2025-04-14 16:19:17 +02:00
petitminion a9d56911ef pin setuptools to 60.10.0 to fix builds 1.4.0 2025-04-14 14:16:08 +00:00
Renovate Bot 7f7f4a1fff chore(api): update dependency django to v3.2.25 2024-05-15 12:34:16 +00:00
Georg Krause 6b8bbc5f80 fix(api): Make trailing slashes for each endpoint optional
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2766>
2024-02-26 13:21:14 +00:00
Renovate Bot 673db74dd2 chore(front): update dependency standardized-audio-context to v25.3.64
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2765>
2024-02-26 11:06:11 +00:00
Renovate Bot 9b0f538c08 chore(docs): update dependency django to v3.2.24
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2764>
2024-02-26 10:35:55 +00:00
Renovate Bot c1608b3459 chore(api): update dependency pyld to v2.0.4
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2761>
2024-02-21 11:06:45 +00:00
Renovate Bot 809b972772 chore(api): update dependency django to v3.2.24
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2760>
2024-02-21 10:33:53 +00:00
Renovate Bot 288bdd163d chore(front): lock file maintenance
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2751>
2024-02-05 14:34:02 +00:00
Renovate Bot 59de22a2fd chore(front): lock file maintenance
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2747>
2024-02-05 00:06:26 +00:00
Renovate Bot 2ec71111a7 chore(front): update dependency standardized-audio-context to v25.3.63
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2718>
2024-01-31 21:03:39 +00:00
Renovate Bot cb307cf296 chore(api): update dependency unidecode to v1.3.8
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2717>
2024-01-31 20:05:25 +00:00
Renovate Bot 06f0cf90f0 chore(api): update dependency pytest to v7.4.4
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2716>
2024-01-31 18:34:17 +00:00
Renovate Bot de5063afef chore(api): update dependency prompt-toolkit to v3.0.43
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2715>
2024-01-31 17:07:21 +00:00
Renovate Bot f43278a709 chore(api): update dependency feedparser to v6.0.11
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2714>
2024-01-31 16:06:02 +00:00
Renovate Bot 15b261d7fd chore: pin dependency poetry-core to ==1.8.1
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2713>
2024-01-31 15:04:08 +00:00
Renovate Bot ec913cfa64 chore(docs): pin dependency poetry-core to ==1.8.1
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2712>
2024-01-31 14:33:45 +00:00
Renovate Bot 5ef3366974 chore(api): pin dependency poetry-core to ==1.8.1
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2711>
2024-01-11 10:05:00 +00:00
Baudouin Feildel c5f582d0e3 Add changelog entry.
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2699>
2024-01-11 11:00:02 +01:00
Baudouin Feildel d0a997643b Fix Apache configuration
Built assets are fetched using path like this: `/assets/foo-a1b2c3.js`. Apache failed to serve those, as it was missing disabling the proxy pass for the static assets folder. This commit adds the necessary configuration for properly serving the static assets.

Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2699>
2024-01-11 10:59:56 +01:00
Georg Krause c21bfce9d0 fix(docs): Fix build script for documentation to properly deploy swagger 2023-12-13 15:55:54 +01:00
Georg Krause 98d4f4ef54 Merge branch 'develop' into stable 2023-12-12 13:29:29 +01:00
Renovate Bot e31f0043b0 chore(front): update dependency standardized-audio-context to v25.3.59
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2645>
2023-11-23 12:34:23 +00:00
Renovate Bot c87a22fb15 chore(api): update dependency aioresponses to v0.7.6
Part-of: <https://dev.funkwhale.audio/funkwhale/funkwhale/-/merge_requests/2644>
2023-11-23 11:07:38 +00:00
888 changed files with 50463 additions and 180559 deletions

View File

@ -7,7 +7,6 @@ nd
readby
serie
upto
afterall
# Names
nin

View File

@ -67,6 +67,3 @@ mailhog
*.sqlite3
api/music
api/media
# Docker state
.state

23
.env.dev Normal file
View File

@ -0,0 +1,23 @@
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
DJANGO_SETTINGS_MODULE=config.settings.local
DJANGO_SECRET_KEY=dev
C_FORCE_ROOT=true
FUNKWHALE_HOSTNAME=localhost
FUNKWHALE_PROTOCOL=http
PYTHONDONTWRITEBYTECODE=true
VUE_PORT=8080
MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
PYTHONTRACEMALLOC=0
MEDIA_ROOT=/data/media
# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True
# Customize to your needs
POSTGRES_VERSION=11
DEBUG=true
TYPESENSE_API_KEY="apikey"

View File

@ -1,62 +0,0 @@
COMPOSE_BAKE=true
# api + celeryworker
DEBUG=True
DEFAULT_FROM_EMAIL=hello@funkwhale.test
FUNKWHALE_DOMAIN=funkwhale.test
FUNKWHALE_PROTOCOL=https
DJANGO_SECRET_KEY=dev
DJANGO_ALLOWED_HOSTS=.funkwhale.test,nginx
DJANGO_SETTINGS_MODULE=config.settings.local
DATABASE_URL=postgresql://postgres@postgres/postgres
CACHE_URL=redis://redis:6379/0
EMAIL_CONFIG=smtp://mailpit.funkwhale.test:1025
FORCE_HTTPS_URLS=True
EXTERNAL_REQUESTS_VERIFY_SSL=false
C_FORCE_ROOT=true
PYTHONDONTWRITEBYTECODE=true
PYTHONTRACEMALLOC=0
CELERY_TASK_TIME_LIMIT=300
# api
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
LDAP_ENABLED=False
BROWSABLE_API_ENABLED=True
# celeryworker
CELERYD_CONCURRENCY=0
# api + nginx
STATIC_ROOT=/staticfiles
MEDIA_ROOT=/data/media
# api + Typesense
TYPESENSE_API_KEY=apikey
# front
HOST=0.0.0.0
VUE_PORT=8080
# nginx
NGINX_MAX_BODY_SIZE=10G
FUNKWHALE_API_HOST=api
FUNKWHALE_API_PORT=5000
FUNKWHALE_FRONT_IP=front
FUNKWHALE_FRONT_PORT=${VUE_PORT}
# postgres
POSTGRES_HOST_AUTH_METHOD=trust

38
.gitignore vendored
View File

@ -1,5 +1,3 @@
/dist
### OSX ###
.DS_Store
.AppleDouble
@ -34,8 +32,6 @@ pip-log.txt
.tox
nosetests.xml
htmlcov
coverage.xml
report.xml
# Translations
*.mo
@ -77,36 +73,20 @@ api/staticfiles
api/static
api/.pytest_cache
api/celerybeat-*
# Front
oldfront/node_modules/
front/static/translations
front/node_modules/
front/dist/
front/dev-dist/
front/npm-debug.log*
front/yarn-debug.log*
front/yarn-error.log*
front/tests/unit/coverage
front/tests/e2e/reports
front/test_results.xml
front/coverage/
front/selenium-debug.log
# Vitepress
front/ui-docs/.vitepress/.vite
front/ui-docs/.vitepress/cache
front/ui-docs/.vitepress/dist
front/ui-docs/public
# Docs
docs/_build
#Tauri
front/tauri/gen
/data/
.state
.env
po/*.po
@ -117,26 +97,10 @@ _build
# Docker
docker-bake.*.json
metadata.json
compose/var/test.*
# Linting
.eslintcache
tsconfig.tsbuildinfo
# Nix
.direnv/
.envrc
flake.nix
flake.lock
# VS Code
# Vscode
.vscode/
# Zed
.zed/
# Node version (asdf)
.tool-versions
# Lychee link checker
.lycheecache

View File

@ -8,56 +8,50 @@ include:
file: /templates/ssh-agent.yml
variables:
PYTHONDONTWRITEBYTECODE: 'true'
PYTHONDONTWRITEBYTECODE: "true"
PIP_CACHE_DIR: $CI_PROJECT_DIR/.cache/pip
YARN_CACHE_FOLDER: $CI_PROJECT_DIR/.cache/yarn
POETRY_VIRTUALENVS_IN_PROJECT: 'true'
POETRY_VIRTUALENVS_IN_PROJECT: "true"
.shared_variables:
# Keep the git files permissions during job setup
keep_git_files_permissions: &keep_git_files_permissions
GIT_STRATEGY: clone
GIT_DEPTH: '5'
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: 'true'
GIT_DEPTH: "5"
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: "true"
.shared_caches:
# Cache for front related jobs
yarn_cache: &yarn_cache
key: front-yarn-$CI_COMMIT_REF_SLUG
paths: [$YARN_CACHE_FOLDER]
node_cache: &node_cache
key:
prefix: front-node_modules-$CI_COMMIT_REF_SLUG
files: [front/yarn.lock]
paths: [front/node_modules]
lint_cache: &lint_cache
key:
prefix: front-lint-$CI_COMMIT_REF_SLUG
files:
- front/.eslintcache
- front/tsconfig.tsbuildinfo
cypress_cache: &cypress_cache
key: cypress-cache-$CI_COMMIT_REF_SLUG
paths:
- /root/.cache/Cypress
front_cache: &front_cache
- key: front-yarn
paths: [$YARN_CACHE_FOLDER]
- key:
prefix: front-node_modules
files: [front/yarn.lock]
paths: [front/node_modules]
- key:
prefix: front-lint
files:
- front/.eslintcache
- front/tsconfig.tsbuildinfo
# Cache for api related jobs
# Include the python version to prevent losing caches in the test matrix
api_cache: &api_cache
- key: api-pip-$CI_COMMIT_REF_SLUG
- key: api-pip-$PYTHON_VERSION
paths: [$PIP_CACHE_DIR]
- key:
prefix: api-venv-$CI_COMMIT_REF_SLUG
prefix: api-venv-$PYTHON_VERSION
files: [api/poetry.lock]
paths: [api/.venv]
# Cache for docs related jobs
docs_cache: &docs_cache
- key: docs-pip-$CI_COMMIT_REF_SLUG
- key: docs-pip
paths: [$PIP_CACHE_DIR]
- key:
prefix: docs-venv-$CI_COMMIT_REF_SLUG
prefix: docs-venv
files: [docs/poetry.lock]
paths: [docs/.venv]
@ -77,8 +71,6 @@ workflow:
)
# Run for merge requests from any repo or branches
- if: $CI_MERGE_REQUEST_ID
# run if NOCHANGELOG is added in the title
- if: $CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/
stages:
- review
@ -105,10 +97,7 @@ review_front:
environment:
name: review/front/$CI_COMMIT_REF_NAME
url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/index.html
cache:
- *yarn_cache
- *node_cache
- *lint_cache
cache: *front_cache
before_script:
- mkdir front-review
- cd front
@ -154,21 +143,20 @@ find_broken_links:
lychee
--cache
--no-progress
--include-fragments
--exclude-all-private
--exclude-mail
--exclude 'demo\.funkwhale\.audio'
--exclude 'nginx\.com'
--exclude-path 'docs/_templates/'
-- . || exit $?
require_changelog:
allow_failure: false
stage: lint
rules:
# Don't run on merge request that mention NOCHANGELOG or renovate bot commits
- if: >
$CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/ ||
$CI_COMMIT_AUTHOR == "RenovateBot <bot@dev.funkwhale.audio>"
$CI_COMMIT_AUTHOR == "Renovate Bot <bot@dev.funkwhale.audio>"
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
@ -187,8 +175,7 @@ lint_api:
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
- changes: [api/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
cache: *api_cache
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
before_script:
- cd api
- make install
@ -203,15 +190,13 @@ lint_front:
- changes: [front/**/*]
image: $CI_REGISTRY/funkwhale/ci/node-python:18
cache:
- *yarn_cache
- *node_cache
- *lint_cache
cache: *front_cache
before_script:
- cd front
- yarn install --frozen-lockfile
script:
- yarn lint
- yarn lint --max-warnings 0
- yarn lint:tsc
test_scripts:
stage: test
@ -246,7 +231,7 @@ test_api:
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:$PYTHON_VERSION
parallel:
matrix:
- PYTHON_VERSION: ['3.11', '3.12', '3.13']
- PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11"]
services:
- name: postgres:15-alpine
command:
@ -256,14 +241,17 @@ test_api:
- name: redis:7-alpine
cache: *api_cache
variables:
DATABASE_URL: 'postgresql://postgres@postgres/postgres'
FUNKWHALE_URL: 'https://funkwhale.ci'
DATABASE_URL: "postgresql://postgres@postgres/postgres"
FUNKWHALE_URL: "https://funkwhale.ci"
DJANGO_SETTINGS_MODULE: config.settings.local
POSTGRES_HOST_AUTH_METHOD: trust
CACHE_URL: 'redis://redis:6379/0'
CACHE_URL: "redis://redis:6379/0"
before_script:
- cd api
- make install
- poetry env info
- poetry run pip install "setuptools==60.10.0" wheel
- poetry run pip install --no-use-pep517 django-allauth==0.42.0
- poetry install --all-extras
script:
- >
poetry run pytest
@ -291,10 +279,7 @@ test_front:
- changes: [front/**/*]
image: $CI_REGISTRY/funkwhale/ci/node-python:18
cache:
- *yarn_cache
- *node_cache
- *lint_cache
cache: *front_cache
before_script:
- cd front
- yarn install --frozen-lockfile
@ -306,7 +291,6 @@ test_front:
coverage_report:
coverage_format: cobertura
path: front/coverage/cobertura-coverage.xml
coverage: '/All files\s+(?:\|\s+((?:\d+\.)?\d+)\s+){4}.*/'
build_metadata:
stage: build
@ -332,14 +316,13 @@ test_integration:
interruptible: true
image:
name: cypress/included:13.6.4
entrypoint: ['']
name: cypress/included:12.14.0
entrypoint: [""]
cache:
- *yarn_cache
- *node_cache
- *lint_cache
- *cypress_cache
- *front_cache
- key:
paths:
- /root/.cache/Cypress
before_script:
- cd front
- yarn install
@ -357,29 +340,26 @@ build_api_schema:
# Add build_docs rules because it depends on the build_api_schema artifact
- changes: [docs/**/*]
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
services:
- postgres:15-alpine
- redis:7-alpine
cache: *api_cache
variables:
DATABASE_URL: 'postgresql://postgres@postgres/postgres'
FUNKWHALE_URL: 'https://funkwhale.ci'
DATABASE_URL: "postgresql://postgres@postgres/postgres"
FUNKWHALE_URL: "https://funkwhale.ci"
DJANGO_SETTINGS_MODULE: config.settings.local
POSTGRES_HOST_AUTH_METHOD: trust
CACHE_URL: 'redis://redis:6379/0'
API_TYPE: 'v1'
CACHE_URL: "redis://redis:6379/0"
API_TYPE: "v1"
before_script:
- cd api
- make install
- poetry run pip install "setuptools==60.10.0" wheel
- poetry run pip install --no-use-pep517 django-allauth==0.42.0
- poetry install --all-extras
- poetry run funkwhale-manage migrate
script:
- poetry run funkwhale-manage spectacular --file ../docs/schema.yml
- diff ../docs/schema.yml ./funkwhale_api/common/schema.yml || (
echo "Schema files do not match! run sudo docker compose run --rm
api funkwhale-manage spectacular > ./api/funkwhale_api/common/schema.yml" &&
exit 1
)
artifacts:
expire_in: 2 weeks
paths:
@ -421,10 +401,7 @@ build_front:
variables:
<<: *keep_git_files_permissions
NODE_OPTIONS: --max-old-space-size=4096
cache:
- *yarn_cache
- *node_cache
- *lint_cache
cache: *front_cache
before_script:
- cd front
- yarn install --frozen-lockfile
@ -458,25 +435,6 @@ build_api:
paths:
- api
# build_tauri:
# stage: build
# rules:
# - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
# - changes: [front/**/*]
# image: $CI_REGISTRY/funkwhale/ci/node-tauri:18
# variables:
# <<: *keep_git_files_permissions
# before_script:
# - source /root/.cargo/env
# - yarn install
# script:
# - yarn tauri build --verbose
# artifacts:
# name: desktop_${CI_COMMIT_REF_NAME}
# paths:
# - front/tauri/target/release/bundle/appimage/*.AppImage
deploy_docs:
interruptible: false
extends: .ssh-agent
@ -508,24 +466,23 @@ docker:
- if: $CI_COMMIT_TAG
variables:
BUILD_ARGS: >
--set *.platform=linux/amd64,linux/arm64,linux/arm/v7
--set *.platform=linux/amd64,linux/arm64
--no-cache
--push
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
variables:
BUILD_ARGS: >
--set *.platform=linux/amd64,linux/arm64,linux/arm/v7
--set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH
--set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max
--set *.platform=linux/amd64,linux/arm64
--set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,oci-mediatypes=false
--set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max,oci-mediatypes=false
--push
- if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_NAMESPACE == "funkwhale"
# We don't provide privileged runners to everyone, so we can only build docker images in the funkwhale group
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
variables:
BUILD_ARGS: >
--set *.platform=linux/amd64
--set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME
--set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME,oci-mediatypes=false
image: $CI_REGISTRY/funkwhale/ci/docker:20
services:
@ -534,7 +491,7 @@ docker:
<<: *keep_git_files_permissions
DOCKER_HOST: tcp://docker:2375/
DOCKER_DRIVER: overlay2
DOCKER_TLS_CERTDIR: ''
DOCKER_TLS_CERTDIR: ""
BUILDKIT_PROGRESS: plain
DOCKER_CACHE_IMAGE: $CI_REGISTRY/funkwhale/funkwhale/cache
@ -556,24 +513,3 @@ docker:
name: docker_metadata_${CI_COMMIT_REF_NAME}
paths:
- metadata.json
package:
stage: publish
needs:
- job: build_metadata
artifacts: true
- job: build_api
artifacts: true
- job: build_front
artifacts: true
# - job: build_tauri
# artifacts: true
rules:
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
image: $CI_REGISTRY/funkwhale/ci/python:3.11
variables:
<<: *keep_git_files_permissions
script:
- make package
- scripts/ci-upload-packages.sh

View File

@ -9,14 +9,14 @@
"group:monorepos",
"group:recommended"
],
"baseBranches": ["develop"],
"baseBranches": ["stable", "develop"],
"branchConcurrentLimit": 2,
"prConcurrentLimit": 1,
"rangeStrategy": "pin",
"ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"],
"packageRules": [
{
"matchFileNames": ["api/*", "front/*", "docs/*"],
"matchPaths": ["api/*", "front/*", "docs/*"],
"additionalBranchPrefix": "{{parentDir}}-",
"semanticCommitScope": "{{parentDir}}"
},
@ -25,16 +25,6 @@
"branchConcurrentLimit": 0,
"prConcurrentLimit": 0
},
{
"matchBaseBranches": ["develop"],
"matchUpdateTypes": ["major"],
"prPriority": 2
},
{
"matchBaseBranches": ["develop"],
"matchUpdateTypes": ["minor"],
"prPriority": 1
},
{
"matchUpdateTypes": ["major", "minor"],
"matchBaseBranches": ["stable"],
@ -54,20 +44,20 @@
"addLabels": ["Area::Backend"]
},
{
"groupName": "vueuse",
"matchDepNames": ["/^@vueuse/.*/"]
"matchPackagePatterns": ["^@vueuse/.*"],
"groupName": "vueuse"
},
{
"matchDepNames": ["channels", "channels-redis", "daphne"],
"matchPackageNames": ["channels", "channels-redis", "daphne"],
"groupName": "channels"
},
{
"matchDepNames": ["node"],
"matchPackageNames": ["node"],
"allowedVersions": "/\\d+[02468]$/"
},
{
"matchFileNames": ["deploy/docker-compose.yml"],
"matchDepNames": ["postgres"],
"matchFiles": ["deploy/docker-compose.yml"],
"matchPackageNames": ["postgres"],
"postUpgradeTasks": {
"commands": [
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
@ -76,7 +66,7 @@
}
},
{
"matchDepNames": ["python"],
"matchPackageNames": ["python"],
"rangeStrategy": "widen"
}
]

View File

@ -14,7 +14,7 @@ tasks:
docker-compose up -d
poetry env use python
make install
poetry install
gp ports await 5432

View File

@ -6,8 +6,6 @@ RUN sudo apt update -y \
RUN pyenv install 3.11 && pyenv global 3.11
RUN brew install neovim
RUN pip install poetry pre-commit jinja2 towncrier \
&& poetry config virtualenvs.create true \
&& poetry config virtualenvs.in-project true

View File

@ -1,19 +1,19 @@
version: '3'
version: "3"
services:
postgres:
image: postgres:15-alpine
environment:
- 'POSTGRES_HOST_AUTH_METHOD=trust'
- "POSTGRES_HOST_AUTH_METHOD=trust"
volumes:
- '../data/postgres:/var/lib/postgresql/data'
- "../data/postgres:/var/lib/postgresql/data"
ports:
- 5432:5432
redis:
image: redis:7-alpine
volumes:
- '../data/redis:/data'
- "../data/redis:/data"
ports:
- 6379:6379
@ -26,14 +26,14 @@ services:
extra_hosts:
- host.docker.internal:host-gateway
environment:
- 'NGINX_MAX_BODY_SIZE=100M'
- 'FUNKWHALE_API_IP=host.docker.internal'
- 'FUNKWHALE_API_HOST=host.docker.internal'
- 'FUNKWHALE_API_PORT=5000'
- 'FUNKWHALE_FRONT_IP=host.docker.internal'
- 'FUNKWHALE_FRONT_PORT=8080'
- 'FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}'
- 'FUNKWHALE_PROTOCOL=https'
- "NGINX_MAX_BODY_SIZE=100M"
- "FUNKWHALE_API_IP=host.docker.internal"
- "FUNKWHALE_API_HOST=host.docker.internal"
- "FUNKWHALE_API_PORT=5000"
- "FUNKWHALE_FRONT_IP=host.docker.internal"
- "FUNKWHALE_FRONT_PORT=8080"
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
- "FUNKWHALE_PROTOCOL=https"
volumes:
- ../data/media:/workspace/funkwhale/data/media:ro
- ../data/music:/music:ro

View File

@ -6,13 +6,6 @@ repos:
rev: v4.4.0
hooks:
- id: check-added-large-files
exclude: |
(?x)(
^api/funkwhale_api/common/schema.yml|
^api/tests/music/test_coverart.ogg|
^front/src/generated/types.ts
)
- id: check-case-conflict
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable
@ -60,7 +53,7 @@ repos:
- id: isort
- repo: https://github.com/pycqa/flake8
rev: 6.1.0
rev: 6.0.0
hooks:
- id: flake8
@ -69,7 +62,6 @@ repos:
hooks:
- id: prettier
files: \.(md|yml|yaml|json)$
exclude: 'api/funkwhale_api/common/schema.yml'
- repo: https://github.com/codespell-project/codespell
rev: v2.2.6

View File

@ -1,28 +0,0 @@
{
"bracketSameLine": false,
"bracketSpacing": true,
"embeddedLanguageFormatting": "off",
"htmlWhitespaceSensitivity": "strict",
"printWidth": 160,
"semi": false,
"singleAttributePerLine": true,
"singleQuote": true,
"trailingComma": "none",
"tabWidth": 2,
"useTabs": false,
"overrides": [
{
"files": "*.html",
"options": {
"singleAttributePerLine": false
}
},
{
"files": "*.json",
"options": {
"parser": "json",
"printWidth": 80
}
}
]
}

View File

@ -9,101 +9,19 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.
<!-- towncrier -->
## 2.0.0-alpha.2 (2025-06-10)
## 1.4.1 (2025-04-14)
Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
Enhancements:
- Make playlist detail page reactive to plugin upload updates (#2464)
- Only refresh_nodeinfo_known_nodes for Funkwhale instances (#2442)
Bugfixes:
- Fixed database migrations for trackfavorite, playlist and playlisttrack
## 2.0.0-alpha.1 (2025-05-23)
Carefully read [this blog post](https://blog.funkwhale.audio/2025-funkwhale-2-news.html) before upgrading. This alpha release might break your db.
Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
Features:
- Add a command to create playlists from folder structure (#2223)
- Add Musicbrainz genres to funkwhale tag table and allow Musicbrainz tag sync (#2143)
- Add support for deprecated COVERART fields in ogg files.
- Add support for Python 3.12
- Allow specifying more parameters on fs-import API endpoint
- Build desktop app with Tauri
- Drop libraries in favor of playlist (#2366)
- Entering an input in the global search in the sidebar opens up a modal that show all possible results at once with collapseable sections for each category, including federated searches for other users, channels and rss feeds. (#2910)
- Extend Subsonic API with OpenSubsonic support (#2270)
- Generate mock test server from openapi schema
- Implement Subsonic getArtistInfo2 response
- Improve mobile design (#2090)
- Improve visuals & layout (#2091)
- Playlist federation (#1458)
- Quality filter for content frontend (#1469)
- Quality filters backend (#2275)
- Support multiples artists for tracks and albums (#1568)
- Use playlists to privately share audio files (#2417)
- User follow with listening and track favorite activities (#1810)
Enhancements:
- Add cli command to prune non mbid content from db (#2083)
- Add favorite and listening sync with Listenbrainz (#2079)
- Add tests for track cache
- Allow special characters in tags (#2009)
- disable some linter rule to avoid noise on the api lint process (#2346)
- Drop python 3.8 and 3.9 (#2282)
- Migrate frontend to api V2 (#2324)
- Optimize radios queryset to support large tables (#2450)
- Publish releases from CI job
- Subsonic getAlbumInfo, getAlbumInfo2 and getTopSongs endpoints (#2392)
- Support setting CELERY_TASK_TIME_LIMIT from environment
- Update dj-rest-auth to 5.0.2 and django-allauth to 0.55.2
- Update django to version 4.2
Refactoring:
- Add compatibility for lru-cache v10
Bugfixes:
- ActivityStreams compliance: duration (#1566)
- Docker mac compatibility, dynamic DNS + Debian image (#2337)
- Federation API returns a 404 response when an actor is not found
- fix fakedata generation regression from multiartist (#2343)
- Fix schema generation to allow propre types in front (#2404)
- Fix third party upload triggers and plugin example (#2405)
- Fixed the exceptions caught in two places in the getCoverArt subsonic API (#2122)
- regression: multiple albums with same name and artist created during import (#2365)
- Resolve forbidden tags due to filter database error (#2325)
- Resolve Radio playing fails when unauthenticated (#2319)
- Fix 1.4.0 builds
- Fix build script for documentation to properly deploy swagger
- Make trailing slashes optional for all endpoints
Documentation:
- Add API v2 overview.
- Add genre tags spec.
- Add import group details to content upload spec (#2268)
- add listenings-favorites-sync-with-lb spec
- Added explanation of AWS_CUSTOM_DOMAIN and AWS_S3_URL_PROTOCOL.
- Added new upload process spec
- Added user deletion spec
- Fixed the sample Apache configuration
- Playlist federation spec
- update federation doc with the new artist credit object (#2335)
Other:
- Use Alpine 3.19 as base for docker images
Deprecation:
- The "funkwhaleVersion" field in Subsonic responses is deprecated. Clients
should use the OpenSubsonic field "serverVersion" instead.
## 1.4.0 (2023-12-12)
@ -498,13 +416,13 @@ Update instructions:
2. Stop your containers using the **docker-compose** syntax.
```sh
docker compose down
sudo docker-compose down
```
3. Bring the containers back up using the **docker compose** syntax.
```sh
docker compose up -d
sudo docker compose up -d
```
After this you can continue to use the **docker compose** syntax for all Docker management tasks.

View File

@ -17,41 +17,3 @@ docker-build: docker-metadata
build-metadata:
./scripts/build_metadata.py --format env | tee build_metadata.env
BUILD_DIR = dist
package:
rm -Rf $(BUILD_DIR)
mkdir -p $(BUILD_DIR)
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-api.tar.gz' \
--owner='root' \
--group='root' \
--exclude-vcs \
api/config \
api/funkwhale_api \
api/install_os_dependencies.sh \
api/manage.py \
api/poetry.lock \
api/pyproject.toml \
api/Readme.md
cd '$(BUILD_DIR)' && \
tar --extract --gunzip --file='funkwhale-api.tar.gz' && \
zip -q 'funkwhale-api.zip' -r api && \
rm -Rf api
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-front.tar.gz' \
--owner='root' \
--group='root' \
--exclude-vcs \
--transform='s/^front\/dist/front/' \
front/dist
cd '$(BUILD_DIR)' && \
tar --extract --gunzip --file='funkwhale-front.tar.gz' && \
zip -q 'funkwhale-front.zip' -r front && \
rm -Rf front
# cd '$(BUILD_DIR)' && \
# cp ../front/tauri/target/release/bundle/appimage/funkwhale_*.AppImage FunkwhaleDesktop.AppImage
cd '$(BUILD_DIR)' && sha256sum * > SHA256SUMS

View File

@ -1 +0,0 @@
Dockerfile.alpine

124
api/Dockerfile Normal file
View File

@ -0,0 +1,124 @@
FROM alpine:3.17 as requirements
# We need this additional step to avoid having poetry's deps interacting with our
# dependencies. This is only required until alpine 3.16 is released, since this
# allows us to install poetry as package.
RUN set -eux; \
apk add --no-cache \
poetry \
py3-cryptography \
py3-pip \
python3
COPY pyproject.toml poetry.lock /
RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.17 as builder
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN set -eux; \
apk add --no-cache \
cargo \
curl \
gcc \
g++ \
git \
jpeg-dev \
libffi-dev \
libldap \
libxml2-dev \
libxslt-dev \
make \
musl-dev \
openldap-dev \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography=38.0.3-r1 \
py3-lxml=4.9.3-r1 \
py3-pillow=9.3.0-r0 \
py3-psycopg2=2.9.5-r0 \
py3-watchfiles=0.18.1-r0 \
python3-dev
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip; \
pip3 install setuptools wheel; \
# Currently we are unable to reliably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==38.0.3 \
lxml==4.9.3 \
pillow==9.3.0 \
psycopg2==2.9.5 \
watchfiles==0.18.1
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==38.0.3 \
lxml==4.9.3 \
pillow==9.3.0 \
psycopg2==2.9.5 \
watchfiles==0.18.1; \
fi
FROM alpine:3.17 as production
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
RUN set -eux; \
apk add --no-cache \
bash \
ffmpeg \
gettext \
jpeg-dev \
libldap \
libmagic \
libpq \
libxml2 \
libxslt \
py3-cryptography=38.0.3-r1 \
py3-lxml=4.9.3-r1 \
py3-pillow=9.3.0-r0 \
py3-psycopg2=2.9.5-r0 \
py3-watchfiles=0.18.1-r0 \
python3 \
tzdata
COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
ENV IS_DOCKER_SETUP=true
CMD ["./docker/server.sh"]

View File

@ -1,137 +0,0 @@
FROM alpine:3.21 AS requirements
RUN set -eux; \
apk add --no-cache \
poetry \
py3-cryptography \
py3-pip \
python3
COPY pyproject.toml poetry.lock /
RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.21 AS builder
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
RUN set -eux; \
apk add --no-cache \
cargo \
curl \
gcc \
g++ \
git \
jpeg-dev \
libffi-dev \
libldap \
libxml2-dev \
libxslt-dev \
make \
musl-dev \
openldap-dev \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
python3-dev \
gfortran \
libgfortran \
openblas-dev \
py3-scipy \
py3-scikit-learn;
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
ENV PATH="/venv/bin:$PATH"
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --upgrade pip;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install setuptools wheel;
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
# Currently we are unable to reliably build rust-based packages on armv7. This
# is why we need to use the packages shipped by Alpine Linux.
# Since poetry does not allow in-place dependency pinning, we need
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles|scipy|scikit-learn' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles \
scipy \
scikit-learn;
ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles; \
fi
FROM alpine:3.21 AS production
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ARG PIP_NO_CACHE_DIR=1
RUN set -eux; \
apk add --no-cache \
bash \
ffmpeg \
gettext \
jpeg-dev \
libldap \
libmagic \
libpq \
libxml2 \
libxslt \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
py3-scipy \
py3-scikit-learn \
python3 \
tzdata
COPY --from=builder /venv /venv
ENV PATH="/venv/bin:$PATH"
COPY . /app
WORKDIR /app
RUN apk add --no-cache gfortran
RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \
pip3 install --no-deps --editable .
ENV IS_DOCKER_SETUP=true
CMD ["./docker/server.sh"]

View File

@ -1,71 +0,0 @@
# Builder stage: install Poetry and resolve the project dependencies into the
# /venv virtualenv, which is later copied into the runtime stage.
FROM python:3.13-slim AS builder
ARG POETRY_VERSION=1.8
ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
ENV POETRY_VIRTUALENVS_IN_PROJECT=1
ENV POETRY_VIRTUALENVS_CREATE=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
# Tell Poetry where to place its cache and virtual environment
ENV POETRY_CACHE_DIR=/opt/.cache
RUN pip install "poetry==${POETRY_VERSION}"
# Compile/test toolchain and LDAP headers needed to build the dependencies.
RUN --mount=type=cache,target=/var/lib/apt/lists \
apt update; \
apt install -y \
build-essential \
python3-dev \
libldap-dev \
libsasl2-dev \
slapd \
ldap-utils \
tox \
lcov \
valgrind
WORKDIR /app
COPY pyproject.toml .
# NOTE(review): the `. ${VIRTUAL_ENV}/bin/activate` only affects this RUN's
# shell; later instructions rely on PATH containing /venv/bin instead.
RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bin/activate
# Install dependencies only (--no-root); the project itself is installed in
# the runtime stage once the sources are copied in.
RUN --mount=type=cache,target=/opt/.cache \
poetry install --no-root --extras typesense
# Runtime stage: shared libraries required by the app plus the prebuilt venv.
FROM python:3.13-slim AS runtime
ARG POETRY_VERSION=1.8
ENV DEBIAN_FRONTEND=noninteractive
ENV VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"
RUN --mount=type=cache,target=/var/lib/apt/lists \
apt update; \
apt install -y \
ffmpeg \
gettext \
libjpeg-dev \
libldap-2.5-0 \
libmagic1 \
libpq5 \
libxml2 \
libxslt1.1
RUN pip install "poetry==${POETRY_VERSION}"
COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
WORKDIR /app
COPY . /app
# Installs the project itself (deps are already present in the copied venv).
RUN poetry install --extras typesense
CMD ["./docker/server.sh"]

View File

@ -4,12 +4,11 @@ CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))
.PHONY: install lint
install:
poetry install --all-extras
poetry install
lint:
poetry run pylint \
--jobs=$(CPU_CORES) \
--output-format=colorized \
--recursive=true \
--disable=C,R,W,I \
config funkwhale_api tests

View File

@ -299,31 +299,10 @@ def background_task(name):
# HOOKS
TRIGGER_THIRD_PARTY_UPLOAD = "third_party_upload"
"""
Called when a track is being listened
"""
LISTENING_CREATED = "listening_created"
"""
Called when a track is being listened
"""
LISTENING_SYNC = "listening_sync"
"""
Called by the task manager to trigger listening sync
"""
FAVORITE_CREATED = "favorite_created"
"""
Called when a track is being favorited
"""
FAVORITE_DELETED = "favorite_deleted"
"""
Called when a favorited track is being unfavorited
"""
FAVORITE_SYNC = "favorite_sync"
"""
Called by the task manager to trigger favorite sync
"""
SCAN = "scan"
"""

View File

@ -1,7 +1,7 @@
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url
from django.core.asgi import get_asgi_application
from django.urls import re_path
from funkwhale_api.instance import consumers
@ -10,12 +10,7 @@ application = ProtocolTypeRouter(
# Empty for now (http->django views is added by default)
"websocket": AuthMiddlewareStack(
URLRouter(
[
re_path(
"^api/v1/activity$",
consumers.InstanceActivityConsumer.as_asgi(),
)
]
[url("^api/v1/activity$", consumers.InstanceActivityConsumer.as_asgi())]
)
),
"http": get_asgi_application(),

View File

@ -1,3 +1,5 @@
import os
from drf_spectacular.contrib.django_oauth_toolkit import OpenApiAuthenticationExtension
from drf_spectacular.plumbing import build_bearer_security_scheme_object
@ -42,6 +44,7 @@ def custom_preprocessing_hook(endpoints):
filtered = []
# your modifications to the list of operations that are exposed in the schema
api_type = os.environ.get("API_TYPE", "v1")
for path, path_regex, method, callback in endpoints:
if path.startswith("/api/v1/providers"):
@ -53,7 +56,7 @@ def custom_preprocessing_hook(endpoints):
if path.startswith("/api/v1/oauth/authorize"):
continue
if path.startswith("/api/v1") or path.startswith("/api/v2"):
if path.startswith(f"/api/{api_type}"):
filtered.append((path, path_regex, method, callback))
return filtered

View File

@ -2,7 +2,7 @@ import logging.config
import sys
import warnings
from collections import OrderedDict
from urllib.parse import urlparse, urlsplit
from urllib.parse import urlsplit
import environ
from celery.schedules import crontab
@ -114,7 +114,6 @@ else:
logger.info("Loaded env file at %s/.env", path)
break
FUNKWHALE_PLUGINS = env("FUNKWHALE_PLUGINS", default="")
FUNKWHALE_PLUGINS_PATH = env(
"FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/"
)
@ -225,16 +224,6 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) + [FUNKWHALE_HOSTNA
List of allowed hostnames for which the Funkwhale server will answer.
"""
CSRF_TRUSTED_ORIGINS = [
urlparse("//" + o, FUNKWHALE_PROTOCOL).geturl() for o in ALLOWED_HOSTS
]
"""
List of origins that are trusted for unsafe requests
We simply consider all allowed hosts to be trusted origins
See DJANGO_ALLOWED_HOSTS in .env.example for details
See https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
"""
# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
@ -280,7 +269,6 @@ LOCAL_APPS = (
# Your stuff: custom apps go here
"funkwhale_api.instance",
"funkwhale_api.audio",
"funkwhale_api.contrib.listenbrainz",
"funkwhale_api.music",
"funkwhale_api.requests",
"funkwhale_api.favorites",
@ -315,7 +303,6 @@ MIDDLEWARE = (
tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True))
+ tuple(ADDITIONAL_MIDDLEWARES_BEFORE)
+ (
"allauth.account.middleware.AccountMiddleware",
"django.middleware.security.SecurityMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"corsheaders.middleware.CorsMiddleware",
@ -556,15 +543,7 @@ The path where static files are collected.
"""
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = env("STATIC_URL", default=FUNKWHALE_URL + "/staticfiles/")
STORAGES = {
"default": {
"BACKEND": "funkwhale_api.common.storage.ASCIIFileSystemStorage",
},
"staticfiles": {
"BACKEND": "django.contrib.staticfiles.storage.StaticFilesStorage",
},
}
DEFAULT_FILE_STORAGE = "funkwhale_api.common.storage.ASCIIFileSystemStorage"
PROXY_MEDIA = env.bool("PROXY_MEDIA", default=True)
"""
@ -622,20 +601,7 @@ if AWS_ACCESS_KEY_ID:
"""
AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN", default=None)
"""
Custom domain for serving your S3 files.
Useful if your provider offers a CDN-like service for your bucket.
.. important::
The URL must not contain a scheme (:attr:`AWS_S3_URL_PROTOCOL` is
automatically prepended) nor a trailing slash.
"""
AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", default="https:")
"""
Protocol to use when constructing the custom domain (see :attr:`AWS_S3_CUSTOM_DOMAIN`)
.. important::
It must end with a `:`, remove `//`.
Custom domain to use for your S3 storage.
"""
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
"""
@ -663,7 +629,7 @@ if AWS_ACCESS_KEY_ID:
A directory in your S3 bucket where you store files.
Use this if you plan to share the bucket between services.
"""
STORAGES["default"]["BACKEND"] = "funkwhale_api.common.storage.ASCIIS3Boto3Storage"
DEFAULT_FILE_STORAGE = "funkwhale_api.common.storage.ASCIIS3Boto3Storage"
# See:
@ -924,7 +890,7 @@ Example:
# Your common stuff: Below this line define 3rd party library settings
CELERY_TASK_DEFAULT_RATE_LIMIT = 1
CELERY_TASK_TIME_LIMIT = env.int("CELERY_TASK_TIME_LIMIT", default=300)
CELERY_TASK_TIME_LIMIT = 300
CELERY_BEAT_SCHEDULE = {
"audio.fetch_rss_feeds": {
"task": "audio.fetch_rss_feeds",
@ -976,29 +942,12 @@ CELERY_BEAT_SCHEDULE = {
),
"options": {"expires": 60 * 60},
},
"listenbrainz.trigger_listening_sync_with_listenbrainz": {
"task": "listenbrainz.trigger_listening_sync_with_listenbrainz",
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
"listenbrainz.trigger_favorite_sync_with_listenbrainz": {
"task": "listenbrainz.trigger_favorite_sync_with_listenbrainz",
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
"tags.update_musicbrainz_genre": {
"task": "tags.update_musicbrainz_genre",
"schedule": crontab(day_of_month="2", minute="30", hour="3"),
"options": {"expires": 60 * 60 * 24},
},
}
if env.str("TYPESENSE_API_KEY", default=None):
CELERY_BEAT_SCHEDULE["typesense.build_canonical_index"] = {
"typesense.build_canonical_index": {
"task": "typesense.build_canonical_index",
"schedule": crontab(day_of_week="*/2", minute="0", hour="3"),
"options": {"expires": 60 * 60 * 24},
}
},
}
if env.bool("ADD_ALBUM_TAGS_FROM_TRACKS", default=True):
CELERY_BEAT_SCHEDULE["music.albums_set_tags_from_tracks"] = {
@ -1244,7 +1193,7 @@ if BROWSABLE_API_ENABLED:
"rest_framework.renderers.BrowsableAPIRenderer",
)
REST_AUTH = {
REST_AUTH_SERIALIZERS = {
"PASSWORD_RESET_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetSerializer", # noqa
"PASSWORD_RESET_CONFIRM_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetConfirmSerializer", # noqa
}
@ -1406,7 +1355,6 @@ VERSATILEIMAGEFIELD_RENDITION_KEY_SETS = {
],
"attachment_square": [
("original", "url"),
("small_square_crop", "crop__50x50"),
("medium_square_crop", "crop__200x200"),
("large_square_crop", "crop__600x600"),
],
@ -1543,10 +1491,3 @@ Typesense hostname. Defaults to `localhost` on non-Docker deployments and to `ty
Docker deployments.
"""
TYPESENSE_NUM_TYPO = env("TYPESENSE_NUM_TYPO", default=5)
"""
Max tracks to be downloaded when the THIRD_PARTY_UPLOAD plugin hook is triggered.
Each api request to playlist tracks or radio tracks trigger the hook if tracks upload are missing.
If your instance is big your ip might get rate limited.
"""
THIRD_PARTY_UPLOAD_MAX_UPLOADS = env.int("THIRD_PARTY_UPLOAD_MAX_UPLOADS", default=10)

View File

@ -2,7 +2,8 @@
Local settings
- Run in Debug mode
- Add Django Debug Toolbar when INTERNAL_IPS are given and match the request
- Use console backend for e-mails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
@ -24,6 +25,11 @@ SECRET_KEY = env(
"DJANGO_SECRET_KEY", default="mc$&b=5j#6^bv7tld1gyjp2&+^-qrdy=0sw@r5sua*1zp4fmxc"
)
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = "localhost"
EMAIL_PORT = 1025
# django-debug-toolbar
# ------------------------------------------------------------------------------
@ -90,6 +96,8 @@ CELERY_TASK_ALWAYS_EAGER = False
# Your local stuff: Below this line define 3rd party library settings
CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
SPECTACULAR_SETTINGS = {
"TITLE": "Funkwhale API",
@ -142,16 +150,4 @@ MIDDLEWARE = (
"funkwhale_api.common.middleware.PymallocMiddleware",
) + MIDDLEWARE
REST_FRAMEWORK.update(
{
"TEST_REQUEST_RENDERER_CLASSES": [
"rest_framework.renderers.MultiPartRenderer",
"rest_framework.renderers.JSONRenderer",
"rest_framework.renderers.TemplateHTMLRenderer",
"funkwhale_api.playlists.renderers.PlaylistXspfRenderer",
],
}
)
# allows makemigrations and superuser creation
FORCE = env("FORCE", default=True)
TYPESENSE_API_KEY = "apikey"

View File

@ -41,6 +41,18 @@ SECRET_KEY = env("DJANGO_SECRET_KEY")
# SESSION_COOKIE_HTTPONLY = True
# SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS
# END SITE CONFIGURATION
# Static Assets
# ------------------------
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:

View File

@ -0,0 +1,9 @@
"""Development settings: hard-coded defaults on top of ``.common``.

Not for production use — DEBUG is enabled and the secrets below are public.
"""
import os

# .common reads FUNKWHALE_URL at import time, so default it before importing.
os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")

from .common import * # noqa

# Development-only values: never reuse these on a deployed instance.
DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"

View File

@ -1,6 +1,7 @@
from django.conf import settings
from django.conf.urls import url
from django.conf.urls.static import static
from django.urls import include, path, re_path
from django.urls import include, path
from django.views import defaults as default_views
from config import plugins
@ -9,41 +10,34 @@ from funkwhale_api.common import admin
plugins_patterns = plugins.trigger_filter(plugins.URLS, [], enabled=True)
api_patterns = [
re_path("v1/", include("config.urls.api")),
re_path("v2/", include("config.urls.api_v2")),
re_path("subsonic/", include("config.urls.subsonic")),
url("v1/", include("config.urls.api")),
url("v2/", include("config.urls.api_v2")),
url("subsonic/", include("config.urls.subsonic")),
]
urlpatterns = [
# Django Admin, use {% url 'admin:index' %}
re_path(settings.ADMIN_URL, admin.site.urls),
re_path(r"^api/", include((api_patterns, "api"), namespace="api")),
re_path(
url(settings.ADMIN_URL, admin.site.urls),
url(r"^api/", include((api_patterns, "api"), namespace="api")),
url(
r"^",
include(
("funkwhale_api.federation.urls", "federation"), namespace="federation"
),
),
re_path(
r"^api/v1/auth/",
include("funkwhale_api.users.rest_auth_urls"),
),
re_path(
r"^api/v2/auth/",
include("funkwhale_api.users.rest_auth_urls"),
),
re_path(r"^accounts/", include("allauth.urls")),
url(r"^api/v1/auth/", include("funkwhale_api.users.rest_auth_urls")),
url(r"^accounts/", include("allauth.urls")),
] + plugins_patterns
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
re_path(r"^400/$", default_views.bad_request),
re_path(r"^403/$", default_views.permission_denied),
re_path(r"^404/$", default_views.page_not_found),
re_path(r"^500/$", default_views.server_error),
url(r"^400/?$", default_views.bad_request),
url(r"^403/?$", default_views.permission_denied),
url(r"^404/?$", default_views.page_not_found),
url(r"^500/?$", default_views.server_error),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if "debug_toolbar" in settings.INSTALLED_APPS:
@ -55,5 +49,5 @@ if settings.DEBUG:
if "silk" in settings.INSTALLED_APPS:
urlpatterns = [
re_path(r"^api/silk/", include("silk.urls", namespace="silk"))
url(r"^api/silk/", include("silk.urls", namespace="silk"))
] + urlpatterns

View File

@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from funkwhale_api.activity import views as activity_views
from funkwhale_api.audio import views as audio_views
@ -29,61 +28,61 @@ router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
v1_patterns = router.urls
v1_patterns += [
re_path(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
re_path(
url(r"^oembed/?$", views.OembedView.as_view(), name="oembed"),
url(
r"^instance/",
include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
),
re_path(
url(
r"^manage/",
include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
),
re_path(
url(
r"^moderation/",
include(
("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
),
),
re_path(
url(
r"^federation/",
include(
("funkwhale_api.federation.api_urls", "federation"), namespace="federation"
),
),
re_path(
url(
r"^providers/",
include(("funkwhale_api.providers.urls", "providers"), namespace="providers"),
),
re_path(
url(
r"^favorites/",
include(("funkwhale_api.favorites.urls", "favorites"), namespace="favorites"),
),
re_path(r"^search$", views.Search.as_view(), name="search"),
re_path(
url(r"^search$", views.Search.as_view(), name="search"),
url(
r"^radios/",
include(("funkwhale_api.radios.urls", "radios"), namespace="radios"),
),
re_path(
url(
r"^history/",
include(("funkwhale_api.history.urls", "history"), namespace="history"),
),
re_path(
url(
r"^",
include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
),
# XXX: remove if Funkwhale 1.1
re_path(
url(
r"^users/",
include(("funkwhale_api.users.api_urls", "users"), namespace="users-nested"),
),
re_path(
url(
r"^oauth/",
include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
),
re_path(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
re_path(
url(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
url(
r"^text-preview/?$", common_views.TextPreviewView.as_view(), name="text-preview"
),
]
urlpatterns = [re_path("", include((v1_patterns, "v1"), namespace="v1"))]
urlpatterns = [url("", include((v1_patterns, "v1"), namespace="v1"))]

View File

@ -1,36 +1,19 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from funkwhale_api.common import routers as common_routers
from . import api
router = common_routers.OptionalSlashRouter()
v2_patterns = router.urls
v2_patterns += [
re_path(
url(
r"^instance/",
include(("funkwhale_api.instance.urls_v2", "instance"), namespace="instance"),
),
re_path(
url(
r"^radios/",
include(("funkwhale_api.radios.urls_v2", "radios"), namespace="radios"),
),
]
v2_paths = {
pattern.pattern.regex.pattern
for pattern in v2_patterns
if hasattr(pattern.pattern, "regex")
}
filtered_v1_patterns = [
pattern
for pattern in api.v1_patterns
if pattern.pattern.regex.pattern not in v2_paths
]
v2_patterns += filtered_v1_patterns
urlpatterns = [re_path("", include((v2_patterns, "v2"), namespace="v2"))]
urlpatterns = [url("", include((v2_patterns, "v2"), namespace="v2"))]

View File

@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from rest_framework import routers
from rest_framework.urlpatterns import format_suffix_patterns
@ -9,9 +8,7 @@ subsonic_router = routers.SimpleRouter(trailing_slash=False)
subsonic_router.register(r"rest", SubsonicViewSet, basename="subsonic")
subsonic_patterns = format_suffix_patterns(subsonic_router.urls, allowed=["view"])
urlpatterns = [
re_path("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))
]
urlpatterns = [url("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))]
# urlpatterns = [
# url(

View File

@ -9,5 +9,5 @@ funkwhale-manage migrate
exec gunicorn config.asgi:application \
--workers "${FUNKWHALE_WEB_WORKERS-1}" \
--worker-class uvicorn.workers.UvicornWorker \
--bind 0.0.0.0:"${FUNKWHALE_API_PORT}" \
--bind 0.0.0.0:5000 \
${GUNICORN_ARGS-}

View File

@ -38,27 +38,13 @@ def combined_recent(limit, **kwargs):
def get_activity(user, limit=20):
query = fields.privacy_level_query(
user, "actor__user__privacy_level", "actor__user"
)
query = fields.privacy_level_query(user, lookup_field="user__privacy_level")
querysets = [
Listening.objects.filter(query)
.select_related(
"track",
"actor",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
Listening.objects.filter(query).select_related(
"track", "user", "track__artist", "track__album__artist"
),
TrackFavorite.objects.filter(query)
.select_related(
"track",
"actor",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
TrackFavorite.objects.filter(query).select_related(
"track", "user", "track__artist", "track__album__artist"
),
]
records = combined_recent(limit=limit, querysets=querysets)

View File

@ -21,11 +21,7 @@ TAG_FILTER = common_filters.MultipleQueryFilter(method=filter_tags)
class ChannelFilter(moderation_filters.HiddenContentFilterSet):
q = fields.SearchFilter(
search_fields=[
"artist_credit__artist__name",
"actor__summary",
"actor__preferred_username",
]
search_fields=["artist__name", "actor__summary", "actor__preferred_username"]
)
tag = TAG_FILTER
scope = common_filters.ActorScopeFilter(actor_field="attributed_to", distinct=True)

View File

@ -26,7 +26,6 @@ from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.music import tasks
from funkwhale_api.music.serializers import COVER_WRITE_FIELD, CoverField
from funkwhale_api.tags import models as tags_models
from funkwhale_api.tags import serializers as tags_serializers
@ -247,14 +246,11 @@ class SimpleChannelArtistSerializer(serializers.Serializer):
description = common_serializers.ContentSerializer(allow_null=True, required=False)
cover = CoverField(allow_null=True, required=False)
channel = serializers.UUIDField(allow_null=True, required=False)
tracks_count = serializers.SerializerMethodField(required=False)
tracks_count = serializers.IntegerField(source="_tracks_count", required=False)
tags = serializers.ListField(
child=serializers.CharField(), source="_prefetched_tagged_items", required=False
)
def get_tracks_count(self, o) -> int:
return getattr(o, "_tracks_count", 0)
class ChannelSerializer(serializers.ModelSerializer):
artist = SimpleChannelArtistSerializer()
@ -263,7 +259,6 @@ class ChannelSerializer(serializers.ModelSerializer):
attributed_to = federation_serializers.APIActorSerializer()
rss_url = serializers.CharField(source="get_rss_url")
url = serializers.SerializerMethodField()
subscriptions_count = serializers.SerializerMethodField()
class Meta:
model = models.Channel
@ -277,7 +272,6 @@ class ChannelSerializer(serializers.ModelSerializer):
"rss_url",
"url",
"downloads_count",
"subscriptions_count",
]
def to_representation(self, obj):
@ -286,7 +280,6 @@ class ChannelSerializer(serializers.ModelSerializer):
data["subscriptions_count"] = self.get_subscriptions_count(obj)
return data
@extend_schema_field(OpenApiTypes.INT)
def get_subscriptions_count(self, obj) -> int:
return obj.actor.received_follows.exclude(approved=False).count()
@ -756,7 +749,7 @@ class RssFeedItemSerializer(serializers.Serializer):
else:
existing_track = (
music_models.Track.objects.filter(
uuid=expected_uuid, artist_credit__artist__channel=channel
uuid=expected_uuid, artist__channel=channel
)
.select_related("description", "attachment_cover")
.first()
@ -772,6 +765,7 @@ class RssFeedItemSerializer(serializers.Serializer):
"disc_number": validated_data.get("itunes_season", 1) or 1,
"position": validated_data.get("itunes_episode", 1) or 1,
"title": validated_data["title"],
"artist": channel.artist,
}
)
if "rights" in validated_data:
@ -807,21 +801,6 @@ class RssFeedItemSerializer(serializers.Serializer):
**track_kwargs,
defaults=track_defaults,
)
# channel only have one artist so we can safely update artist_credit
defaults = {
"artist": channel.artist,
"credit": channel.artist.name,
"joinphrase": "",
}
query = (
Q(artist=channel.artist) & Q(credit=channel.artist.name) & Q(joinphrase="")
)
artist_credit = tasks.get_best_candidate_or_create(
music_models.ArtistCredit, query, defaults, ["artist", "joinphrase"]
)
track.artist_credit.set([artist_credit[0]])
# optimisation for reducing SQL queries, because we cannot use select_related with
# update or create, so we restore the cache by hand
if existing_track:

View File

@ -2,12 +2,10 @@ from django import http
from django.db import transaction
from django.db.models import Count, Prefetch, Q, Sum
from django.utils import timezone
from drf_spectacular.utils import extend_schema, extend_schema_view, inline_serializer
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import decorators, exceptions, mixins
from rest_framework import permissions as rest_permissions
from rest_framework import response
from rest_framework import serializers as rest_serializers
from rest_framework import viewsets
from rest_framework import response, viewsets
from funkwhale_api.common import locales, permissions, preferences
from funkwhale_api.common import utils as common_utils
@ -29,7 +27,7 @@ ARTIST_PREFETCH_QS = (
"attachment_cover",
)
.prefetch_related(music_views.TAG_PREFETCH)
.annotate(_tracks_count=Count("artist_credit__tracks"))
.annotate(_tracks_count=Count("tracks"))
)
@ -105,7 +103,7 @@ class ChannelViewSet(
queryset = super().get_queryset()
if self.action == "retrieve":
queryset = queryset.annotate(
_downloads_count=Sum("artist__artist_credit__tracks__downloads_count")
_downloads_count=Sum("artist__tracks__downloads_count")
)
return queryset
@ -194,6 +192,7 @@ class ChannelViewSet(
if object.attributed_to == actors.get_service_actor():
# external feed, we redirect to the canonical one
return http.HttpResponseRedirect(object.rss_url)
uploads = (
object.library.uploads.playable_by(None)
.prefetch_related(
@ -212,32 +211,6 @@ class ChannelViewSet(
data = serializers.rss_serialize_channel_full(channel=object, uploads=uploads)
return response.Response(data, status=200)
@extend_schema(
responses=inline_serializer(
name="MetedataChoicesSerializer",
fields={
"language": rest_serializers.ListField(
child=inline_serializer(
name="LanguageItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
},
)
),
"itunes_category": rest_serializers.ListField(
child=inline_serializer(
name="iTunesCategoryItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
"children": rest_serializers.CharField(),
},
)
),
},
)
)
@decorators.action(
methods=["get"],
detail=False,

View File

@ -49,7 +49,6 @@ def handler_create_user(
utils.logger.warn("Unknown permission %s", permission)
utils.logger.debug("Creating actor…")
user.actor = models.create_actor(user)
models.create_user_libraries(user)
user.save()
return user

View File

@ -1,6 +1,6 @@
from allauth.account.models import EmailAddress
from allauth.account.utils import send_email_confirmation
from django.core.cache import cache
from django.utils.translation import gettext as _
from django.utils.translation import ugettext as _
from oauth2_provider.contrib.rest_framework.authentication import (
OAuth2Authentication as BaseOAuth2Authentication,
)
@ -20,13 +20,9 @@ def resend_confirmation_email(request, user):
if cache.get(cache_key):
return False
# We do the sending of the confirmation by hand because we don't want to pass the request down
# to the email rendering, which would cause another UnverifiedEmail Exception and restarts the sending
# again and again
email = EmailAddress.objects.get_for_user(user, user.email)
email.send_confirmation()
done = send_email_confirmation(request, user)
cache.set(cache_key, True, THROTTLE_DELAY)
return True
return done
class OAuth2Authentication(BaseOAuth2Authentication):

View File

@ -19,10 +19,6 @@ class JsonAuthConsumer(JsonWebsocketConsumer):
channels.group_add(group, self.channel_name)
def disconnect(self, close_code):
if self.scope.get("user", False) and self.scope.get("user").pk is not None:
groups = self.scope["user"].get_channels_groups() + self.groups
else:
groups = self.groups
groups = self.scope["user"].get_channels_groups() + self.groups
for group in groups:
channels.group_discard(group, self.channel_name)

View File

@ -24,21 +24,8 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
if user.is_anonymous:
return models.Q(**{lookup_field: "everyone"})
followers_query = models.Q(
**{
f"{lookup_field}": "followers",
f"{user_field}__actor__in": user.actor.get_approved_followings(),
}
)
# Federated TrackFavorite don't have an user associated with the trackfavorite.actor
# to do : if we implement the followers privacy_level this will become a problem
no_user_query = models.Q(**{f"{user_field}__isnull": True})
return (
models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]})
| models.Q(**{lookup_field: "me", user_field: user})
| followers_query
| no_user_query
return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
**{lookup_field: "me", user_field: user}
)

View File

@ -1,4 +1,5 @@
from django.conf import settings
import os
from django.contrib.auth.management.commands.createsuperuser import (
Command as BaseCommand,
)
@ -11,8 +12,7 @@ class Command(BaseCommand):
Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour.
We therefore prohibit the execution of the command.
"""
force = settings.FORCE
if not force == 1:
if not os.environ.get("FORCE") == "1":
raise CommandError(
"Running createsuperuser on your Funkwhale instance bypasses some of our checks "
"which can lead to unexpected behavior of your instance. We therefore suggest to "

View File

@ -68,33 +68,22 @@ def create_taggable_items(dependency):
CONFIG = [
{
"id": "artist_credit",
"model": music_models.ArtistCredit,
"factory": "music.ArtistCredit",
"factory_kwargs": {"joinphrase": ""},
"depends_on": [
{"field": "artist", "id": "artists", "default_factor": 0.5},
],
},
{
"id": "tracks",
"model": music_models.Track,
"factory": "music.Track",
"factory_kwargs": {"album": None},
"factory_kwargs": {"artist": None, "album": None},
"depends_on": [
{"field": "album", "id": "albums", "default_factor": 0.1},
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.05},
{"field": "artist", "id": "artists", "default_factor": 0.05},
],
},
{
"id": "albums",
"model": music_models.Album,
"factory": "music.Album",
"factory_kwargs": {},
"depends_on": [
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.3}
],
"factory_kwargs": {"artist": None},
"depends_on": [{"field": "artist", "id": "artists", "default_factor": 0.3}],
},
{"id": "artists", "model": music_models.Artist, "factory": "music.Artist"},
{
@ -321,23 +310,12 @@ class Command(BaseCommand):
candidates = list(queryset.values_list("pk", flat=True))
picked_pks = [random.choice(candidates) for _ in objects]
picked_objects = {o.pk: o for o in queryset.filter(pk__in=picked_pks)}
saved_obj = []
for i, obj in enumerate(objects):
if create_dependencies:
value = random.choice(candidates)
else:
value = picked_objects[picked_pks[i]]
if dependency["field"] == "artist_credit":
obj.save()
obj.artist_credit.set([value])
saved_obj.append(obj)
else:
setattr(obj, dependency["field"], value)
if saved_obj:
return saved_obj
setattr(obj, dependency["field"], value)
if not handler:
objects = row["model"].objects.bulk_create(objects, batch_size=BATCH_SIZE)
results[row["id"]] = objects

View File

@ -1,4 +1,5 @@
from django.conf import settings
import os
from django.core.management.base import CommandError
from django.core.management.commands.makemigrations import Command as BaseCommand
@ -10,8 +11,8 @@ class Command(BaseCommand):
We ensure the command is disabled, unless a specific env var is provided.
"""
force = settings.FORCE
if not force == 1:
force = os.environ.get("FORCE") == "1"
if not force:
raise CommandError(
"Running makemigrations on your Funkwhale instance can have desastrous"
" consequences. This command is disabled, and should only be run in "

View File

@ -10,7 +10,7 @@ class Command(BaseCommand):
self.help = "Helper to generate randomized testdata"
self.type_choices = {"notifications": self.handle_notifications}
self.missing_args_message = f"Please specify one of the following sub-commands: {*self.type_choices.keys(), }"
self.missing_args_message = f"Please specify one of the following sub-commands: { *self.type_choices.keys(), }"
def add_arguments(self, parser):
subparsers = parser.add_subparsers(dest="subcommand")

View File

@ -60,12 +60,12 @@ class NullsLastSQLCompiler(SQLCompiler):
class NullsLastQuery(models.sql.query.Query):
"""Use a custom compiler to inject 'NULLS LAST' (for PostgreSQL)."""
def get_compiler(self, using=None, connection=None, elide_empty=True):
def get_compiler(self, using=None, connection=None):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
return NullsLastSQLCompiler(self, connection, using, elide_empty)
return NullsLastSQLCompiler(self, connection, using)
class NullsLastQuerySet(models.QuerySet):
@ -257,13 +257,6 @@ class Attachment(models.Model):
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=original")
@property
def download_url_small_square_crop(self):
if self.file:
return utils.media_url(self.file.crop["50x50"].url)
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=small_square_crop")
@property
def download_url_medium_square_crop(self):
if self.file:

View File

@ -56,59 +56,3 @@ class OwnerPermission(BasePermission):
if not owner or not request.user.is_authenticated or owner != request.user:
raise owner_exception
return True
class PrivacyLevelPermission(BasePermission):
"""
Ensure the request actor have access to the object considering the privacylevel configuration
of the user.
request.user is None if actor, else its Anonymous if user is not auth.
"""
def has_object_permission(self, request, view, obj):
if (
not hasattr(obj, "user")
and hasattr(obj, "actor")
and not obj.actor.is_local
):
# it's a remote actor object. It should be public.
# But we could trigger an update of the remote actor data
# to avoid leaking data (#2326)
return True
if hasattr(obj, "privacy_level"):
privacy_level = obj.privacy_level
elif hasattr(obj, "actor") and obj.actor.user:
privacy_level = obj.actor.user.privacy_level
else:
privacy_level = obj.user.privacy_level
obj_actor = obj.actor if hasattr(obj, "actor") else obj.user.actor
if privacy_level == "everyone":
return True
# user is anonymous
if hasattr(request, "actor"):
request_actor = request.actor
elif request.user and request.user.is_authenticated:
request_actor = request.user.actor
else:
return False
if privacy_level == "instance":
# user is local
if request.user and hasattr(request.user, "actor"):
return True
elif hasattr(request, "actor") and request.actor and request.actor.is_local:
return True
else:
return False
elif privacy_level == "me" and obj_actor == request_actor:
return True
elif request_actor in obj_actor.get_approved_followers():
return True
else:
return False

View File

@ -2,7 +2,7 @@ import json
from django import forms
from django.conf import settings
from django.forms import JSONField
from django.contrib.postgres.forms import JSONField
from dynamic_preferences import serializers, types
from dynamic_preferences.registries import global_preferences_registry
@ -93,6 +93,7 @@ class SerializedPreference(types.BasePreferenceType):
serializer
"""
serializer = JSONSerializer
data_serializer_class = None
field_class = JSONField
widget = forms.Textarea

File diff suppressed because it is too large Load Diff

View File

@ -5,8 +5,8 @@ import os
import PIL
from django.core.exceptions import ObjectDoesNotExist
from django.core.files.uploadedfile import SimpleUploadedFile
from django.utils.encoding import smart_str
from django.utils.translation import gettext_lazy as _
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
@ -52,7 +52,7 @@ class RelatedField(serializers.RelatedField):
self.fail(
"does_not_exist",
related_field_name=self.related_field_name,
value=smart_str(data),
value=smart_text(data),
)
except (TypeError, ValueError):
self.fail("invalid")
@ -293,22 +293,11 @@ class AttachmentSerializer(serializers.Serializer):
file = StripExifImageField(write_only=True)
urls = serializers.SerializerMethodField()
@extend_schema_field(
{
"type": "object",
"properties": {
"original": {"type": "string"},
"small_square_crop": {"type": "string"},
"medium_square_crop": {"type": "string"},
"large_square_crop": {"type": "string"},
},
}
)
@extend_schema_field(OpenApiTypes.OBJECT)
def get_urls(self, o):
urls = {}
urls["source"] = o.url
urls["original"] = o.download_url_original
urls["small_square_crop"] = o.download_url_small_square_crop
urls["medium_square_crop"] = o.download_url_medium_square_crop
urls["large_square_crop"] = o.download_url_large_square_crop
return urls

View File

@ -1,6 +1,6 @@
import django.dispatch
""" Required args: mutation """
mutation_created = django.dispatch.Signal()
""" Required args: mutation, old_is_approved, new_is_approved """
mutation_updated = django.dispatch.Signal()
mutation_created = django.dispatch.Signal(providing_args=["mutation"])
mutation_updated = django.dispatch.Signal(
providing_args=["mutation", "old_is_approved", "new_is_approved"]
)

View File

@ -6,7 +6,7 @@ from django.core.exceptions import ValidationError
from django.core.files.images import get_image_dimensions
from django.template.defaultfilters import filesizeformat
from django.utils.deconstruct import deconstructible
from django.utils.translation import gettext_lazy as _
from django.utils.translation import ugettext_lazy as _
@deconstructible

View File

@ -176,12 +176,7 @@ class AttachmentViewSet(
return r
size = request.GET.get("next", "original").lower()
if size not in [
"original",
"small_square_crop",
"medium_square_crop",
"large_square_crop",
]:
if size not in ["original", "medium_square_crop", "large_square_crop"]:
size = "original"
try:

View File

@ -1,13 +0,0 @@
import logging
from config import plugins
from funkwhale_api.contrib.archivedl import tasks
from .funkwhale_startup import PLUGIN
logger = logging.getLogger(__name__)
@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def lauch_download(track, conf=None):
    """Hook: queue an archive.org download task for *track*.

    Fix: ``conf`` previously defaulted to a shared mutable ``{}``; a
    ``None`` sentinel avoids the shared-mutable-default pitfall while
    keeping the same value passed to the task.
    """
    tasks.archive_download.delay(track_id=track.pk, conf=conf if conf is not None else {})

View File

@ -1,10 +0,0 @@
from config import plugins
# Plugin descriptor for the archive.org downloader; instance-wide
# (user=False) and currently exposes no configuration options.
PLUGIN = plugins.get_plugin_config(
    name="archivedl",
    label="Archive-dl",
    description="",
    version="0.1",
    user=False,
    conf=[],
)

View File

@ -1,192 +0,0 @@
import asyncio
import hashlib
import logging
import os
import tempfile
import time
import urllib.parse
from datetime import timedelta
import requests
from django.core.files import File
from django.utils import timezone
from funkwhale_api.federation import actors
from funkwhale_api.music import models, utils
from funkwhale_api.taskapp import celery
logger = logging.getLogger(__name__)
class TooManyQueriesError(Exception):
    """Raised to abort a download task: either an upload already exists
    for the track, or the archive.org rate limit has been exhausted."""

    pass
def check_existing_download_task(track):
    """Abort early when an archive.org upload already exists for *track*.

    Raises:
        TooManyQueriesError: if an upload from this provider is already
            pending or finished for the track.
    """
    if models.Upload.objects.filter(
        track=track,
        import_status__in=["pending", "finished"],
        third_party_provider="archive-dl",
    ).exists():
        raise TooManyQueriesError(
            "Upload for this track already exist or is pending. Stopping task."
        )
def check_last_third_party_queries(track, count):
    """Throttle queries against archive.org.

    If another archive-dl upload was created in the last 5 seconds, sleep
    2 seconds and recurse, giving up after more than 3 retries.

    @param track: the Track being downloaded (only used for recursion)
    @param count: current retry count, pass 0 on the first call

    Raises:
        TooManyQueriesError: after more than 3 consecutive retries.
    """
    # 15 per minutes according to their doc = one each 4 seconds
    time_threshold = timezone.now() - timedelta(seconds=5)
    if models.Upload.objects.filter(
        third_party_provider="archive-dl",
        import_status__in=["pending", "finished"],
        creation_date__gte=time_threshold,
    ).exists():
        logger.info(
            "Last archive.org query was too recent. Trying to wait 2 seconds..."
        )
        time.sleep(2)
        count += 1
        if count > 3:
            raise TooManyQueriesError(
                "Probably too many archivedl tasks are queue, stopping this task"
            )
        check_last_third_party_queries(track, count)
def create_upload(url, track, files_data):
    """Create a pending Upload for *track* sourced from archive.org.

    ``files_data`` is a single file entry from the archive.org files
    metadata API; its fields populate the upload's technical metadata.
    """
    mimetype = f"audio/{files_data.get('format', 'unknown')}"
    # NOTE(review): "mtime" in archive.org file metadata looks like a
    # modification timestamp, not a duration — presumably "length" was
    # intended here. TODO confirm against the metadata API response.
    duration = files_data.get("mtime", 0)
    filesize = files_data.get("size", 0)
    bitrate = files_data.get("bitrate", 0)
    # Each upload gets its own public library owned by the service actor.
    service_library = models.Library.objects.create(
        privacy_level="everyone",
        actor=actors.get_service_actor(),
    )
    return models.Upload.objects.create(
        mimetype=mimetype,
        source=url,
        third_party_provider="archive-dl",
        creation_date=timezone.now(),
        track=track,
        duration=duration,
        size=filesize,
        bitrate=bitrate,
        library=service_library,
        from_activity=None,
        import_status="pending",
    )
@celery.app.task(name="archivedl.archive_download")
@celery.require_instance(models.Track.objects.select_related(), "track")
def archive_download(track, conf):
    """Celery task: search archive.org for *track* and download the match.

    Bails out silently (logging the reason) when an upload already exists
    or the self-imposed rate limit is exhausted. Only the first search
    result page (one document) is considered.
    """
    try:
        check_existing_download_task(track)
        check_last_third_party_queries(track, 0)
    except TooManyQueriesError as e:
        logger.error(e)
        return
    artist_name = utils.get_artist_credit_string(track)
    query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
    with requests.Session() as session:
        url = get_search_url(query, page_size=1, page=1)
        page_data = fetch_json(url, session)
        for obj in page_data["response"]["docs"]:
            logger.info(f"launching download item for {str(obj)}")
            download_item(
                item_data=obj,
                session=session,
                allowed_extensions=utils.SUPPORTED_EXTENSIONS,
                track=track,
            )
def fetch_json(url, session):
    """GET *url* through *session* and return the parsed JSON payload."""
    logger.info(f"Fetching {url}...")
    with session.get(url) as response:
        payload = response.json()
    return payload
def download_item(
    item_data,
    session,
    allowed_extensions,
    track,
):
    """Download the first allowed file of an archive.org item for *track*.

    A pending Upload is created before downloading; if anything fails,
    the upload is deleted so no dangling pending row is left behind, and
    the exception is re-raised.
    """
    files_data = get_files_data(item_data["identifier"], session)
    to_download = list(
        filter_files(
            files_data["result"],
            allowed_extensions=allowed_extensions,
        )
    )
    url = f"https://archive.org/download/{item_data['identifier']}/{to_download[0]['name']}"
    upload = create_upload(url, track, to_download[0])
    try:
        with tempfile.TemporaryDirectory() as temp_dir:
            path = os.path.join(temp_dir, to_download[0]["name"])
            download_file(
                path,
                url=url,
                session=session,
                checksum=to_download[0]["sha1"],
                upload=upload,
                to_download=to_download,
            )
            logger.info(f"Finished to download item {item_data['identifier']}...")
    except Exception as e:
        # Clean up the pending upload before propagating the failure.
        upload.delete()
        raise e
def check_integrity(path, expected_checksum):
    """Return True when the SHA-1 of the file at *path* equals *expected_checksum*.

    Fix: hash in fixed-size chunks instead of reading the whole download
    into memory at once, and avoid shadowing the ``hash`` builtin.
    """
    digest = hashlib.sha1()
    with open(path, mode="rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            digest.update(chunk)
    return expected_checksum == digest.hexdigest()
def get_files_data(identifier, session):
    """Return the archive.org files metadata (JSON) for item *identifier*."""
    url = f"https://archive.org/metadata/{identifier}/files"
    logger.info(f"Fetching files data at {url}...")
    with session.get(url) as response:
        return response.json()
def download_file(path, url, session, checksum, upload, to_download):
    """Download *url* into *path*, attach it to *upload*, mark it finished.

    The network fetch is skipped when a file with a matching SHA-1 is
    already present at *path*.

    Fix: the original caught ``asyncio.TimeoutError``, which the
    synchronous ``requests`` session never raises, and then fell through
    and saved a partial/empty file as "finished". We now catch the
    request errors that can actually occur, log them, and re-raise so
    the caller (``download_item``) deletes the pending upload.
    """
    if os.path.exists(path) and check_integrity(path, checksum):
        logger.info(f"Skipping already downloaded file at {path}")
        return
    logger.info(f"Downloading file {url}...")
    with open(path, mode="wb") as f:
        try:
            with session.get(url) as response:
                f.write(response.content)
        except requests.RequestException as e:
            logger.error(f"Error while downloading {url}: {e}")
            raise
    with open(path, "rb") as f:
        upload.audio_file.save(f"{to_download['name']}", File(f))
    upload.import_status = "finished"
    upload.url = url
    upload.save()
    return upload
def filter_files(files, allowed_extensions):
    """Yield entries from *files* whose extension is in *allowed_extensions*.

    When *allowed_extensions* is falsy (None/empty), every entry passes.
    """
    if not allowed_extensions:
        yield from files
        return
    for entry in files:
        ext = os.path.splitext(entry["name"])[-1][1:]
        if ext in allowed_extensions:
            yield entry
def get_search_url(query, page_size, page):
    """Build an archive.org advancedsearch URL for *query*.

    NOTE(review): the backslash line-continuation inside the f-string
    embeds any leading whitespace of the continued line straight into
    the URL — verify the emitted URL contains no stray spaces.
    """
    q = urllib.parse.urlencode({"q": query})
    return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}\
&page={page}&output=json"

View File

@ -0,0 +1,168 @@
# Copyright (c) 2018 Philipp Wolfer <ph.wolfer@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import json
import logging
import ssl
import time
from http.client import HTTPSConnection
HOST_NAME = "api.listenbrainz.org"
PATH_SUBMIT = "/1/submit-listens"
SSL_CONTEXT = ssl.create_default_context()
class Track:
    """
    Represents a single track to submit.
    See https://listenbrainz.readthedocs.io/en/latest/dev/json.html
    """

    def __init__(self, artist_name, track_name, release_name=None, additional_info=None):
        """
        Create a new Track instance
        @param artist_name as str
        @param track_name as str
        @param release_name as str
        @param additional_info as dict

        Fix: ``additional_info`` previously defaulted to a shared mutable
        ``{}``; the ``None`` sentinel gives each instance its own dict.
        """
        self.artist_name = artist_name
        self.track_name = track_name
        self.release_name = release_name
        self.additional_info = {} if additional_info is None else additional_info

    @staticmethod
    def from_dict(data):
        """Build a Track from a dict shaped like ``to_dict()`` output."""
        return Track(
            data["artist_name"],
            data["track_name"],
            data.get("release_name", None),
            data.get("additional_info", {}),
        )

    def to_dict(self):
        """Return the JSON-serializable ListenBrainz track_metadata dict."""
        return {
            "artist_name": self.artist_name,
            "track_name": self.track_name,
            "release_name": self.release_name,
            "additional_info": self.additional_info,
        }

    def __repr__(self):
        return f"Track({self.artist_name}, {self.track_name})"
class ListenBrainzClient:
    """
    Submit listens to ListenBrainz.org.
    See https://listenbrainz.readthedocs.io/en/latest/dev/api.html
    """

    def __init__(self, user_token, logger=logging.getLogger(__name__)):
        # Epoch time before which no request may be sent; updated when
        # the server reports an exhausted rate limit.
        self.__next_request_time = 0
        self.user_token = user_token
        self.logger = logger

    def listen(self, listened_at, track):
        """
        Submit a listen for a track
        @param listened_at as int
        @param track as Track
        """
        payload = _get_payload(track, listened_at)
        return self._submit("single", [payload])

    def playing_now(self, track):
        """
        Submit a playing now notification for a track
        @param track as Track
        """
        payload = _get_payload(track)
        return self._submit("playing_now", [payload])

    def import_tracks(self, tracks):
        """
        Import a list of tracks as (listened_at, Track) pairs
        @param tracks as [(int, Track)]
        """
        payload = _get_payload_many(tracks)
        return self._submit("import", payload)

    def _submit(self, listen_type, payload, retry=0):
        # POST the payload, honouring the server-advertised rate limit
        # and retrying up to 5 times on HTTP 429.
        self._wait_for_ratelimit()
        self.logger.debug("ListenBrainz %s: %r", listen_type, payload)
        data = {"listen_type": listen_type, "payload": payload}
        headers = {
            "Authorization": "Token %s" % self.user_token,
            "Content-Type": "application/json",
        }
        body = json.dumps(data)
        conn = HTTPSConnection(HOST_NAME, context=SSL_CONTEXT)
        conn.request("POST", PATH_SUBMIT, body, headers)
        response = conn.getresponse()
        response_text = response.read()
        try:
            response_data = json.loads(response_text)
        except json.decoder.JSONDecodeError:
            # Keep the raw body for logging when it isn't valid JSON.
            response_data = response_text
        self._handle_ratelimit(response)
        log_msg = f"Response {response.status}: {response_data!r}"
        if response.status == 429 and retry < 5:  # Too Many Requests
            self.logger.warning(log_msg)
            return self._submit(listen_type, payload, retry + 1)
        elif response.status == 200:
            self.logger.debug(log_msg)
        else:
            self.logger.error(log_msg)
        return response

    def _wait_for_ratelimit(self):
        # Sleep until the earliest allowed request time, if in the future.
        now = time.time()
        if self.__next_request_time > now:
            delay = self.__next_request_time - now
            self.logger.debug("Rate limit applies, delay %d", delay)
            time.sleep(delay)

    def _handle_ratelimit(self, response):
        # Record when the next request may go out, based on the
        # X-RateLimit-* response headers.
        remaining = int(response.getheader("X-RateLimit-Remaining", 0))
        reset_in = int(response.getheader("X-RateLimit-Reset-In", 0))
        self.logger.debug("X-RateLimit-Remaining: %i", remaining)
        self.logger.debug("X-RateLimit-Reset-In: %i", reset_in)
        if remaining == 0:
            self.__next_request_time = time.time() + reset_in
def _get_payload_many(tracks):
payload = []
for listened_at, track in tracks:
data = _get_payload(track, listened_at)
payload.append(data)
return payload
def _get_payload(track, listened_at=None):
data = {"track_metadata": track.to_dict()}
if listened_at is not None:
data["listened_at"] = listened_at
return data

View File

@ -1,31 +1,27 @@
import liblistenbrainz
import funkwhale_api
from config import plugins
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from . import tasks
from .client import ListenBrainzClient, Track
from .funkwhale_startup import PLUGIN
@plugins.register_hook(plugins.LISTENING_CREATED, PLUGIN)
def submit_listen(listening, conf, **kwargs):
user_token = conf["user_token"]
if not user_token and not conf["submit_listenings"]:
if not user_token:
return
logger = PLUGIN["logger"]
logger.info("Submitting listen to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
client.set_auth_token(user_token)
listen = get_lb_listen(listening)
client.submit_single_listen(listen)
client = ListenBrainzClient(user_token=user_token, logger=logger)
track = get_track(listening.track)
client.listen(int(listening.creation_date.timestamp()), track)
def get_lb_listen(listening):
track = listening.track
def get_track(track):
artist = track.artist.name
title = track.title
album = None
additional_info = {
"media_player": "Funkwhale",
"media_player_version": funkwhale_api.__version__,
@ -40,97 +36,15 @@ def get_lb_listen(listening):
if track.album:
if track.album.title:
release_name = track.album.title
album = track.album.title
if track.album.mbid:
additional_info["release_mbid"] = str(track.album.mbid)
mbids = [ac.artist.mbid for ac in track.artist_credit.all() if ac.artist.mbid]
if mbids:
additional_info["artist_mbids"] = mbids
if track.artist.mbid:
additional_info["artist_mbids"] = [str(track.artist.mbid)]
upload = track.uploads.filter(duration__gte=0).first()
if upload:
additional_info["duration"] = upload.duration
return liblistenbrainz.Listen(
track_name=track.title,
listened_at=listening.creation_date.timestamp(),
artist_name=track.get_artist_credit_string,
release_name=release_name,
additional_info=additional_info,
)
@plugins.register_hook(plugins.FAVORITE_CREATED, PLUGIN)
def submit_favorite_creation(track_favorite, conf, **kwargs):
user_token = conf["user_token"]
if not user_token or not conf["submit_favorites"]:
return
logger = PLUGIN["logger"]
logger.info("Submitting favorite to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
track = track_favorite.track
if not track.mbid:
logger.warning(
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
)
return
client.submit_user_feedback(1, track.mbid)
@plugins.register_hook(plugins.FAVORITE_DELETED, PLUGIN)
def submit_favorite_deletion(track_favorite, conf, **kwargs):
user_token = conf["user_token"]
if not user_token or not conf["submit_favorites"]:
return
logger = PLUGIN["logger"]
logger.info("Submitting favorite deletion to ListenBrainz")
client = liblistenbrainz.ListenBrainz()
track = track_favorite.track
if not track.mbid:
logger.warning(
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
)
return
client.submit_user_feedback(0, track.mbid)
@plugins.register_hook(plugins.LISTENING_SYNC, PLUGIN)
def sync_listenings_from_listenbrainz(user, conf):
user_name = conf["user_name"]
if not user_name or not conf["sync_listenings"]:
return
logger = PLUGIN["logger"]
logger.info("Getting listenings from ListenBrainz")
try:
last_ts = (
history_models.Listening.objects.filter(actor=user.actor)
.filter(source="Listenbrainz")
.latest("creation_date")
.values_list("creation_date", flat=True)
).timestamp()
except funkwhale_api.history.models.Listening.DoesNotExist:
tasks.import_listenbrainz_listenings(user, user_name, 0)
return
tasks.import_listenbrainz_listenings(user, user_name, last_ts)
@plugins.register_hook(plugins.FAVORITE_SYNC, PLUGIN)
def sync_favorites_from_listenbrainz(user, conf):
user_name = conf["user_name"]
if not user_name or not conf["sync_favorites"]:
return
try:
last_ts = (
favorites_models.TrackFavorite.objects.filter(actor=user.actor)
.filter(source="Listenbrainz")
.latest("creation_date")
.creation_date.timestamp()
)
except favorites_models.TrackFavorite.DoesNotExist:
tasks.import_listenbrainz_favorites(user, user_name, 0)
return
tasks.import_listenbrainz_favorites(user, user_name, last_ts)
return Track(artist, title, album, additional_info)

View File

@ -3,7 +3,7 @@ from config import plugins
PLUGIN = plugins.get_plugin_config(
name="listenbrainz",
label="ListenBrainz",
description="A plugin that allows you to submit or sync your listens and favorites to ListenBrainz.",
description="A plugin that allows you to submit your listens to ListenBrainz.",
homepage="https://docs.funkwhale.audio/users/builtinplugins.html#listenbrainz-plugin", # noqa
version="0.3",
user=True,
@ -13,45 +13,6 @@ PLUGIN = plugins.get_plugin_config(
"type": "text",
"label": "Your ListenBrainz user token",
"help": "You can find your user token in your ListenBrainz profile at https://listenbrainz.org/profile/",
},
{
"name": "user_name",
"type": "text",
"required": False,
"label": "Your ListenBrainz user name.",
"help": "Required for importing listenings and favorites with ListenBrainz \
but not to send activities",
},
{
"name": "submit_listenings",
"type": "boolean",
"default": True,
"label": "Enable listening submission to ListenBrainz",
"help": "If enabled, your listenings from Funkwhale will be imported into ListenBrainz.",
},
{
"name": "sync_listenings",
"type": "boolean",
"default": False,
"label": "Enable listenings sync",
"help": "If enabled, your listening from ListenBrainz will be imported into Funkwhale. This means they \
will be used along with Funkwhale listenings to filter out recently listened content or \
generate recommendations",
},
{
"name": "sync_favorites",
"type": "boolean",
"default": False,
"label": "Enable favorite sync",
"help": "If enabled, your favorites from ListenBrainz will be imported into Funkwhale. This means they \
will be used along with Funkwhale favorites (UI display, federation activity)",
},
{
"name": "submit_favorites",
"type": "boolean",
"default": False,
"label": "Enable favorite submission to ListenBrainz services",
"help": "If enabled, your favorites from Funkwhale will be submitted to ListenBrainz",
},
}
],
)

View File

@ -1,165 +0,0 @@
import datetime
import liblistenbrainz
from django.utils import timezone
from config import plugins
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from funkwhale_api.music import models as music_models
from funkwhale_api.taskapp import celery
from funkwhale_api.users import models
from .funkwhale_startup import PLUGIN
@celery.app.task(name="listenbrainz.trigger_listening_sync_with_listenbrainz")
def trigger_listening_sync_with_listenbrainz():
    """Periodic task: fire the LISTENING_SYNC hook for every user who has
    the listenbrainz plugin with ``sync_listenings`` enabled and was
    active within the last 30 days."""
    now = timezone.now()
    active_month = now - datetime.timedelta(days=30)
    users = (
        models.User.objects.filter(plugins__code="listenbrainz")
        .filter(plugins__conf__sync_listenings=True)
        .filter(last_activity__gte=active_month)
    )
    for user in users:
        plugins.trigger_hook(
            plugins.LISTENING_SYNC,
            user=user,
            confs=plugins.get_confs(user),
        )
@celery.app.task(name="listenbrainz.trigger_favorite_sync_with_listenbrainz")
def trigger_favorite_sync_with_listenbrainz():
    """Periodic task: fire the FAVORITE_SYNC hook for recently active users.

    Fix: the queryset filtered on ``sync_listenings`` (copy-paste from the
    listening-sync task above); favorite syncing is gated by the
    ``sync_favorites`` plugin option (see the plugin conf and the
    ``sync_favorites_from_listenbrainz`` hook).
    """
    now = timezone.now()
    active_month = now - datetime.timedelta(days=30)
    users = (
        models.User.objects.filter(plugins__code="listenbrainz")
        .filter(plugins__conf__sync_favorites=True)
        .filter(last_activity__gte=active_month)
    )
    for user in users:
        plugins.trigger_hook(
            plugins.FAVORITE_SYNC,
            user=user,
            confs=plugins.get_confs(user),
        )
@celery.app.task(name="listenbrainz.import_listenbrainz_listenings")
def import_listenbrainz_listenings(user, user_name, since):
    """Page through ListenBrainz listens for *user_name* (newer than
    *since*) and persist them locally.

    Fix: the original computed ``max(listens, key=...)`` — which returns
    the *Listen object*, not a timestamp — and passed that object as
    ``min_ts`` to the next API call. We pass the newest listened_at
    timestamp instead.
    """
    client = liblistenbrainz.ListenBrainz()
    response = client.get_listens(username=user_name, min_ts=since, count=100)
    listens = response["payload"]["listens"]
    while listens:
        add_lb_listenings_to_db(listens, user)
        new_ts = max(listen.listened_at for listen in listens)
        response = client.get_listens(username=user_name, min_ts=new_ts, count=100)
        listens = response["payload"]["listens"]
def add_lb_listenings_to_db(listens, user):
    """Persist ListenBrainz listens as local Listening rows for *user*.

    Skips listens that we submitted ourselves and that already exist
    locally, listens without a MusicBrainz id, and listens whose track
    is unknown to this instance.

    Fix: the no-mbid branch only logged and then fell through to
    ``Track.objects.get(mbid=None)``; it now ``continue``s. Also dropped
    the no-op ``user = user`` line.
    """
    logger = PLUGIN["logger"]
    fw_listens = []
    for listen in listens:
        if (
            listen.additional_info.get("submission_client")
            and listen.additional_info.get("submission_client")
            == "Funkwhale ListenBrainz plugin"
            and history_models.Listening.objects.filter(
                creation_date=datetime.datetime.fromtimestamp(
                    listen.listened_at, datetime.timezone.utc
                )
            ).exists()
        ):
            logger.info(
                f"Listen with ts {listen.listened_at} skipped because already in db"
            )
            continue
        mbid = (
            listen.mbid_mapping
            if hasattr(listen, "mbid_mapping")
            else listen.recording_mbid
        )
        if not mbid:
            logger.info("Received listening that doesn't have a mbid. Skipping...")
            continue
        try:
            track = music_models.Track.objects.get(mbid=mbid)
        except music_models.Track.DoesNotExist:
            logger.info(
                "Received listening that doesn't exist in fw database. Skipping..."
            )
            continue
        fw_listen = history_models.Listening(
            creation_date=datetime.datetime.fromtimestamp(
                listen.listened_at, datetime.timezone.utc
            ),
            track=track,
            actor=user.actor,
            source="Listenbrainz",
        )
        fw_listens.append(fw_listen)
    history_models.Listening.objects.bulk_create(fw_listens)
@celery.app.task(name="listenbrainz.import_listenbrainz_favorites")
def import_listenbrainz_favorites(user, user_name, since):
    """Import ListenBrainz feedback ("favorites") for *user_name*,
    paging forward until entries older than *since* are reached or the
    feed is exhausted."""
    client = liblistenbrainz.ListenBrainz()
    response = client.get_user_feedback(username=user_name)
    offset = 0
    while response["feedback"]:
        count = response["count"]
        offset = offset + count
        # Oldest creation timestamp in this page: once it predates
        # *since*, everything older has already been imported.
        last_sync = min(
            response["feedback"],
            key=lambda obj: datetime.datetime.fromtimestamp(
                obj["created"], datetime.timezone.utc
            ),
        )["created"]
        add_lb_feedback_to_db(response["feedback"], user)
        if last_sync <= since or count == 0:
            return
        response = client.get_user_feedback(username=user_name, offset=offset)
def add_lb_feedback_to_db(feedbacks, user):
    """Mirror ListenBrainz feedback entries onto local TrackFavorite rows.

    score 1 -> create the favorite, 0 -> delete it, -1 (dislike) is not
    supported by Funkwhale and only logged. Tracks unknown to this
    instance are skipped.
    """
    logger = PLUGIN["logger"]
    for feedback in feedbacks:
        try:
            track = music_models.Track.objects.get(mbid=feedback["recording_mbid"])
        except music_models.Track.DoesNotExist:
            logger.info(
                "Received feedback track that doesn't exist in fw database. Skipping..."
            )
            continue
        if feedback["score"] == 1:
            favorites_models.TrackFavorite.objects.get_or_create(
                actor=user.actor,
                creation_date=datetime.datetime.fromtimestamp(
                    feedback["created"], datetime.timezone.utc
                ),
                track=track,
                source="Listenbrainz",
            )
        elif feedback["score"] == 0:
            try:
                favorites_models.TrackFavorite.objects.get(
                    actor=user.actor, track=track
                ).delete()
            except favorites_models.TrackFavorite.DoesNotExist:
                continue
        elif feedback["score"] == -1:
            logger.info("Funkwhale doesn't support disliked tracks")

View File

@ -37,7 +37,7 @@ def get_payload(listening, api_key, conf):
# See https://github.com/krateng/maloja/blob/master/API.md
payload = {
"key": api_key,
"artists": [artist.name for artist in track.artist_credit.get_artists_list()],
"artists": [track.artist.name],
"title": track.title,
"time": int(listening.creation_date.timestamp()),
"nofix": bool(conf.get("nofix")),
@ -46,10 +46,8 @@ def get_payload(listening, api_key, conf):
if track.album:
if track.album.title:
payload["album"] = track.album.title
if track.album.artist_credit.all():
payload["albumartists"] = [
artist.name for artist in track.album.artist_credit.get_artists_list()
]
if track.album.artist:
payload["albumartists"] = [track.album.artist.name]
upload = track.uploads.filter(duration__gte=0).first()
if upload:

View File

@ -29,7 +29,7 @@ def forward_to_scrobblers(listening, conf, **kwargs):
(username + " " + password).encode("utf-8")
).hexdigest()
cache_key = "lastfm:sessionkey:{}".format(
":".join([str(listening.actor.pk), hashed_auth])
":".join([str(listening.user.pk), hashed_auth])
)
PLUGIN["logger"].info("Forwarding scrobble to %s", LASTFM_SCROBBLER_URL)
session_key = PLUGIN["cache"].get(cache_key)

View File

@ -84,7 +84,7 @@ def get_scrobble_payload(track, date, suffix="[0]"):
"""
upload = track.uploads.filter(duration__gte=0).first()
data = {
f"a{suffix}": track.get_artist_credit_string,
f"a{suffix}": track.artist.name,
f"t{suffix}": track.title,
f"l{suffix}": upload.duration if upload else 0,
f"b{suffix}": (track.album.title if track.album else "") or "",
@ -103,7 +103,7 @@ def get_scrobble2_payload(track, date, suffix="[0]"):
"""
upload = track.uploads.filter(duration__gte=0).first()
data = {
"artist": track.get_artist_credit_string,
"artist": track.artist.name,
"track": track.title,
"chosenByUser": 1,
}

View File

@ -314,12 +314,9 @@ class FunkwhaleProvider(internet_provider.Provider):
not random enough
"""
def federation_url(self, prefix="", obj_uuid=None, local=False):
if not obj_uuid:
obj_uuid = uuid.uuid4()
def federation_url(self, prefix="", local=False):
def path_generator():
return f"{prefix}/{obj_uuid}"
return f"{prefix}/{uuid.uuid4()}"
domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
protocol = "https"

View File

@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.TrackFavoriteActivitySerializer)
@record.registry.register_consumer("favorites.TrackFavorite")
def broadcast_track_favorite_to_instance_activity(data, obj):
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
if obj.user.privacy_level not in ["instance", "everyone"]:
return
channels.group_send(

View File

@ -5,5 +5,5 @@ from . import models
@admin.register(models.TrackFavorite)
class TrackFavoriteAdmin(admin.ModelAdmin):
list_display = ["actor", "track", "creation_date"]
list_select_related = ["actor", "track"]
list_display = ["user", "track", "creation_date"]
list_select_related = ["user", "track"]

View File

@ -1,28 +1,14 @@
import factory
from django.conf import settings
from funkwhale_api.factories import NoUpdateOnCreate, registry
from funkwhale_api.federation import models
from funkwhale_api.federation.factories import ActorFactory
from funkwhale_api.music.factories import TrackFactory
from funkwhale_api.users.factories import UserFactory
@registry.register
class TrackFavorite(NoUpdateOnCreate, factory.django.DjangoModelFactory):
track = factory.SubFactory(TrackFactory)
actor = factory.SubFactory(ActorFactory)
fid = factory.Faker("federation_url")
uuid = factory.Faker("uuid4")
user = factory.SubFactory(UserFactory)
class Meta:
model = "favorites.TrackFavorite"
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
return
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
0
]
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
self.save(update_fields=["fid"])

View File

@ -9,7 +9,7 @@ class TrackFavoriteFilter(moderation_filters.HiddenContentFilterSet):
q = fields.SearchFilter(
search_fields=["track__title", "track__artist__name", "track__album__title"]
)
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
class Meta:
model = models.TrackFavorite

View File

@ -1,18 +0,0 @@
# Generated by Django 3.2.20 on 2023-12-09 14:25
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds the optional "source" field on TrackFavorite, used elsewhere
    # in this codebase to tag favorites imported from external services
    # (e.g. source="Listenbrainz").

    dependencies = [
        ('favorites', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='trackfavorite',
            name='source',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
    ]

View File

@ -1,106 +0,0 @@
# Generated by Django 4.2.9 on 2024-03-28 23:32
import uuid
from django.db import migrations, models, transaction
import django.db.models.deletion
from django.conf import settings
from funkwhale_api.federation import utils
from django.urls import reverse
def gen_uuid(apps, schema_editor):
    """Backfill a unique uuid and a federation id (fid) on every
    existing TrackFavorite row."""
    MyModel = apps.get_model("favorites", "TrackFavorite")
    for row in MyModel.objects.all():
        unique_uuid = uuid.uuid4()
        # Collisions are astronomically unlikely, but this guarantees
        # uniqueness before the unique constraint is added later.
        while MyModel.objects.filter(uuid=unique_uuid).exists():
            unique_uuid = uuid.uuid4()
        fid = utils.full_url(
            reverse("federation:music:likes-detail", kwargs={"uuid": unique_uuid})
        )
        row.uuid = unique_uuid
        row.fid = fid
        row.save(update_fields=["uuid", "fid"])
def get_user_actor(apps, schema_editor):
    """Copy each favorite's ``user.actor`` into the new ``actor`` FK."""
    MyModel = apps.get_model("favorites", "TrackFavorite")
    for row in MyModel.objects.all():
        actor = row.user.actor
        row.actor = actor
        row.save(update_fields=["actor"])
class Migration(migrations.Migration):
dependencies = [
("favorites", "0002_trackfavorite_source"),
]
operations = [
migrations.AddField(
model_name="trackfavorite",
name="actor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="track_favorites",
to="federation.actor",
),
),
migrations.AddField(
model_name="trackfavorite",
name="fid",
field=models.URLField(default="https://default.fid"),
preserve_default=False,
),
migrations.AddField(
model_name="trackfavorite",
name="url",
field=models.URLField(blank=True, max_length=500, null=True),
),
migrations.AddField(
model_name="trackfavorite",
name="uuid",
field=models.UUIDField(null=True),
),
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="trackfavorite",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True, null=False),
),
migrations.AlterField(
model_name="trackfavorite",
name="fid",
field=models.URLField(
db_index=True,
max_length=500,
unique=True,
),
),
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="trackfavorite",
name="actor",
field=models.ForeignKey(
blank=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
related_name="track_favorites",
to="federation.actor",
), ),
migrations.AlterUniqueTogether(
name="trackfavorite",
unique_together={("track", "actor")},
),
migrations.RemoveField(
model_name="trackfavorite",
name="user",
),
]

View File

@ -1,91 +1,26 @@
import uuid
from django.db import models
from django.urls import reverse
from django.utils import timezone
from funkwhale_api.common import fields
from funkwhale_api.common import models as common_models
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music.models import Track
FAVORITE_PRIVACY_LEVEL_CHOICES = [
(k, l) for k, l in fields.PRIVACY_LEVEL_CHOICES if k != "followers"
]
class TrackFavoriteQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
def viewable_by(self, actor):
if actor is None:
return self.filter(actor__user__privacy_level="everyone")
if hasattr(actor, "user"):
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
instance_query = models.Q(
actor__user__privacy_level="instance", actor__domain=actor.domain
)
instance_actor_query = models.Q(
actor__user__privacy_level="instance", actor__domain=actor.domain
)
return self.filter(
me_query
| instance_query
| instance_actor_query
| models.Q(actor__user__privacy_level="everyone")
)
class TrackFavorite(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
class TrackFavorite(models.Model):
creation_date = models.DateTimeField(default=timezone.now)
actor = models.ForeignKey(
"federation.Actor",
related_name="track_favorites",
on_delete=models.CASCADE,
null=False,
blank=False,
user = models.ForeignKey(
"users.User", related_name="track_favorites", on_delete=models.CASCADE
)
track = models.ForeignKey(
Track, related_name="track_favorites", on_delete=models.CASCADE
)
source = models.CharField(max_length=100, null=True, blank=True)
federation_namespace = "likes"
objects = TrackFavoriteQuerySet.as_manager()
class Meta:
unique_together = ("track", "actor")
unique_together = ("track", "user")
ordering = ("-creation_date",)
@classmethod
def add(cls, track, actor):
favorite, created = cls.objects.get_or_create(actor=actor, track=track)
def add(cls, track, user):
favorite, created = cls.objects.get_or_create(user=user, track=track)
return favorite
def get_activity_url(self):
return f"{self.actor.get_absolute_url()}/favorites/tracks/{self.pk}"
def get_absolute_url(self):
return f"/library/tracks/{self.track.pk}"
def get_federation_id(self):
if self.fid:
return self.fid
return federation_utils.full_url(
reverse(
f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid},
)
)
def save(self, **kwargs):
if not self.pk and not self.fid:
self.fid = self.get_federation_id()
return super().save(**kwargs)
return f"{self.user.get_activity_url()}/favorites/tracks/{self.pk}"

View File

@ -1,8 +1,10 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.activity import serializers as activity_serializers
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer
from . import models
@ -10,24 +12,35 @@ from . import models
class TrackFavoriteActivitySerializer(activity_serializers.ModelSerializer):
type = serializers.SerializerMethodField()
object = TrackActivitySerializer(source="track")
actor = federation_serializers.APIActorSerializer(read_only=True)
actor = UserActivitySerializer(source="user")
published = serializers.DateTimeField(source="creation_date")
class Meta:
model = models.TrackFavorite
fields = ["id", "local_id", "object", "type", "actor", "published"]
def get_actor(self, obj):
return UserActivitySerializer(obj.user).data
def get_type(self, obj):
return "Like"
class UserTrackFavoriteSerializer(serializers.ModelSerializer):
track = TrackSerializer(read_only=True)
actor = federation_serializers.APIActorSerializer(read_only=True)
user = UserBasicSerializer(read_only=True)
actor = serializers.SerializerMethodField()
class Meta:
model = models.TrackFavorite
fields = ("id", "actor", "track", "creation_date", "actor")
fields = ("id", "user", "track", "creation_date", "actor")
actor = serializers.SerializerMethodField()
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
actor = obj.user.actor
if actor:
return federation_serializers.APIActorSerializer(actor).data
class UserTrackFavoriteWriteSerializer(serializers.ModelSerializer):

View File

@ -4,10 +4,8 @@ from rest_framework import mixins, status, viewsets
from rest_framework.decorators import action
from rest_framework.response import Response
from config import plugins
from funkwhale_api.activity import record
from funkwhale_api.common import fields, permissions
from funkwhale_api.federation import routes
from funkwhale_api.music import utils as music_utils
from funkwhale_api.music.models import Track
from funkwhale_api.users.oauth import permissions as oauth_permissions
@ -24,7 +22,7 @@ class TrackFavoriteViewSet(
filterset_class = filters.TrackFavoriteFilter
serializer_class = serializers.UserTrackFavoriteSerializer
queryset = models.TrackFavorite.objects.all().select_related(
"actor__attachment_icon"
"user__actor__attachment_icon"
)
permission_classes = [
oauth_permissions.ScopePermission,
@ -33,7 +31,6 @@ class TrackFavoriteViewSet(
required_scope = "favorites"
anonymous_policy = "setting"
owner_checks = ["write"]
owner_field = "actor.user"
def get_serializer_class(self):
if self.request.method.lower() in ["head", "get", "options"]:
@ -47,20 +44,7 @@ class TrackFavoriteViewSet(
instance = self.perform_create(serializer)
serializer = self.get_serializer(instance=instance)
headers = self.get_success_headers(serializer.data)
plugins.trigger_hook(
plugins.FAVORITE_CREATED,
track_favorite=serializer.instance,
confs=plugins.get_confs(self.request.user),
)
record.send(instance)
routes.outbox.dispatch(
{"type": "Like", "object": {"type": "Track"}},
context={
"track": instance.track,
"actor": instance.actor,
"id": instance.fid,
},
)
return Response(
serializer.data, status=status.HTTP_201_CREATED, headers=headers
)
@ -68,30 +52,19 @@ class TrackFavoriteViewSet(
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(
fields.privacy_level_query(
self.request.user, "actor__user__privacy_level", "actor__user"
)
fields.privacy_level_query(self.request.user, "user__privacy_level")
)
tracks = (
Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
)
.prefetch_related(
"artist_credit__artist",
"album__artist_credit__artist",
)
.select_related(
"attributed_to",
"album__attachment_cover",
)
tracks = Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
).select_related(
"artist", "album__artist", "attributed_to", "album__attachment_cover"
)
queryset = queryset.prefetch_related(Prefetch("track", queryset=tracks))
return queryset
def perform_create(self, serializer):
track = Track.objects.get(pk=serializer.data["track"])
favorite = models.TrackFavorite.add(track=track, actor=self.request.user.actor)
favorite = models.TrackFavorite.add(track=track, user=self.request.user)
return favorite
@extend_schema(operation_id="unfavorite_track")
@ -99,19 +72,10 @@ class TrackFavoriteViewSet(
def remove(self, request, *args, **kwargs):
try:
pk = int(request.data["track"])
favorite = request.user.actor.track_favorites.get(track__pk=pk)
favorite = request.user.track_favorites.get(track__pk=pk)
except (AttributeError, ValueError, models.TrackFavorite.DoesNotExist):
return Response({}, status=400)
routes.outbox.dispatch(
{"type": "Dislike", "object": {"type": "Track"}},
context={"favorite": favorite},
)
favorite.delete()
plugins.trigger_hook(
plugins.FAVORITE_DELETED,
track_favorite=favorite,
confs=plugins.get_confs(self.request.user),
)
return Response([], status=status.HTTP_204_NO_CONTENT)
@extend_schema(
@ -128,9 +92,7 @@ class TrackFavoriteViewSet(
if not request.user.is_authenticated:
return Response({"results": [], "count": 0}, status=401)
favorites = request.user.actor.track_favorites.values("id", "track").order_by(
"id"
)
favorites = request.user.track_favorites.values("id", "track").order_by("id")
payload = serializers.AllFavoriteSerializer(favorites).data
return Response(payload, status=200)

View File

@ -119,9 +119,6 @@ def should_reject(fid, actor_id=None, payload={}):
@transaction.atomic
def receive(activity, on_behalf_of, inbox_actor=None):
"""
Receive an activity, find his recipients and save it to the database before dispatching it
"""
from funkwhale_api.moderation import mrf
from . import models, serializers, tasks
@ -226,9 +223,6 @@ class InboxRouter(Router):
"""
from . import api_serializers, models
logger.debug(
f"[federation] Inbox dispatch payload : {payload} with context : {context}"
)
handlers = self.get_matching_handlers(payload)
for handler in handlers:
if call_handlers:
@ -299,59 +293,6 @@ def schedule_key_rotation(actor_id, delay):
tasks.rotate_actor_key.apply_async(kwargs={"actor_id": actor_id}, countdown=delay)
def activity_pass_user_privacy_level(context, routing):
TYPE_FOLLOW_USER_PRIVACY_LEVEL = ["Listen", "Like", "Create"]
TYPE_IGNORE_USER_PRIVACY_LEVEL = ["Delete", "Accept", "Follow"]
MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]
actor = context.get("actor", False)
type = routing.get("type", False)
object_type = routing.get("object", {}).get("type", None)
if not actor:
logger.warning(
"No actor provided in activity context : \
we cannot follow actor.privacy_level, activity will be sent by default."
)
# We do not consider music metadata has private
if object_type in MUSIC_OBJECT_TYPE:
return True
if type:
if type in TYPE_IGNORE_USER_PRIVACY_LEVEL:
return True
if type in TYPE_FOLLOW_USER_PRIVACY_LEVEL and actor and actor.is_local:
if actor.user.privacy_level in [
"me",
"instance",
]:
return False
return True
return True
def activity_pass_object_privacy_level(context, routing):
MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]
# we only support playlist federation for now
object = context.get("playlist", False)
obj_privacy_level = object.privacy_level if object else None
object_type = routing.get("object", {}).get("type", None)
# We do not consider music metadata has private
if object_type in MUSIC_OBJECT_TYPE:
return True
if object and obj_privacy_level and obj_privacy_level in ["me", "instance"]:
return False
return True
class OutboxRouter(Router):
@transaction.atomic
def dispatch(self, routing, context):
@ -364,7 +305,6 @@ class OutboxRouter(Router):
from . import models, tasks
logger.debug(f"[federation] Outbox dispatch context : {context}")
allow_list_enabled = preferences.get("moderation__allow_list_enabled")
allowed_domains = None
if allow_list_enabled:
@ -374,18 +314,6 @@ class OutboxRouter(Router):
)
)
if activity_pass_user_privacy_level(context, routing) is False:
logger.info(
"[federation] Discarding outbox dispatch due to user privacy_level"
)
return
if activity_pass_object_privacy_level(context, routing) is False:
logger.info(
"[federation] Discarding outbox dispatch due to object privacy_level"
)
return
for route, handler in self.routes:
if not match_route(route, routing):
continue
@ -469,7 +397,6 @@ class OutboxRouter(Router):
)
for a in activities:
logger.info(f"[federation] OUtbox sending activity : {a.pk}")
funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
return activities
@ -627,6 +554,12 @@ def get_actors_from_audience(urls):
final_query, Q(pk__in=actor_follows.values_list("actor", flat=True))
)
library_follows = models.LibraryFollow.objects.filter(
queries["followed"], approved=True
)
final_query = funkwhale_utils.join_queries_or(
final_query, Q(pk__in=library_follows.values_list("actor", flat=True))
)
if not final_query:
return models.Actor.objects.none()
return models.Actor.objects.filter(final_query)

View File

@ -1,5 +1,4 @@
import datetime
from urllib.parse import urlparse
from django.conf import settings
from django.core import validators
@ -10,10 +9,9 @@ from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.audio import models as audio_models
from funkwhale_api.audio import serializers as audio_serializers
from funkwhale_api.common import fields as common_fields
from funkwhale_api.common import serializers as common_serializers
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.users import serializers as users_serializers
from . import filters, models
@ -57,6 +55,7 @@ class LibrarySerializer(serializers.ModelSerializer):
"uuid",
"actor",
"name",
"description",
"creation_date",
"uploads_count",
"privacy_level",
@ -98,30 +97,6 @@ class LibraryFollowSerializer(serializers.ModelSerializer):
return federation_serializers.APIActorSerializer(o.actor).data
class FollowSerializer(serializers.ModelSerializer):
target = common_serializers.RelatedField(
"fid", federation_serializers.APIActorSerializer(), required=True
)
actor = serializers.SerializerMethodField()
class Meta:
model = models.Follow
fields = ["creation_date", "actor", "uuid", "target", "approved"]
read_only_fields = ["uuid", "actor", "approved", "creation_date"]
def validate_target(self, v):
request_actor = self.context["actor"]
if v == request_actor:
raise serializers.ValidationError("You cannot follow yourself")
if v.received_follows.filter(actor=request_actor).exists():
raise serializers.ValidationError("You are already following this user")
return v
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, o):
return federation_serializers.APIActorSerializer(o.actor).data
def serialize_generic_relation(activity, obj):
data = {"type": obj._meta.label}
if data["type"] == "federation.Actor":
@ -131,11 +106,9 @@ def serialize_generic_relation(activity, obj):
if data["type"] == "music.Library":
data["name"] = obj.name
if (
data["type"] == "federation.LibraryFollow"
or data["type"] == "federation.Follow"
):
if data["type"] == "federation.LibraryFollow":
data["approved"] = obj.approved
return data
@ -193,32 +166,21 @@ class InboxItemActionSerializer(common_serializers.ActionSerializer):
return objects.update(is_read=True)
OBJECT_SERIALIZER_MAPPING = {
music_models.Artist: federation_serializers.ArtistSerializer,
music_models.Album: federation_serializers.AlbumSerializer,
music_models.Track: federation_serializers.TrackSerializer,
models.Actor: federation_serializers.APIActorSerializer,
audio_models.Channel: audio_serializers.ChannelSerializer,
playlists_models.Playlist: federation_serializers.PlaylistSerializer,
FETCH_OBJECT_CONFIG = {
"artist": {"queryset": music_models.Artist.objects.all()},
"album": {"queryset": music_models.Album.objects.all()},
"track": {"queryset": music_models.Track.objects.all()},
"library": {"queryset": music_models.Library.objects.all(), "id_attr": "uuid"},
"upload": {"queryset": music_models.Upload.objects.all(), "id_attr": "uuid"},
"account": {"queryset": models.Actor.objects.all(), "id_attr": "full_username"},
"channel": {"queryset": audio_models.Channel.objects.all(), "id_attr": "uuid"},
}
def convert_url_to_webfinger(url):
parsed_url = urlparse(url)
domain = parsed_url.netloc # e.g., "node1.funkwhale.test"
path_parts = parsed_url.path.strip("/").split("/")
# Ensure the path is in the expected format
if len(path_parts) > 0 and path_parts[0].startswith("@"):
username = path_parts[0][1:] # Remove the '@'
return f"{username}@{domain}"
return None
FETCH_OBJECT_FIELD = common_fields.GenericRelation(FETCH_OBJECT_CONFIG)
class FetchSerializer(serializers.ModelSerializer):
actor = federation_serializers.APIActorSerializer(read_only=True)
object_uri = serializers.CharField(required=True, write_only=True)
object = serializers.SerializerMethodField(read_only=True)
type = serializers.SerializerMethodField(read_only=True)
object = serializers.CharField(write_only=True)
force = serializers.BooleanField(default=False, required=False, write_only=True)
class Meta:
@ -231,10 +193,8 @@ class FetchSerializer(serializers.ModelSerializer):
"detail",
"creation_date",
"fetch_date",
"object_uri",
"force",
"type",
"object",
"force",
]
read_only_fields = [
"id",
@ -244,36 +204,10 @@ class FetchSerializer(serializers.ModelSerializer):
"detail",
"creation_date",
"fetch_date",
"type",
"object",
]
def get_type(self, fetch):
obj = fetch.object
if obj is None:
return None
# Return the type as a string
if isinstance(obj, music_models.Artist):
return "artist"
elif isinstance(obj, music_models.Album):
return "album"
elif isinstance(obj, music_models.Track):
return "track"
elif isinstance(obj, models.Actor):
return "account"
elif isinstance(obj, audio_models.Channel):
return "channel"
elif isinstance(obj, playlists_models.Playlist):
return "playlist"
else:
return None
def validate_object_uri(self, value):
if value.startswith("https://"):
converted = convert_url_to_webfinger(value)
if converted:
value = converted
def validate_object(self, value):
# if value is a webginfer lookup, we craft a special url
if value.startswith("@"):
value = value.lstrip("@")
validator = validators.EmailValidator()
@ -281,30 +215,9 @@ class FetchSerializer(serializers.ModelSerializer):
validator(value)
except validators.ValidationError:
return value
return f"webfinger://{value}"
@extend_schema_field(
{
"oneOf": [
{"$ref": "#/components/schemas/Artist"},
{"$ref": "#/components/schemas/Album"},
{"$ref": "#/components/schemas/Track"},
{"$ref": "#/components/schemas/APIActor"},
{"$ref": "#/components/schemas/Channel"},
{"$ref": "#/components/schemas/Playlist"},
]
}
)
def get_object(self, fetch):
obj = fetch.object
if obj is None:
return None
serializer_class = OBJECT_SERIALIZER_MAPPING.get(type(obj))
if serializer_class:
return serializer_class(obj).data
return None
def create(self, validated_data):
check_duplicates = not validated_data.get("force", False)
if check_duplicates:
@ -313,7 +226,7 @@ class FetchSerializer(serializers.ModelSerializer):
validated_data["actor"]
.fetches.filter(
status="finished",
url=validated_data["object_uri"],
url=validated_data["object"],
creation_date__gte=timezone.now()
- datetime.timedelta(
seconds=settings.FEDERATION_DUPLICATE_FETCH_DELAY
@ -326,10 +239,18 @@ class FetchSerializer(serializers.ModelSerializer):
return duplicate
fetch = models.Fetch.objects.create(
actor=validated_data["actor"], url=validated_data["object_uri"]
actor=validated_data["actor"], url=validated_data["object"]
)
return fetch
def to_representation(self, obj):
repr = super().to_representation(obj)
object_data = None
if obj.object:
object_data = FETCH_OBJECT_FIELD.to_representation(obj.object)
repr["object"] = object_data
return repr
class FullActorSerializer(serializers.Serializer):
fid = serializers.URLField()

View File

@ -5,7 +5,6 @@ from . import api_views
router = routers.OptionalSlashRouter()
router.register(r"fetches", api_views.FetchViewSet, "fetches")
router.register(r"follows/library", api_views.LibraryFollowViewSet, "library-follows")
router.register(r"follows/user", api_views.UserFollowViewSet, "user-follows")
router.register(r"inbox", api_views.InboxItemViewSet, "inbox")
router.register(r"libraries", api_views.LibraryViewSet, "libraries")
router.register(r"domains", api_views.DomainViewSet, "domains")

View File

@ -4,7 +4,6 @@ from django.db import transaction
from django.db.models import Count, Q
from drf_spectacular.utils import extend_schema, extend_schema_view
from rest_framework import decorators, mixins, permissions, response, viewsets
from rest_framework.exceptions import NotFound as RestNotFound
from funkwhale_api.common import preferences
from funkwhale_api.common import utils as common_utils
@ -290,12 +289,7 @@ class ActorViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
def get_object(self):
queryset = self.get_queryset()
username, domain = self.kwargs["full_username"].split("@", 1)
try:
return queryset.get(preferred_username=username, domain_id=domain)
except models.Actor.DoesNotExist:
raise RestNotFound(
detail=f"Actor {username}@{domain} not found",
)
return queryset.get(preferred_username=username, domain_id=domain)
def get_queryset(self):
qs = super().get_queryset()
@ -317,106 +311,3 @@ class ActorViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
filter_uploads=lambda o, uploads: uploads.filter(library__actor=o)
)
)
@extend_schema_view(
list=extend_schema(operation_id="get_federation_received_follows"),
create=extend_schema(operation_id="create_federation_user_follow"),
)
class UserFollowViewSet(
mixins.CreateModelMixin,
mixins.ListModelMixin,
mixins.RetrieveModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet,
):
lookup_field = "uuid"
queryset = (
models.Follow.objects.all()
.order_by("-creation_date")
.select_related("actor", "target")
.filter(actor__type="Person")
)
serializer_class = api_serializers.FollowSerializer
permission_classes = [oauth_permissions.ScopePermission]
required_scope = "follows"
ordering_fields = ("creation_date",)
@extend_schema(operation_id="get_federation_user_follow")
def retrieve(self, request, *args, **kwargs):
return super().retrieve(request, *args, **kwargs)
@extend_schema(operation_id="delete_federation_user_follow")
def destroy(self, request, uuid=None):
return super().destroy(request, uuid)
def get_queryset(self):
qs = super().get_queryset()
return qs.filter(
Q(target=self.request.user.actor) | Q(actor=self.request.user.actor)
).exclude(approved=False)
def perform_create(self, serializer):
follow = serializer.save(actor=self.request.user.actor)
routes.outbox.dispatch({"type": "Follow"}, context={"follow": follow})
@transaction.atomic
def perform_destroy(self, instance):
routes.outbox.dispatch(
{"type": "Undo", "object": {"type": "Follow"}}, context={"follow": instance}
)
instance.delete()
def get_serializer_context(self):
context = super().get_serializer_context()
context["actor"] = self.request.user.actor
return context
@extend_schema(
operation_id="accept_federation_user_follow",
responses={404: None, 204: None},
)
@decorators.action(methods=["post"], detail=True)
def accept(self, request, *args, **kwargs):
try:
follow = self.queryset.get(
target=self.request.user.actor, uuid=kwargs["uuid"]
)
except models.Follow.DoesNotExist:
return response.Response({}, status=404)
update_follow(follow, approved=True)
return response.Response(status=204)
@extend_schema(operation_id="reject_federation_user_follow")
@decorators.action(methods=["post"], detail=True)
def reject(self, request, *args, **kwargs):
try:
follow = self.queryset.get(
target=self.request.user.actor, uuid=kwargs["uuid"]
)
except models.Follow.DoesNotExist:
return response.Response({}, status=404)
update_follow(follow, approved=False)
return response.Response(status=204)
@extend_schema(operation_id="get_all_federation_library_follows")
@decorators.action(methods=["get"], detail=False)
def all(self, request, *args, **kwargs):
"""
Return all the subscriptions of the current user, with only limited data
to have a performant endpoint and avoid lots of queries just to display
subscription status in the UI
"""
follows = list(
self.get_queryset().values_list("uuid", "target__fid", "approved")
)
payload = {
"results": [
{"uuid": str(u[0]), "actor": str(u[1]), "approved": u[2]}
for u in follows
],
"count": len(follows),
}
return response.Response(payload, status=200)

View File

@ -81,12 +81,11 @@ class SignatureAuthentication(authentication.BaseAuthentication):
fetch_delay = 24 * 3600
now = timezone.now()
last_fetch = actor.domain.nodeinfo_fetch_date
if not actor.domain.is_local:
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
return actor
def authenticate(self, request):

View File

@ -293,10 +293,7 @@ CONTEXTS = [
"Album": "fw:Album",
"Track": "fw:Track",
"Artist": "fw:Artist",
"ArtistCredit": "fw:ArtistCredit",
"Library": "fw:Library",
"Playlist": "fw:Playlist",
"PlaylistTrack": "fw:PlaylistTrack",
"bitrate": {"@id": "fw:bitrate", "@type": "xsd:nonNegativeInteger"},
"size": {"@id": "fw:size", "@type": "xsd:nonNegativeInteger"},
"position": {"@id": "fw:position", "@type": "xsd:nonNegativeInteger"},
@ -305,23 +302,13 @@ CONTEXTS = [
"track": {"@id": "fw:track", "@type": "@id"},
"cover": {"@id": "fw:cover", "@type": "as:Link"},
"album": {"@id": "fw:album", "@type": "@id"},
"artist": {"@id": "fw:artist", "@type": "@id"},
"artists": {"@id": "fw:artists", "@type": "@id", "@container": "@list"},
"artist_credit": {
"@id": "fw:artist_credit",
"@type": "@id",
"@container": "@list",
},
"joinphrase": {"@id": "fw:joinphrase", "@type": "xsd:string"},
"credit": {"@id": "fw:credit", "@type": "xsd:string"},
"index": {"@id": "fw:index", "@type": "xsd:nonNegativeInteger"},
"released": {"@id": "fw:released", "@type": "xsd:date"},
"musicbrainzId": "fw:musicbrainzId",
"license": {"@id": "fw:license", "@type": "@id"},
"copyright": "fw:copyright",
"category": "schema:category",
"language": "schema:inLanguage",
"playlist": {"@id": "fw:playlist", "@type": "@id"},
}
},
},

View File

@ -128,6 +128,11 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
class Meta:
model = models.Actor
class Params:
with_real_keys = factory.Trait(
keys=factory.LazyFunction(keys.get_key_pair),
)
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
@ -148,26 +153,6 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
extracted.actor = self
extracted.save(update_fields=["user"])
else:
user = UserFactory(actor=self, **kwargs)
user.actor = self
user.save()
@factory.post_generation
def user(self, create, extracted, **kwargs):
"""
Handle the creation or assignment of the related user instance.
If `actor__user` is passed, it will be linked; otherwise, no user is created.
"""
if not create:
return
if extracted: # If a User instance is provided
extracted.actor = self
extracted.save(update_fields=["actor"])
elif kwargs:
from funkwhale_api.users.factories import UserFactory
# Create a User linked to this Actor
self.user = UserFactory(actor=self, **kwargs)
@ -185,25 +170,22 @@ class FollowFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
@registry.register
class MusicLibraryFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
uuid = factory.Faker("uuid4")
actor = factory.SubFactory(ActorFactory)
privacy_level = "me"
name = privacy_level
name = factory.Faker("sentence")
description = factory.Faker("sentence")
uploads_count = 0
fid = factory.Faker("federation_url")
followers_url = factory.LazyAttribute(
lambda o: o.fid + "/followers" if o.fid else None
)
class Meta:
model = "music.Library"
class Params:
local = factory.Trait(
fid=factory.Faker(
"federation_url",
local=True,
prefix="federation/music/libraries",
obj_uuid=factory.SelfAttribute("..uuid"),
),
actor=factory.SubFactory(ActorFactory, local=True),
fid=None, actor=factory.SubFactory(ActorFactory, local=True)
)

View File

@ -191,6 +191,7 @@ def prepare_for_serializer(payload, config, fallbacks={}):
value = noop
if not aliases:
continue
for a in aliases:
try:
value = get_value(
@ -278,6 +279,7 @@ class JsonLdSerializer(serializers.Serializer):
for field in dereferenced_fields:
for i in get_ids(data[field]):
dereferenced_ids.add(i)
if dereferenced_ids:
try:
loop = asyncio.get_event_loop()

View File

@ -9,7 +9,7 @@ MODELS = [
(music_models.Album, ["fid"]),
(music_models.Track, ["fid"]),
(music_models.Upload, ["fid"]),
(music_models.Library, ["fid"]),
(music_models.Library, ["fid", "followers_url"]),
(
federation_models.Actor,
[

View File

@ -218,6 +218,7 @@ class Actor(models.Model):
on_delete=models.SET_NULL,
related_name="iconed_actor",
)
objects = ActorQuerySet.as_manager()
class Meta:
@ -250,15 +251,9 @@ class Actor(models.Model):
follows = self.received_follows.filter(approved=True)
return self.followers.filter(pk__in=follows.values_list("actor", flat=True))
def get_approved_followings(self):
follows = self.emitted_follows.filter(approved=True)
return Actor.objects.filter(pk__in=follows.values_list("target", flat=True))
def should_autoapprove_follow(self, actor):
if self.get_channel():
return True
if self.user.privacy_level == "public":
return True
return False
def get_user(self):
@ -405,7 +400,6 @@ class Fetch(models.Model):
serializers.ChannelUploadSerializer,
],
contexts.FW.Library: [serializers.LibrarySerializer],
contexts.FW.Playlist: [serializers.PlaylistSerializer],
contexts.AS.Group: [serializers.ActorSerializer],
contexts.AS.Person: [serializers.ActorSerializer],
contexts.AS.Organization: [serializers.ActorSerializer],

View File

@ -3,10 +3,7 @@ import uuid
from django.db.models import Q
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.history import models as history_models
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlist_models
from . import activity, actors, models, serializers
@ -166,7 +163,7 @@ def outbox_follow(context):
def outbox_create_audio(context):
upload = context["upload"]
channel = upload.library.get_channel()
followers_target = channel.actor if channel else upload.library.actor
followers_target = channel.actor if channel else upload.library
actor = channel.actor if channel else upload.library.actor
if channel:
serializer = serializers.ChannelCreateUploadSerializer(upload)
@ -296,7 +293,7 @@ def inbox_delete_audio(payload, context):
upload_fids = [payload["object"]["id"]]
query = Q(fid__in=upload_fids) & (
Q(library__actor=actor) | Q(track__artist_credit__artist__channel__actor=actor)
Q(library__actor=actor) | Q(track__artist__channel__actor=actor)
)
candidates = music_models.Upload.objects.filter(query)
@ -310,8 +307,8 @@ def outbox_delete_audio(context):
uploads = context["uploads"]
library = uploads[0].library
channel = library.get_channel()
followers_target = channel.actor if channel else library
actor = channel.actor if channel else library.actor
followers_target = channel.actor if channel else actor
serializer = serializers.ActivitySerializer(
{
"type": "Delete",
@ -580,9 +577,7 @@ def inbox_delete_album(payload, context):
logger.debug("Discarding deletion of empty library")
return
query = Q(fid=album_id) & (
Q(attributed_to=actor) | Q(artist_credit__artist__channel__actor=actor)
)
query = Q(fid=album_id) & (Q(attributed_to=actor) | Q(artist__channel__actor=actor))
try:
album = music_models.Album.objects.get(query)
except music_models.Album.DoesNotExist:
@ -595,10 +590,9 @@ def inbox_delete_album(payload, context):
@outbox.register({"type": "Delete", "object.type": "Album"})
def outbox_delete_album(context):
album = context["album"]
album_artist = album.artist_credit.all()[0].artist
actor = (
album_artist.channel.actor
if album_artist.get_channel()
album.artist.channel.actor
if album.artist.get_channel()
else album.attributed_to
)
actor = actor or actors.get_service_actor()
@ -614,229 +608,3 @@ def outbox_delete_album(context):
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
),
}
@outbox.register({"type": "Like", "object.type": "Track"})
def outbox_create_track_favorite(context):
track = context["track"]
actor = context["actor"]
serializer = serializers.ActivitySerializer(
{
"type": "Like",
"id": context["id"],
"object": {"type": "Track", "id": track.fid},
}
)
yield {
"type": "Like",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@outbox.register({"type": "Dislike", "object.type": "Track"})
def outbox_delete_favorite(context):
favorite = context["favorite"]
actor = favorite.actor
serializer = serializers.ActivitySerializer(
{"type": "Dislike", "object": {"type": "Track", "id": favorite.track.fid}}
)
yield {
"type": "Dislike",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@inbox.register({"type": "Like", "object.type": "Track"})
def inbox_create_favorite(payload, context):
serializer = serializers.TrackFavoriteSerializer(data=payload)
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Dislike", "object.type": "Track"})
def inbox_delete_favorite(payload, context):
actor = context["actor"]
track_id = payload["object"].get("id")
query = Q(track__fid=track_id) & Q(actor=actor)
try:
favorite = favorites_models.TrackFavorite.objects.get(query)
except favorites_models.TrackFavorite.DoesNotExist:
logger.debug(
"Discarding deletion of unkwnown favorite with track : %s", track_id
)
return
favorite.delete()
@outbox.register({"type": "Listen", "object.type": "Track"})
def outbox_create_listening(context):
track = context["track"]
actor = context["actor"]
serializer = serializers.ActivitySerializer(
{
"type": "Listen",
"id": context["id"],
"object": {"type": "Track", "id": track.fid},
}
)
yield {
"type": "Listen",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@outbox.register({"type": "Delete", "object.type": "Listen"})
def outbox_delete_listening(context):
listening = context["listening"]
actor = listening.actor
serializer = serializers.ActivitySerializer(
{"type": "Delete", "object": {"type": "Listen", "id": listening.fid}}
)
yield {
"type": "Delete",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": actor}],
),
}
@inbox.register({"type": "Listen", "object.type": "Track"})
def inbox_create_listening(payload, context):
serializer = serializers.ListeningSerializer(data=payload)
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Delete", "object.type": "Listen"})
def inbox_delete_listening(payload, context):
actor = context["actor"]
listening_id = payload["object"].get("id")
query = Q(fid=listening_id) & Q(actor=actor)
try:
favorite = history_models.Listening.objects.get(query)
except history_models.Listening.DoesNotExist:
logger.debug("Discarding deletion of unkwnown listening %s", listening_id)
return
favorite.delete()
@outbox.register({"type": "Create", "object.type": "Playlist"})
def outbox_create_playlist(context):
playlist = context["playlist"]
serializer = serializers.ActivitySerializer(
{
"type": "Create",
"actor": playlist.actor,
"id": playlist.fid,
"object": serializers.PlaylistSerializer(playlist).data,
}
)
yield {
"type": "Create",
"actor": playlist.actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": playlist.actor}],
),
}
@outbox.register({"type": "Delete", "object.type": "Playlist"})
def outbox_delete_playlist(context):
playlist = context["playlist"]
actor = playlist.actor
serializer = serializers.ActivitySerializer(
{"type": "Delete", "object": {"type": "Playlist", "id": playlist.fid}}
)
yield {
"type": "Delete",
"actor": actor,
"payload": with_recipients(
serializer.data,
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
),
}
@inbox.register({"type": "Create", "object.type": "Playlist"})
def inbox_create_playlist(payload, context):
serializer = serializers.PlaylistSerializer(data=payload["object"])
serializer.is_valid(raise_exception=True)
instance = serializer.save()
return {"object": instance}
@inbox.register({"type": "Delete", "object.type": "Playlist"})
def inbox_delete_playlist(payload, context):
actor = context["actor"]
playlist_id = payload["object"].get("id")
query = Q(fid=playlist_id) & Q(actor=actor)
try:
playlist = playlist_models.Playlist.objects.get(query)
except playlist_models.Playlist.DoesNotExist:
logger.debug("Discarding deletion of unkwnown listening %s", playlist_id)
return
playlist.playlist_tracks.all().delete()
playlist.delete()
@inbox.register({"type": "Update", "object.type": "Playlist"})
def inbox_update_playlist(payload, context):
"""If we receive an update on an unkwnown playlist, we create the playlist"""
playlist_id = payload["object"].get("id")
serializer = serializers.PlaylistSerializer(data=payload["object"])
if serializer.is_valid(raise_exception=True):
playlist = serializer.save()
# we update the playlist.library to get the plt.track.uploads locally
if follows := playlist.library.received_follows.filter(approved=True):
playlist.library.schedule_scan(follows[0].actor, force=True)
# we trigger a scan since we use this activity to avoid sending many PlaylistTracks activities
playlist.schedule_scan(actors.get_service_actor(), force=True)
return
else:
logger.debug(
"Discarding update of playlist_id %s because of payload errors: %s",
playlist_id,
serializer.errors,
)
@outbox.register({"type": "Update", "object.type": "Playlist"})
def outbox_update_playlist(context):
playlist = context["playlist"]
serializer = serializers.ActivitySerializer(
{"type": "Update", "object": serializers.PlaylistSerializer(playlist).data}
)
yield {
"type": "Update",
"actor": playlist.actor,
"payload": with_recipients(
serializer.data,
to=[{"type": "followers", "target": playlist.actor}],
),
}

View File

@ -1,31 +1,27 @@
import logging
import os
import re
import urllib.parse
import uuid
from django.core.exceptions import ObjectDoesNotExist
from django.core.paginator import Paginator
from django.db import transaction
from django.db.models import Q
from django.urls import reverse
from django.utils import timezone
from rest_framework import serializers
from funkwhale_api.common import models as common_models
from funkwhale_api.common import utils as common_utils
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.federation import activity, actors, contexts, jsonld, models, utils
from funkwhale_api.history import models as history_models
from funkwhale_api.moderation import models as moderation_models
from funkwhale_api.moderation import serializers as moderation_serializers
from funkwhale_api.moderation import signals as moderation_signals
from funkwhale_api.music import licenses
from funkwhale_api.music import models as music_models
from funkwhale_api.music import tasks as music_tasks
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.tags import models as tags_models
from . import activity, actors, contexts, jsonld, models, utils
logger = logging.getLogger(__name__)
@ -344,11 +340,9 @@ class ActorSerializer(jsonld.JsonLdSerializer):
ret["url"] = [
{
"type": "Link",
"href": (
instance.channel.get_absolute_url()
if instance.channel.artist.is_local
else instance.get_absolute_url()
),
"href": instance.channel.get_absolute_url()
if instance.channel.artist.is_local
else instance.get_absolute_url(),
"mediaType": "text/html",
},
{
@ -442,11 +436,9 @@ class ActorSerializer(jsonld.JsonLdSerializer):
common_utils.attach_file(
actor,
"attachment_icon",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
rss_url = get_by_media_type(
@ -499,11 +491,9 @@ def create_or_update_channel(actor, rss_url, attributed_to_fid, **validated_data
common_utils.attach_file(
artist,
"attachment_cover",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
tags = [t["name"] for t in validated_data.get("tags", []) or []]
tags_models.set_tags(artist, *tags)
@ -654,6 +644,7 @@ class FollowSerializer(serializers.Serializer):
def save(self, **kwargs):
target = self.validated_data["object"]
if target._meta.label == "music.Library":
follow_class = models.LibraryFollow
else:
@ -821,9 +812,7 @@ class UndoFollowSerializer(serializers.Serializer):
actor=validated_data["actor"], target=target
).get()
except follow_class.DoesNotExist:
raise serializers.ValidationError(
f"No follow to remove follow_class = {follow_class}"
)
raise serializers.ValidationError("No follow to remove")
return validated_data
def to_representation(self, instance):
@ -890,6 +879,7 @@ class ActivitySerializer(serializers.Serializer):
object_serializer = OBJECT_SERIALIZERS[type]
except KeyError:
raise serializers.ValidationError(f"Unsupported type {type}")
serializer = object_serializer(data=value)
serializer.is_valid(raise_exception=True)
return serializer.data
@ -940,13 +930,10 @@ OBJECT_SERIALIZERS = {t: ObjectSerializer for t in activity.OBJECT_TYPES}
def get_additional_fields(data):
UNSET = object()
additional_fields = {}
for field in ["name", "summary", "library", "audience", "published"]:
for field in ["name", "summary"]:
v = data.get(field, UNSET)
if v == UNSET:
continue
# in some cases we use the serializer context to pass objects instances, we don't want to add them
if not isinstance(v, str) or isinstance(v, dict):
continue
additional_fields[field] = v
return additional_fields
@ -977,7 +964,7 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
first = common_utils.set_query_parameter(conf["id"], page=1)
current = first
last = common_utils.set_query_parameter(conf["id"], page=paginator.num_pages)
data = {
d = {
"id": conf["id"],
"attributedTo": conf["actor"].fid,
"totalItems": paginator.count,
@ -986,10 +973,10 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
"first": first,
"last": last,
}
data.update(get_additional_fields(conf))
d.update(get_additional_fields(conf))
if self.context.get("include_ap_context", True):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
class LibrarySerializer(PaginatedCollectionSerializer):
@ -999,6 +986,8 @@ class LibrarySerializer(PaginatedCollectionSerializer):
actor = serializers.URLField(max_length=500, required=False)
attributedTo = serializers.URLField(max_length=500, required=False)
name = serializers.CharField()
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
followers = serializers.URLField(max_length=500)
audience = serializers.ChoiceField(
choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"],
required=False,
@ -1015,7 +1004,9 @@ class LibrarySerializer(PaginatedCollectionSerializer):
PAGINATED_COLLECTION_JSONLD_MAPPING,
{
"name": jsonld.first_val(contexts.AS.name),
"summary": jsonld.first_val(contexts.AS.summary),
"audience": jsonld.first_id(contexts.AS.audience),
"followers": jsonld.first_id(contexts.AS.followers),
"actor": jsonld.first_id(contexts.AS.actor),
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
},
@ -1037,20 +1028,18 @@ class LibrarySerializer(PaginatedCollectionSerializer):
conf = {
"id": library.fid,
"name": library.name,
"summary": library.description,
"page_size": 100,
"attributedTo": library.actor,
"actor": library.actor,
"items": (
library.uploads.for_federation()
if not library.playlist_uploads.all()
else library.playlist_uploads.for_federation()
),
"items": library.uploads.for_federation(),
"type": "Library",
}
r = super().to_representation(conf)
r["audience"] = (
contexts.AS.Public if library.privacy_level == "everyone" else ""
)
r["followers"] = library.followers_url
return r
def create(self, validated_data):
@ -1070,6 +1059,8 @@ class LibrarySerializer(PaginatedCollectionSerializer):
defaults={
"uploads_count": validated_data["totalItems"],
"name": validated_data["name"],
"description": validated_data.get("summary"),
"followers_url": validated_data["followers"],
"privacy_level": privacy[validated_data["audience"]],
},
)
@ -1136,12 +1127,7 @@ class CollectionPageSerializer(jsonld.JsonLdSerializer):
"last": last,
"items": [
conf["item_serializer"](
i,
context={
"actor": conf["actor"],
"library": conf.get("library", None),
"include_ap_context": False,
},
i, context={"actor": conf["actor"], "include_ap_context": False}
).data
for i in page.object_list
],
@ -1235,22 +1221,12 @@ class MusicEntitySerializer(jsonld.JsonLdSerializer):
self.updateable_fields, validated_data, instance
)
updated_fields = self.validate_updated_data(instance, updated_fields)
set_ac = False
if "artist_credit" in updated_fields:
artist_credit = updated_fields.pop("artist_credit")
set_ac = True
if creating:
instance, created = self.Meta.model.objects.get_or_create(
fid=validated_data["id"], defaults=updated_fields
)
if set_ac:
instance.artist_credit.set(artist_credit)
else:
obj = music_tasks.update_library_entity(instance, updated_fields)
if set_ac:
obj.artist_credit.set(artist_credit)
music_tasks.update_library_entity(instance, updated_fields)
tags = [t["name"] for t in validated_data.get("tags", []) or []]
tags_models.set_tags(instance, *tags)
@ -1312,6 +1288,7 @@ class ArtistSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"released": jsonld.first_val(contexts.FW.released),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"image": jsonld.first_obj(contexts.AS.image),
},
)
@ -1323,9 +1300,9 @@ class ArtistSerializer(MusicEntitySerializer):
"name": instance.name,
"published": instance.creation_date.isoformat(),
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
include_content(d, instance.description)
@ -1337,53 +1314,12 @@ class ArtistSerializer(MusicEntitySerializer):
create = MusicEntitySerializer.update_or_create
class ArtistCreditSerializer(jsonld.JsonLdSerializer):
artist = ArtistSerializer()
joinphrase = serializers.CharField(
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
)
credit = serializers.CharField(
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
)
published = serializers.DateTimeField()
id = serializers.URLField(max_length=500)
updateable_fields = [
("credit", "credit"),
("artist", "artist"),
("joinphrase", "joinphrase"),
]
class Meta:
model = music_models.ArtistCredit
jsonld_mapping = {
"artist": jsonld.first_obj(contexts.FW.artist),
"credit": jsonld.first_val(contexts.FW.credit),
"index": jsonld.first_val(contexts.FW.index),
"joinphrase": jsonld.first_val(contexts.FW.joinphrase),
"published": jsonld.first_val(contexts.AS.published),
}
def to_representation(self, instance):
data = {
"type": "ArtistCredit",
"id": instance.fid,
"artist": ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data,
"joinphrase": instance.joinphrase,
"credit": instance.credit,
"index": instance.index,
"published": instance.creation_date.isoformat(),
}
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
class AlbumSerializer(MusicEntitySerializer):
released = serializers.DateField(allow_null=True, required=False)
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
artists = serializers.ListField(
child=MultipleSerializer(allowed=[BasicActorSerializer, ArtistSerializer]),
min_length=1,
)
image = ImageSerializer(
allowed_mimetypes=["image/*"],
allow_null=True,
@ -1396,7 +1332,7 @@ class AlbumSerializer(MusicEntitySerializer):
("musicbrainzId", "mbid"),
("attributedTo", "attributed_to"),
("released", "release_date"),
("artist_credit", "artist_credit"),
("_artist", "artist"),
]
class Meta:
@ -1405,60 +1341,62 @@ class AlbumSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"released": jsonld.first_val(contexts.FW.released),
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"image": jsonld.first_obj(contexts.AS.image),
},
)
def to_representation(self, instance):
data = {
d = {
"type": "Album",
"id": instance.fid,
"name": instance.title,
"published": instance.creation_date.isoformat(),
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"released": (
instance.release_date.isoformat() if instance.release_date else None
),
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"released": instance.release_date.isoformat()
if instance.release_date
else None,
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
data["artist_credit"] = ArtistCreditSerializer(
instance.artist_credit.all(),
context={"include_ap_context": False},
many=True,
).data
include_content(data, instance.description)
if instance.artist.get_channel():
d["artists"] = [
{
"type": instance.artist.channel.actor.type,
"id": instance.artist.channel.actor.fid,
}
]
else:
d["artists"] = [
ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data
]
include_content(d, instance.description)
if instance.attachment_cover:
include_image(data, instance.attachment_cover)
include_image(d, instance.attachment_cover)
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
def validate(self, data):
validated_data = super().validate(data)
if not self.parent:
artist_credit_data = validated_data["artist_credit"]
if artist_credit_data[0]["artist"].get("type", "Artist") == "Artist":
acs = []
for ac in validated_data["artist_credit"]:
acs.append(
utils.retrieve_ap_object(
ac["id"],
actor=self.context.get("fetch_actor"),
queryset=music_models.ArtistCredit,
serializer_class=ArtistCreditSerializer,
)
)
validated_data["artist_credit"] = acs
artist_data = validated_data["artists"][0]
if artist_data.get("type", "Artist") == "Artist":
validated_data["_artist"] = utils.retrieve_ap_object(
artist_data["id"],
actor=self.context.get("fetch_actor"),
queryset=music_models.Artist,
serializer_class=ArtistSerializer,
)
else:
# we have an actor as an artist, so it's a channel
actor = actors.get_actor(artist_credit_data[0]["artist"]["id"])
validated_data["artist_credit"] = [{"artist": actor.channel.artist}]
actor = actors.get_actor(artist_data["id"])
validated_data["_artist"] = actor.channel.artist
return validated_data
@ -1468,7 +1406,7 @@ class AlbumSerializer(MusicEntitySerializer):
class TrackSerializer(MusicEntitySerializer):
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
artists = serializers.ListField(child=ArtistSerializer(), min_length=1)
album = AlbumSerializer()
license = serializers.URLField(allow_null=True, required=False)
copyright = serializers.CharField(allow_null=True, required=False)
@ -1496,7 +1434,7 @@ class TrackSerializer(MusicEntitySerializer):
MUSIC_ENTITY_JSONLD_MAPPING,
{
"album": jsonld.first_obj(contexts.FW.album),
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
"copyright": jsonld.first_val(contexts.FW.copyright),
"disc": jsonld.first_val(contexts.FW.disc),
"license": jsonld.first_id(contexts.FW.license),
@ -1506,7 +1444,7 @@ class TrackSerializer(MusicEntitySerializer):
)
def to_representation(self, instance):
data = {
d = {
"type": "Track",
"id": instance.fid,
"name": instance.title,
@ -1514,32 +1452,29 @@ class TrackSerializer(MusicEntitySerializer):
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
"position": instance.position,
"disc": instance.disc_number,
"license": (
instance.local_license["identifiers"][0]
if instance.local_license
else None
),
"license": instance.local_license["identifiers"][0]
if instance.local_license
else None,
"copyright": instance.copyright if instance.copyright else None,
"artist_credit": ArtistCreditSerializer(
instance.artist_credit.all(),
context={"include_ap_context": False},
many=True,
).data,
"artists": [
ArtistSerializer(
instance.artist, context={"include_ap_context": False}
).data
],
"album": AlbumSerializer(
instance.album, context={"include_ap_context": False}
).data,
"attributedTo": (
instance.attributed_to.fid if instance.attributed_to else None
),
"attributedTo": instance.attributed_to.fid
if instance.attributed_to
else None,
"tag": self.get_tags_repr(instance),
}
include_content(data, instance.description)
include_image(data, instance.attachment_cover)
include_content(d, instance.description)
include_image(d, instance.attachment_cover)
if self.context.get("include_ap_context", self.parent is None):
data["@context"] = jsonld.get_default_context()
return data
d["@context"] = jsonld.get_default_context()
return d
@transaction.atomic
def create(self, validated_data):
from funkwhale_api.music import tasks as music_tasks
@ -1555,21 +1490,18 @@ class TrackSerializer(MusicEntitySerializer):
validated_data, "album.attributedTo", permissive=True
)
)
artist_credit = (
artists = (
common_utils.recursive_getattr(validated_data, "artists", permissive=True)
or []
)
album_artists = (
common_utils.recursive_getattr(
validated_data, "artist_credit", permissive=True
validated_data, "album.artists", permissive=True
)
or []
)
album_artists_credit = (
common_utils.recursive_getattr(
validated_data, "album.artist_credit", permissive=True
)
or []
)
for ac in artist_credit + album_artists_credit:
actors_to_fetch.add(ac["artist"].get("attributedTo"))
for artist in artists + album_artists:
actors_to_fetch.add(artist.get("attributedTo"))
for url in actors_to_fetch:
if not url:
@ -1582,9 +1514,8 @@ class TrackSerializer(MusicEntitySerializer):
from_activity = self.context.get("activity")
if from_activity:
metadata["from_activity_id"] = from_activity.pk
track = music_tasks.get_track_from_import_metadata(
metadata, update_cover=True, query_mb=False
)
track = music_tasks.get_track_from_import_metadata(metadata, update_cover=True)
return track
def update(self, obj, validated_data):
@ -1593,50 +1524,6 @@ class TrackSerializer(MusicEntitySerializer):
return super().update(obj, validated_data)
def duration_int_to_xml(duration):
if not duration:
return None
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
ret = "P"
days, seconds = divmod(int(duration), multipliers["D"])
ret += f"{days:d}DT" if days > 0 else "T"
hours, seconds = divmod(seconds, multipliers["H"])
ret += f"{hours:d}H" if hours > 0 else ""
minutes, seconds = divmod(seconds, multipliers["M"])
ret += f"{minutes:d}M" if minutes > 0 else ""
ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
return ret
class DayTimeDurationSerializer(serializers.DurationField):
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
def to_internal_value(self, value):
if isinstance(value, float):
return value
parsed = re.match(
r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
)
if parsed is not None:
return int(
sum(
[
self.multipliers[s[-1]] * float("0" + s[:-1])
for s in parsed.groups()
if s is not None
]
)
)
self.fail(
"invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
)
def to_representation(self, value):
duration_int_to_xml(value)
class UploadSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Audio])
id = serializers.URLField(max_length=500)
@ -1646,7 +1533,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
updated = serializers.DateTimeField(required=False, allow_null=True)
bitrate = serializers.IntegerField(min_value=0)
size = serializers.IntegerField(min_value=0)
duration = DayTimeDurationSerializer(min_value=0)
duration = serializers.IntegerField(min_value=0)
track = TrackSerializer(required=True)
@ -1682,9 +1569,8 @@ class UploadSerializer(jsonld.JsonLdSerializer):
def validate_library(self, v):
lb = self.context.get("library")
if lb:
# the upload can come from a playlist lib
if lb.fid != v and not lb.playlist.library and lb.playlist.library.fid != v:
raise serializers.ValidationError("Invalid library fid")
if lb.fid != v:
raise serializers.ValidationError("Invalid library")
return lb
actor = self.context.get("actor")
@ -1696,10 +1582,10 @@ class UploadSerializer(jsonld.JsonLdSerializer):
queryset=music_models.Library,
serializer_class=LibrarySerializer,
)
except Exception as e:
raise serializers.ValidationError(f"Invalid library : {e}")
except Exception:
raise serializers.ValidationError("Invalid library")
if actor and library.actor != actor:
raise serializers.ValidationError("Invalid library, actor check fails")
raise serializers.ValidationError("Invalid library")
return library
def update(self, instance, validated_data):
@ -1750,17 +1636,16 @@ class UploadSerializer(jsonld.JsonLdSerializer):
return music_models.Upload.objects.create(**data)
def to_representation(self, instance):
lib = instance.library if instance.library else self.context.get("library")
track = instance.track
d = {
"type": "Audio",
"id": instance.get_federation_id(),
"library": lib.fid,
"library": instance.library.fid,
"name": track.full_name,
"published": instance.creation_date.isoformat(),
"bitrate": instance.bitrate,
"size": instance.size,
"duration": duration_int_to_xml(instance.duration),
"duration": instance.duration,
"url": [
{
"href": utils.full_url(instance.listen_url_no_download),
@ -1774,8 +1659,10 @@ class UploadSerializer(jsonld.JsonLdSerializer):
},
],
"track": TrackSerializer(track, context={"include_ap_context": False}).data,
"to": (contexts.AS.Public if lib.privacy_level == "everyone" else ""),
"attributedTo": lib.actor.fid,
"to": contexts.AS.Public
if instance.library.privacy_level == "everyone"
else "",
"attributedTo": instance.library.actor.fid,
}
if instance.modification_date:
d["updated"] = instance.modification_date.isoformat()
@ -1893,7 +1780,7 @@ class ChannelOutboxSerializer(PaginatedCollectionSerializer):
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.filter(track__artist_credit__artist=channel.artist),
.filter(track__artist=channel.artist),
"type": "OrderedCollection",
}
r = super().to_representation(conf)
@ -1906,7 +1793,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
name = serializers.CharField()
published = serializers.DateTimeField(required=False)
duration = DayTimeDurationSerializer(required=False)
duration = serializers.IntegerField(min_value=0, required=False)
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
album = serializers.URLField(max_length=500, required=False)
@ -1963,7 +1850,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
actor=actors.get_service_actor(),
serializer_class=AlbumSerializer,
queryset=music_models.Album.objects.filter(
artist_credit__artist__channel=self.context["channel"]
artist__channel=self.context["channel"]
),
)
@ -1994,9 +1881,9 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
"name": upload.track.title,
"attributedTo": upload.library.channel.actor.fid,
"published": upload.creation_date.isoformat(),
"to": (
contexts.AS.Public if upload.library.privacy_level == "everyone" else ""
),
"to": contexts.AS.Public
if upload.library.privacy_level == "everyone"
else "",
"url": [
{
"type": "Link",
@ -2015,7 +1902,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
if upload.track.local_license:
data["license"] = upload.track.local_license["identifiers"][0]
include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
include_if_not_none(data, upload.duration, "duration")
include_if_not_none(data, upload.track.position, "position")
include_if_not_none(data, upload.track.disc_number, "disc")
include_if_not_none(data, upload.track.copyright, "copyright")
@ -2042,6 +1929,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
now = timezone.now()
track_defaults = {
"fid": validated_data["id"],
"artist": channel.artist,
"position": validated_data.get("position", 1),
"disc_number": validated_data.get("disc", 1),
"title": validated_data["name"],
@ -2054,42 +1942,17 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
track_defaults["license"] = licenses.match(validated_data["license"])
track, created = music_models.Track.objects.update_or_create(
fid=validated_data["id"],
defaults=track_defaults,
artist__channel=channel, fid=validated_data["id"], defaults=track_defaults
)
# only one artist_credit per channel
query = (
Q(
artist=channel.artist,
)
& Q(credit__iexact=channel.artist.name)
& Q(joinphrase="")
)
defaults = {
"artist": channel.artist,
"joinphrase": "",
"credit": channel.artist.name,
}
ac_obj = music_tasks.get_best_candidate_or_create(
music_models.ArtistCredit,
query,
defaults=defaults,
sort_fields=["mbid", "fid"],
)
track.artist_credit.set([ac_obj[0].id])
if "image" in validated_data:
new_value = self.validated_data["image"]
common_utils.attach_file(
track,
"attachment_cover",
(
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None
),
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
if new_value
else None,
)
common_utils.attach_content(
@ -2213,276 +2076,3 @@ class IndexSerializer(jsonld.JsonLdSerializer):
if self.context.get("include_ap_context", True):
d["@context"] = jsonld.get_default_context()
return d
class TrackFavoriteSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Like])
id = serializers.URLField(max_length=500)
object = serializers.URLField(max_length=500)
actor = serializers.URLField(max_length=500)
class Meta:
jsonld_mapping = {
"object": jsonld.first_id(contexts.AS.object),
"actor": jsonld.first_id(contexts.AS.actor),
}
def to_representation(self, favorite):
payload = {
"type": "Like",
"id": favorite.fid,
"actor": favorite.actor.fid,
"object": favorite.track.fid,
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = actors.get_actor(validated_data["actor"])
track = utils.retrieve_ap_object(
validated_data["object"],
actor=actors.get_service_actor(),
serializer_class=TrackSerializer,
)
return favorites_models.TrackFavorite.objects.create(
fid=validated_data.get("id"),
uuid=uuid.uuid4(),
actor=actor,
track=track,
)
class ListeningSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Listen])
id = serializers.URLField(max_length=500)
object = serializers.URLField(max_length=500)
actor = serializers.URLField(max_length=500)
class Meta:
jsonld_mapping = {
"object": jsonld.first_id(contexts.AS.object),
"actor": jsonld.first_id(contexts.AS.actor),
}
def to_representation(self, listening):
payload = {
"type": "Listen",
"id": listening.fid,
"actor": listening.actor.fid,
"object": listening.track.fid,
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = actors.get_actor(validated_data["actor"])
track = utils.retrieve_ap_object(
validated_data["object"],
actor=actors.get_service_actor(),
serializer_class=TrackSerializer,
)
return history_models.Listening.objects.create(
fid=validated_data.get("id"),
uuid=validated_data["id"].rstrip("/").split("/")[-1],
actor=actor,
track=track,
)
class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.FW.PlaylistTrack])
id = serializers.URLField(max_length=500)
track = serializers.URLField(max_length=500)
index = serializers.IntegerField()
creation_date = serializers.DateTimeField()
playlist = serializers.URLField(max_length=500, required=False)
class Meta:
model = playlists_models.PlaylistTrack
jsonld_mapping = {
"track": jsonld.first_id(contexts.FW.track),
"playlist": jsonld.first_id(contexts.FW.playlist),
"index": jsonld.first_val(contexts.FW.index),
"creation_date": jsonld.first_val(contexts.AS.published),
}
def to_representation(self, plt):
payload = {
"type": "PlaylistTrack",
"id": plt.fid,
"track": plt.track.fid,
"index": plt.index,
"attributedTo": plt.playlist.actor.fid,
"published": plt.creation_date.isoformat(),
}
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
if self.context.get("include_playlist", True):
payload["playlist"] = plt.playlist.fid
return payload
def create(self, validated_data):
track = utils.retrieve_ap_object(
validated_data["track"],
actor=self.context.get("fetch_actor"),
queryset=music_models.Track,
serializer_class=TrackSerializer,
)
playlist = utils.retrieve_ap_object(
validated_data["playlist"],
actor=self.context.get("fetch_actor"),
queryset=playlists_models.Playlist,
serializer_class=PlaylistSerializer,
)
defaults = {
"track": track,
"index": validated_data["index"],
"creation_date": validated_data["creation_date"],
"playlist": playlist,
}
if existing_plt := playlists_models.PlaylistTrack.objects.filter(
playlist=playlist, index=validated_data["index"]
):
existing_plt.delete()
plt, created = playlists_models.PlaylistTrack.objects.update_or_create(
defaults,
**{
"uuid": validated_data["id"].rstrip("/").split("/")[-1],
"fid": validated_data["id"],
},
)
return plt
class PlaylistSerializer(jsonld.JsonLdSerializer):
"""
Used for playlist activities
"""
type = serializers.ChoiceField(choices=[contexts.FW.Playlist, contexts.AS.Create])
id = serializers.URLField(max_length=500)
uuid = serializers.UUIDField(required=False)
name = serializers.CharField(required=False)
attributedTo = serializers.URLField(max_length=500, required=False)
published = serializers.DateTimeField(required=False)
updated = serializers.DateTimeField(required=False)
audience = serializers.ChoiceField(
choices=[None, "https://www.w3.org/ns/activitystreams#Public"],
required=False,
allow_null=True,
allow_blank=True,
)
library = serializers.URLField(max_length=500, required=True)
updateable_fields = [
("name", "title"),
("attributedTo", "attributed_to"),
]
class Meta:
model = playlists_models.Playlist
jsonld_mapping = common_utils.concat_dicts(
MUSIC_ENTITY_JSONLD_MAPPING,
{
"updated": jsonld.first_val(contexts.AS.published),
"audience": jsonld.first_id(contexts.AS.audience),
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
"library": jsonld.first_id(contexts.FW.library),
},
)
def to_representation(self, playlist):
payload = {
"type": "Playlist",
"id": playlist.fid,
"name": playlist.name,
"attributedTo": playlist.actor.fid,
"published": playlist.creation_date.isoformat(),
"audience": playlist.privacy_level,
"library": playlist.library.fid,
}
payload["audience"] = (
contexts.AS.Public if playlist.privacy_level == "everyone" else ""
)
if playlist.modification_date:
payload["updated"] = playlist.modification_date.isoformat()
if self.context.get("include_ap_context", True):
payload["@context"] = jsonld.get_default_context()
return payload
def create(self, validated_data):
actor = utils.retrieve_ap_object(
validated_data["attributedTo"],
actor=self.context.get("fetch_actor"),
queryset=models.Actor,
serializer_class=ActorSerializer,
)
library = utils.retrieve_ap_object(
validated_data["library"],
actor=self.context.get("fetch_actor"),
queryset=music_models.Library,
serializer_class=LibrarySerializer,
)
ap_to_fw_data = {
"actor": actor,
"name": validated_data["name"],
"creation_date": validated_data["published"],
"privacy_level": validated_data["audience"],
"library": library,
}
playlist, created = playlists_models.Playlist.objects.update_or_create(
defaults=ap_to_fw_data,
**{
"fid": validated_data["id"],
"uuid": validated_data.get(
"uuid", validated_data["id"].rstrip("/").split("/")[-1]
),
},
)
return playlist
def validate(self, data):
validated_data = super().validate(data)
if validated_data["audience"] in [
"https://www.w3.org/ns/activitystreams#Public",
"everyone",
]:
validated_data["audience"] = "everyone"
else:
validated_data.pop("audience")
return validated_data
def update(self, instance, validated_data):
return self.create(validated_data)
class PlaylistCollectionSerializer(PaginatedCollectionSerializer):
"""
Used for the federation view.
"""
type = serializers.ChoiceField(choices=[contexts.FW.Playlist])
def to_representation(self, playlist):
conf = {
"id": playlist.fid,
"name": playlist.name,
"page_size": 100,
"actor": playlist.actor,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"tracks",
),
"type": "Playlist",
"library": playlist.library.fid,
"published": playlist.creation_date.isoformat(),
}
r = super().to_representation(conf)
return r

View File

@ -30,7 +30,7 @@ def verify_date(raw_date):
ts = parse_http_date(raw_date)
except ValueError as e:
raise forms.ValidationError(str(e))
dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
dt = datetime.datetime.utcfromtimestamp(ts)
dt = dt.replace(tzinfo=ZoneInfo("UTC"))
delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
now = timezone.now()

View File

@ -5,7 +5,6 @@ import os
import requests
from django.conf import settings
from django.core.cache import cache
from django.db import transaction
from django.db.models import F, Q
from django.db.models.deletion import Collector
@ -19,7 +18,6 @@ from funkwhale_api.common import preferences, session
from funkwhale_api.common import utils as common_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.taskapp import celery
from . import (
@ -236,10 +234,8 @@ def refresh_nodeinfo_known_nodes():
settings.NODEINFO_REFRESH_DELAY
"""
limit = timezone.now() - datetime.timedelta(seconds=settings.NODEINFO_REFRESH_DELAY)
candidates = (
models.Domain.objects.external()
.exclude(nodeinfo_fetch_date__gte=limit)
.filter(nodeinfo__software__name="Funkwhale")
candidates = models.Domain.objects.external().exclude(
nodeinfo_fetch_date__gte=limit
)
names = candidates.values_list("name", flat=True)
logger.info("Launching periodic nodeinfo refresh on %s domains", len(names))
@ -669,14 +665,3 @@ def check_single_remote_instance_availability(domain):
domain.reachable = False
domain.save()
return domain.reachable
@celery.app.task(name="federation.trigger_playlist_ap_update")
def trigger_playlist_ap_update(playlist):
for playlist_uuid in cache.get("playlists_for_ap_update"):
routes.outbox.dispatch(
{"type": "Update", "object": {"type": "Playlist"}},
context={
"playlist": playlists_models.Playlist.objects.get(uuid=playlist_uuid)
},
)

View File

@ -1,5 +1,4 @@
from django.conf.urls import include
from django.urls import re_path
from django.conf.urls import include, url
from rest_framework import routers
from . import views
@ -17,20 +16,13 @@ router.register(r".well-known", views.WellKnownViewSet, "well-known")
music_router.register(r"libraries", views.MusicLibraryViewSet, "libraries")
music_router.register(r"uploads", views.MusicUploadViewSet, "uploads")
music_router.register(r"artists", views.MusicArtistViewSet, "artists")
music_router.register(r"artistcredit", views.MusicArtistCreditViewSet, "artistcredit")
music_router.register(r"albums", views.MusicAlbumViewSet, "albums")
music_router.register(r"tracks", views.MusicTrackViewSet, "tracks")
music_router.register(r"likes", views.TrackFavoriteViewSet, "likes")
music_router.register(r"listenings", views.ListeningsViewSet, "listenings")
music_router.register(r"playlists", views.PlaylistViewSet, "playlists")
music_router.register(r"playlists", views.PlaylistTrackViewSet, "playlist-tracks")
index_router.register(r"index", views.IndexViewSet, "index")
urlpatterns = router.urls + [
re_path(
"federation/music/", include((music_router.urls, "music"), namespace="music")
),
re_path("federation/", include((index_router.urls, "index"), namespace="index")),
url("federation/music/", include((music_router.urls, "music"), namespace="music")),
url("federation/", include((index_router.urls, "index"), namespace="index")),
]

View File

@ -7,16 +7,12 @@ from django.urls import reverse
from rest_framework import exceptions, mixins, permissions, response, viewsets
from rest_framework.decorators import action
from funkwhale_api.common import permissions as common_permissions
from funkwhale_api.common import preferences
from funkwhale_api.common import utils as common_utils
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.history import models as history_models
from funkwhale_api.moderation import models as moderation_models
from funkwhale_api.music import models as music_models
from funkwhale_api.music import utils as music_utils
from funkwhale_api.playlists import models as playlists_models
from . import (
activity,
@ -165,9 +161,7 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related(
"library__channel__actor", "track__artist_credit__artist"
),
.prefetch_related("library__channel__actor", "track__artist"),
"item_serializer": serializers.ChannelCreateUploadSerializer,
}
return get_collection_response(
@ -176,115 +170,17 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
collection_serializer=serializers.ChannelOutboxSerializer(channel),
)
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
@action(methods=["get"], detail=True)
def followers(self, request, *args, **kwargs):
actor = self.get_object()
followers = list(actor.get_approved_followers())
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-followers",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followers,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
self.get_object()
# XXX to implement
return response.Response({})
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
@action(methods=["get"], detail=True)
def following(self, request, *args, **kwargs):
actor = self.get_object()
followings = list(
actor.emitted_follows.filter(approved=True).values_list("target", flat=True)
)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-following",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followings,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def listens(self, request, *args, **kwargs):
actor = self.get_object()
listenings = history_models.Listening.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-listens",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": listenings,
"item_serializer": serializers.ListeningSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def likes(self, request, *args, **kwargs):
actor = self.get_object()
likes = favorites_models.TrackFavorite.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-likes",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": likes,
"item_serializer": serializers.TrackFavoriteSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
self.get_object()
# XXX to implement
return response.Response({})
class EditViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
@ -365,20 +261,6 @@ def has_library_access(request, library):
return library.received_follows.filter(actor=actor, approved=True).exists()
def has_playlist_access(request, playlist):
if playlist.privacy_level == "everyone":
return True
if request.user.is_authenticated and request.user.is_superuser:
return True
try:
actor = request.actor
except AttributeError:
return False
return playlist.library.received_follows.filter(actor=actor, approved=True).exists()
class MusicLibraryViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
@ -397,41 +279,39 @@ class MusicLibraryViewSet(
lb = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(lb.get_absolute_url())
items_qs = (
lb.uploads.for_federation()
if not lb.playlist_uploads.all()
else lb.playlist_uploads.for_federation()
)
conf = {
"id": lb.get_federation_id(),
"actor": lb.actor,
"name": lb.name,
"items": items_qs.order_by("-creation_date").prefetch_related(
"summary": lb.description,
"items": lb.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related(
Prefetch(
"track",
queryset=music_models.Track.objects.select_related(
"album__artist__attributed_to",
"artist__attributed_to",
"artist__attachment_cover",
"attachment_cover",
"album__attributed_to",
"attributed_to",
"album__attachment_cover",
"album__artist__attachment_cover",
"description",
).prefetch_related(
"album__artist_credit__artist__attributed_to",
"artist_credit__artist__attributed_to",
"artist_credit__artist__attachment_cover",
"tagged_items__tag",
"album__tagged_items__tag",
"album__artist_credit__artist__tagged_items__tag",
"album__artist_credit__artist__attachment_cover",
"artist_credit__artist__tagged_items__tag",
"artist_credit__artist__description",
"album__artist__tagged_items__tag",
"artist__tagged_items__tag",
"artist__description",
"album__description",
),
)
),
"item_serializer": serializers.UploadSerializer,
"library": lb,
}
return get_collection_response(
conf=conf,
querystring=request.GET,
@ -451,20 +331,15 @@ class MusicUploadViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Upload.objects.local()
.select_related(
"library__actor",
"track__description",
"track__album__attachment_cover",
"track__attachment_cover",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
"track__album__artist_credit__artist__attachment_cover",
"track__artist_credit__artist__attachment_cover",
)
queryset = music_models.Upload.objects.local().select_related(
"library__actor",
"track__artist",
"track__album__artist",
"track__description",
"track__album__attachment_cover",
"track__album__artist__attachment_cover",
"track__artist__attachment_cover",
"track__attachment_cover",
)
serializer_class = serializers.UploadSerializer
lookup_field = "uuid"
@ -518,35 +393,13 @@ class MusicArtistViewSet(
return response.Response(serializer.data)
class MusicArtistCreditViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.ArtistCredit.objects.local().prefetch_related("artist")
serializer_class = serializers.ArtistCreditSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class MusicAlbumViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Album.objects.local()
.prefetch_related(
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
)
.select_related(
"description",
)
queryset = music_models.Album.objects.local().select_related(
"artist__description", "description", "artist__attachment_cover"
)
serializer_class = serializers.AlbumSerializer
lookup_field = "uuid"
@ -565,22 +418,16 @@ class MusicTrackViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = (
music_models.Track.objects.local()
.select_related(
"album__description",
"description",
"attachment_cover",
"album__attachment_cover",
)
.prefetch_related(
"album__artist_credit__artist",
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
"album__artist_credit__artist__attachment_cover",
)
queryset = music_models.Track.objects.local().select_related(
"album__artist",
"album__description",
"artist__description",
"description",
"attachment_cover",
"album__artist__attachment_cover",
"album__attachment_cover",
"artist__attachment_cover",
)
serializer_class = serializers.TrackSerializer
lookup_field = "uuid"
@ -680,95 +527,3 @@ class IndexViewSet(FederationMixin, viewsets.GenericViewSet):
)
return response.Response({}, status=200)
class TrackFavoriteViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = favorites_models.TrackFavorite.objects.local().select_related(
"track", "actor"
)
serializer_class = serializers.TrackFavoriteSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class ListeningsViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = history_models.Listening.objects.local().select_related("track", "actor")
serializer_class = serializers.ListeningSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())
serializer = self.get_serializer(instance)
return response.Response(serializer.data)
class PlaylistViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = playlists_models.Playlist.objects.local().select_related("actor")
serializer_class = serializers.PlaylistCollectionSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
playlist = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(playlist.get_absolute_url())
conf = {
"id": playlist.fid,
"actor": playlist.actor,
"name": playlist.name,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"track",
),
"item_serializer": serializers.PlaylistTrackSerializer,
"library": playlist.library.fid,
}
return get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.PlaylistCollectionSerializer(playlist),
page_access_check=lambda: has_playlist_access(request, playlist),
)
class PlaylistTrackViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = playlists_models.PlaylistTrack.objects.local().select_related("actor")
serializer_class = serializers.PlaylistTrackSerializer
lookup_field = "uuid"
def retrieve(self, request, *args, **kwargs):
plt = self.get_object()
if not has_playlist_access(request, plt.playlist):
return response.Response(status=403)
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(plt.get_absolute_url())
serializer = self.get_serializer(plt)
return response.Response(serializer.data)

View File

@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.ListeningActivitySerializer)
@record.registry.register_consumer("history.Listening")
def broadcast_listening_to_instance_activity(data, obj):
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
if obj.user.privacy_level not in ["instance", "everyone"]:
return
channels.group_send(

View File

@ -5,6 +5,6 @@ from . import models
@admin.register(models.Listening)
class ListeningAdmin(admin.ModelAdmin):
list_display = ["track", "creation_date", "actor", "session_key"]
search_fields = ["track__name", "actor__user__username"]
list_select_related = ["actor", "track"]
list_display = ["track", "creation_date", "user", "session_key"]
search_fields = ["track__name", "user__username"]
list_select_related = ["user", "track"]

View File

@ -1,28 +1,14 @@
import factory
from django.conf import settings
from funkwhale_api.factories import NoUpdateOnCreate, registry
from funkwhale_api.federation import models
from funkwhale_api.federation.factories import ActorFactory
from funkwhale_api.music import factories
from funkwhale_api.users.factories import UserFactory
@registry.register
class ListeningFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
actor = factory.SubFactory(ActorFactory)
user = factory.SubFactory(UserFactory)
track = factory.SubFactory(factories.TrackFactory)
fid = factory.Faker("federation_url")
uuid = factory.Faker("uuid4")
class Meta:
model = "history.Listening"
@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
return
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
0
]
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
self.save(update_fields=["fid"])

View File

@ -7,9 +7,9 @@ from . import models
class ListeningFilter(moderation_filters.HiddenContentFilterSet):
username = django_filters.CharFilter("actor__user__username")
domain = django_filters.CharFilter("actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)
username = django_filters.CharFilter("user__username")
domain = django_filters.CharFilter("user__actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
class Meta:
model = models.Listening

View File

@ -1,18 +0,0 @@
# Generated by Django 3.2.20 on 2023-12-09 14:23
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('history', '0002_auto_20180325_1433'),
]
operations = [
migrations.AddField(
model_name='listening',
name='source',
field=models.CharField(blank=True, max_length=100, null=True),
),
]

View File

@ -1,107 +0,0 @@
import uuid
from django.db import migrations, models
from django.urls import reverse
from funkwhale_api.federation import utils
import django.db.models.deletion
def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])
def gen_uuid(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
unique_uuid = uuid.uuid4()
while MyModel.objects.filter(uuid=unique_uuid).exists():
unique_uuid = uuid.uuid4()
fid = utils.full_url(
reverse("federation:music:listenings-detail", kwargs={"uuid": unique_uuid})
)
row.uuid = unique_uuid
row.fid = fid
row.save(update_fields=["uuid", "fid"])
def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])
class Migration(migrations.Migration):
dependencies = [
("history", "0003_listening_source"),
("federation", "0028_auto_20221027_1141"),
]
operations = [
migrations.AddField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
migrations.AddField(
model_name="listening",
name="fid",
field=models.URLField(
max_length=500,
null=True,
),
),
migrations.AddField(
model_name="listening",
name="url",
field=models.URLField(blank=True, max_length=500, null=True),
),
migrations.AddField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, null=True),
),
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True),
),
migrations.AlterField(
model_name="listening",
name="fid",
field=models.URLField(
unique=True,
db_index=True,
max_length=500,
),
),
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
migrations.RemoveField(
model_name="listening",
name="user",
),
migrations.AlterField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
]

View File

@ -1,59 +1,25 @@
import uuid
from django.db import models
from django.urls import reverse
from django.utils import timezone
from funkwhale_api.common import models as common_models
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music.models import Track
class ListeningQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
pass
class Listening(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
class Listening(models.Model):
creation_date = models.DateTimeField(default=timezone.now, null=True, blank=True)
track = models.ForeignKey(
Track, related_name="listenings", on_delete=models.CASCADE
)
actor = models.ForeignKey(
"federation.Actor",
user = models.ForeignKey(
"users.User",
related_name="listenings",
null=True,
blank=True,
on_delete=models.CASCADE,
null=False,
blank=False,
)
session_key = models.CharField(max_length=100, null=True, blank=True)
source = models.CharField(max_length=100, null=True, blank=True)
federation_namespace = "listenings"
objects = ListeningQuerySet.as_manager()
class Meta:
ordering = ("-creation_date",)
def get_activity_url(self):
return f"{self.actor.get_absolute_url()}/listenings/tracks/{self.pk}"
def get_absolute_url(self):
return f"/library/tracks/{self.track.pk}"
def get_federation_id(self):
if self.fid:
return self.fid
return federation_utils.full_url(
reverse(
f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid},
)
)
def save(self, **kwargs):
if not self.pk and not self.fid:
self.fid = self.get_federation_id()
return super().save(**kwargs)
return f"{self.user.get_activity_url()}/listenings/tracks/{self.pk}"

View File

@ -1,8 +1,10 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from funkwhale_api.activity import serializers as activity_serializers
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer
from . import models
@ -10,39 +12,47 @@ from . import models
class ListeningActivitySerializer(activity_serializers.ModelSerializer):
type = serializers.SerializerMethodField()
object = TrackActivitySerializer(source="track")
actor = federation_serializers.APIActorSerializer()
actor = UserActivitySerializer(source="user")
published = serializers.DateTimeField(source="creation_date")
class Meta:
model = models.Listening
fields = ["id", "local_id", "object", "type", "actor", "published"]
def get_actor(self, obj):
return UserActivitySerializer(obj.user).data
def get_type(self, obj):
return "Listen"
class ListeningSerializer(serializers.ModelSerializer):
track = TrackSerializer(read_only=True)
actor = federation_serializers.APIActorSerializer(read_only=True)
user = UserBasicSerializer(read_only=True)
actor = serializers.SerializerMethodField()
class Meta:
model = models.Listening
fields = ("id", "actor", "track", "creation_date", "actor")
fields = ("id", "user", "track", "creation_date", "actor")
def create(self, validated_data):
validated_data["actor"] = self.context["user"].actor
validated_data["user"] = self.context["user"]
return super().create(validated_data)
@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
actor = obj.user.actor
if actor:
return federation_serializers.APIActorSerializer(actor).data
class ListeningWriteSerializer(serializers.ModelSerializer):
actor = federation_serializers.APIActorSerializer(read_only=True, required=False)
class Meta:
model = models.Listening
fields = ("id", "actor", "track", "creation_date")
fields = ("id", "user", "track", "creation_date")
def create(self, validated_data):
validated_data["actor"] = self.context["user"].actor
validated_data["user"] = self.context["user"]
return super().create(validated_data)

View File

@ -4,7 +4,6 @@ from rest_framework import mixins, viewsets
from config import plugins
from funkwhale_api.activity import record
from funkwhale_api.common import fields, permissions
from funkwhale_api.federation import routes
from funkwhale_api.music import utils as music_utils
from funkwhale_api.music.models import Track
from funkwhale_api.users.oauth import permissions as oauth_permissions
@ -19,7 +18,9 @@ class ListeningViewSet(
viewsets.GenericViewSet,
):
serializer_class = serializers.ListeningSerializer
queryset = models.Listening.objects.all().select_related("actor__attachment_icon")
queryset = models.Listening.objects.all().select_related(
"user__actor__attachment_icon"
)
permission_classes = [
oauth_permissions.ScopePermission,
@ -28,7 +29,6 @@ class ListeningViewSet(
required_scope = "listenings"
anonymous_policy = "setting"
owner_checks = ["write"]
owner_field = "actor.user"
filterset_class = filters.ListeningFilter
def get_serializer_class(self):
@ -38,40 +38,23 @@ class ListeningViewSet(
def perform_create(self, serializer):
r = super().perform_create(serializer)
instance = serializer.instance
plugins.trigger_hook(
plugins.LISTENING_CREATED,
listening=instance,
listening=serializer.instance,
confs=plugins.get_confs(self.request.user),
)
routes.outbox.dispatch(
{"type": "Listen", "object": {"type": "Track"}},
context={
"track": instance.track,
"actor": instance.actor,
"id": instance.fid,
},
)
record.send(serializer.instance)
return r
def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(
fields.privacy_level_query(
self.request.user, "actor__user__privacy_level", "actor__user"
)
fields.privacy_level_query(self.request.user, "user__privacy_level")
)
tracks = (
Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
)
.prefetch_related(
"artist_credit",
"album__artist_credit__artist",
"artist_credit__artist__attachment_cover",
)
.select_related("attributed_to")
tracks = Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
).select_related(
"artist", "album__artist", "attributed_to", "artist__attachment_cover"
)
return queryset.prefetch_related(Prefetch("track", queryset=tracks))

View File

@ -37,7 +37,7 @@ def get_content():
def get_top_music_categories():
return (
models.Track.objects.filter(artist_credit__artist__content_category="music")
models.Track.objects.filter(artist__content_category="music")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))
@ -47,7 +47,7 @@ def get_top_music_categories():
def get_top_podcast_categories():
return (
models.Track.objects.filter(artist_credit__artist__content_category="podcast")
models.Track.objects.filter(artist__content_category="podcast")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))

View File

@ -1,4 +1,4 @@
from django.urls import re_path
from django.conf.urls import url
from funkwhale_api.common import routers
@ -8,7 +8,7 @@ admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")
urlpatterns = [
re_path(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
url(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
url(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
url(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls

View File

@ -1,14 +1,7 @@
from django.urls import re_path
from funkwhale_api.common import routers
from django.conf.urls import url
from . import views
admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")
urlpatterns = [
re_path(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
url(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
]

View File

@ -126,7 +126,7 @@ class NodeInfo21(NodeInfo20):
serializer_class = serializers.NodeInfo21Serializer
@extend_schema(
responses=serializers.NodeInfo21Serializer, operation_id="getNodeInfo21"
responses=serializers.NodeInfo20Serializer, operation_id="getNodeInfo20"
)
def get(self, request):
pref = preferences.all()
@ -171,9 +171,6 @@ class NodeInfo21(NodeInfo20):
if pref.get("federation__enabled"):
data["features"].append("federation")
if pref.get("music__only_allow_musicbrainz_tagged_files"):
data["features"].append("onlyMbidTaggedContent")
serializer = self.serializer_class(data)
return Response(
serializer.data, status=200, content_type=NODEINFO_2_CONTENT_TYPE

Some files were not shown because too many files have changed in this diff Show More