Merge branch 'develop' into stable
This commit is contained in: commit 98d4f4ef54

.env.dev (2 changed lines)
@@ -12,6 +12,7 @@ FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
PYTHONTRACEMALLOC=0
MEDIA_ROOT=/data/media

# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True

@@ -19,3 +20,4 @@ PYTHONTRACEMALLOC=0
# Customize to your needs
POSTGRES_VERSION=11
DEBUG=true
TYPESENSE_API_KEY="apikey"
.gitignore
@@ -92,7 +92,15 @@ docs/_build
po/*.po
docs/swagger
_build
front/src/translations.json
front/src/translations/*.json
front/locales/en_US/LC_MESSAGES/app.po
*.prof

# Docker
docker-bake.*.json
metadata.json

# Linting
.eslintcache
tsconfig.tsbuildinfo

# Vscode
.vscode/
.gitlab-ci.yml (557 changed lines)
@@ -4,11 +4,15 @@ include:
    file: /templates/pre-commit.yml
  - project: funkwhale/ci
    file: /templates/lychee.yml
  - project: funkwhale/ci
    file: /templates/ssh-agent.yml

variables:
  PYTHONDONTWRITEBYTECODE: "true"

  PIP_CACHE_DIR: $CI_PROJECT_DIR/.cache/pip
  YARN_CACHE_FOLDER: $CI_PROJECT_DIR/.cache/yarn
  POETRY_VIRTUALENVS_IN_PROJECT: "true"

.shared_variables:
  # Keep the git files permissions during job setup

@@ -17,7 +21,42 @@ variables:
  GIT_DEPTH: "5"
  FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: "true"

.shared_caches:
  # Cache for front related jobs
  front_cache: &front_cache
    - key: front-yarn
      paths: [$YARN_CACHE_FOLDER]
    - key:
        prefix: front-node_modules
        files: [front/yarn.lock]
      paths: [front/node_modules]
    - key:
        prefix: front-lint
        files:
          - front/.eslintcache
          - front/tsconfig.tsbuildinfo

  # Cache for api related jobs
  # Include the python version to prevent losing caches in the test matrix
  api_cache: &api_cache
    - key: api-pip-$PYTHON_VERSION
      paths: [$PIP_CACHE_DIR]
    - key:
        prefix: api-venv-$PYTHON_VERSION
        files: [api/poetry.lock]
      paths: [api/.venv]

  # Cache for docs related jobs
  docs_cache: &docs_cache
    - key: docs-pip
      paths: [$PIP_CACHE_DIR]
    - key:
        prefix: docs-venv
        files: [docs/poetry.lock]
      paths: [docs/.venv]

default:
  interruptible: true
  tags:
    - docker

@@ -31,7 +70,7 @@ workflow:
        $CI_COMMIT_TAG
      )
    # Run for merge requests from any repo or branches
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
    - if: $CI_MERGE_REQUEST_ID

stages:
  - review

@@ -41,84 +80,64 @@ stages:
  - publish

review_front:
  interruptible: true
  stage: review
  image: node:18-alpine
  when: manual
  allow_failure: true
  stage: review
  needs: []
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: manual

  image: $CI_REGISTRY/funkwhale/ci/node-python:18
  variables:
    BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
    VUE_APP_ROUTER_BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
    VUE_APP_INSTANCE_URL: https://demo.funkwhale.audio
    NODE_ENV: review
    NODE_OPTIONS: --max-old-space-size=4096
  environment:
    name: review/front/$CI_COMMIT_REF_NAME
    url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/index.html
  cache: *front_cache
  before_script:
    - apk add --no-cache jq bash coreutils python3
    - rm -rf front-review
    - mkdir front-review
    - cd front
    - yarn install --frozen-lockfile
  script:
    - yarn install
    # this is to ensure we don't have any errors in the output,
    # cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
    - yarn run build --base ./ | tee /dev/stderr | (! grep -i 'ERROR in')
    - yarn run build --base ./
    - cp -r dist/* ../front-review
  artifacts:
    expire_in: 2 weeks
    paths:
      - front-review
  cache:
    key: "funkwhale__front_dependencies"
    paths:
      - front/node_modules
      - front/yarn.lock
  environment:
    name: review/front/$CI_COMMIT_REF_NAME
    url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/index.html

review_docs:
  interruptible: true
  stage: review
  allow_failure: true
  image: python:3.11
  variables:
    BUILD_PATH: "../docs-review"
  stage: review
  needs: []
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes: [docs/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-docs:3.11
  environment:
    name: review/docs/$CI_COMMIT_REF_NAME
    url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/docs-review/index.html
  cache: *docs_cache
  before_script:
    - rm -rf docs-review
    - mkdir docs-review
    - cd docs
    - apt-get update
    - apt-get install -y graphviz git
    - pip install poetry
    - poetry install
    - git switch develop && git pull
    - git switch stable && git pull
    - git switch $CI_COMMIT_BRANCH && git pull
    - make install
  script:
    - poetry run python3 -m sphinx . $BUILD_PATH
  cache:
    key: "$CI_PROJECT_ID__sphinx"
    paths:
      - "$PIP_CACHE_DIR"
    - make build BUILD_DIR=../docs-review
  artifacts:
    expire_in: 2 weeks
    paths:
      - docs-review
  environment:
    name: review/docs/$CI_COMMIT_REF_NAME
    url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/docs-review/index.html
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - docs/**/*
    - if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_REF_NAME != $CI_DEFAULT_BRANCH
      when: manual

find_broken_links:
  extends: [.lychee]
  allow_failure:
    exit_codes: 2

  extends: [.lychee]
  script:
    - >
      lychee

@@ -131,56 +150,96 @@ find_broken_links:
      --exclude-path 'docs/_templates/'
      -- . || exit $?

changelog_snippet:
  interruptible: true
  image: alpine:3.17
require_changelog:
  stage: lint
  before_script:
    - apk add git
    - git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
  script:
    - git diff --name-only FETCH_HEAD | grep "changes/changelog.d/*"
  rules:
    - if: $CI_COMMIT_AUTHOR == 'Renovate Bot <bot@dev.funkwhale.audio>'
    # Don't run on merge requests that mention NOCHANGELOG or on Renovate bot commits
    - if: >
        $CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/ ||
        $CI_COMMIT_AUTHOR == "Renovate Bot <bot@dev.funkwhale.audio>"
      when: never
    - if: $CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/
      when: never
    - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"

  image: python:3.11
  script:
    - git diff --name-only $CI_MERGE_REQUEST_DIFF_BASE_SHA..$CI_COMMIT_SHA | grep "changes/changelog.d/*"

pre-commit:
  extends: [.pre-commit]

eslint:
  interruptible: true
  image: node:18-alpine
lint_api:
  allow_failure: true
  stage: lint
  needs: []
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [api/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
  before_script:
    - cd api
    - make install
  script:
    - make lint

lint_front:
  stage: lint
  needs: []
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [front/**/*]

  image: $CI_REGISTRY/funkwhale/ci/node-python:18
  cache: *front_cache
  before_script:
    - cd front
    - apk add --no-cache jq bash coreutils python3
    - yarn install
    - yarn install --frozen-lockfile
  script:
    - yarn lint --max-warnings 0
    - yarn lint:tsc
  cache:
    key: "$CI_PROJECT_ID__eslint_npm_cache"
    paths:
      - front/node_modules

test_scripts:
  stage: test
  needs: []
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      changes:
        - front/**/*
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [scripts/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python:3.11
  cache:
    - key: scripts-pip
      paths: [$PIP_CACHE_DIR]
    - key:
        prefix: scripts-venv
        files: [scripts/poetry.lock]
      paths: [scripts/.venv]
  before_script:
    - cd scripts
    - make install
  script:
    - make test

test_api:
  interruptible: true
  services:
    - postgres:15-alpine
    - redis:7-alpine
  stage: test
  retry: 1
  cache:
    key: "$CI_PROJECT_ID__pip_cache"
    paths:
      - "$PIP_CACHE_DIR"
  stage: test
  needs:
    - job: lint_api
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [api/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:$PYTHON_VERSION
  parallel:
    matrix:
      - PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11"]
  services:
    - name: postgres:15-alpine
      command:
        - --fsync=off
        - --full_page_writes=off
        - --synchronous_commit=off
    - name: redis:7-alpine
  cache: *api_cache
  variables:
    DATABASE_URL: "postgresql://postgres@postgres/postgres"
    FUNKWHALE_URL: "https://funkwhale.ci"

@@ -189,9 +248,16 @@ test_api:
    CACHE_URL: "redis://redis:6379/0"
  before_script:
    - cd api
    - poetry install --no-root
    - poetry install --all-extras
  script:
    - poetry run pytest --cov-report xml --cov-report term-missing:skip-covered --cov=funkwhale_api --junitxml=report.xml tests/
    - >
      poetry run pytest
      --junitxml=report.xml
      --cov
      --cov-config=pyproject.toml
      --cov-report=term-missing:skip-covered
      --cov-report=xml
      tests
  artifacts:
    expire_in: 2 weeks
    reports:

@@ -199,60 +265,83 @@ test_api:
      coverage_report:
        coverage_format: cobertura
        path: api/coverage.xml
  parallel:
    matrix:
      - PY_VER: ["3.7", "3.8", "3.9", "3.10", "3.11"]
  image: $CI_REGISTRY/funkwhale/backend-test-docker:$PY_VER
  coverage: '/TOTAL\s*\d*\s*\d*\s*(\d*%)/'

test_front:
  stage: test
  needs:
    - job: lint_front
  rules:
    - if: $CI_PIPELINE_SOURCE == "merge_request_event" || $CI_PIPELINE_SOURCE == "push"
      changes:
        - api/**/*
    - if: $CI_COMMIT_REF_PROTECTED == "true"
      when: always
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [front/**/*]

# Those tests are disabled for now since no vitest dom emulation is providing
# AudioContext, which is required for our HTML audio player
#test_front:
#  interruptible: true
#  stage: test
#  image: node:18-alpine
#  before_script:
#    - cd front
#    - apk add --no-cache jq bash coreutils python3
#  script:
#    - yarn install --check-files
#    - yarn test:unit
#  cache:
#    key: "funkwhale__front_dependencies"
#    paths:
#      - front/node_modules
#      - front/yarn.lock
#  artifacts:
#    name: "front_${CI_COMMIT_REF_NAME}"
#    paths:
#      - front/dist/
#    reports:
#      junit: front/coverage/cobertura-coverage.xml
#  tags:
#    - docker
#  rules:
#    - if: $CI_PIPELINE_SOURCE == "merge_request_event" || $CI_PIPELINE_SOURCE == "push"
#      changes:
#        - front/**/*
#    - if: $CI_COMMIT_REF_PROTECTED == "true"
#      when: always
  image: $CI_REGISTRY/funkwhale/ci/node-python:18
  cache: *front_cache
  before_script:
    - cd front
    - yarn install --frozen-lockfile
  script:
    - yarn test:unit
  artifacts:
    reports:
      junit: front/test_results.xml
      coverage_report:
        coverage_format: cobertura
        path: front/coverage/cobertura-coverage.xml

build_openapi_schema:
build_metadata:
  stage: build
  image: $CI_REGISTRY/funkwhale/backend-test-docker:3.11

  image: $CI_REGISTRY/funkwhale/ci/python:3.11
  variables:
    GIT_FETCH_EXTRA_FLAGS: --prune
  script:
    - make build-metadata
    - make docker-metadata
  artifacts:
    reports:
      dotenv: build_metadata.env
    paths:
      - docker-bake.json
      - docker-bake.api.json
      - docker-bake.front.json

test_integration:
  stage: test
  rules:
    - if: $RUN_CYPRESS
  interruptible: true

  image:
    name: cypress/included:12.14.0
    entrypoint: [""]
  cache:
    - *front_cache
    - key:
      paths:
        - /root/.cache/Cypress
  before_script:
    - cd front
    - yarn install
  script:
    - yarn run cypress run

build_api_schema:
  stage: build
  needs:
    - job: test_api
      optional: true
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [api/**/*]
    # Add build_docs rules because it depends on the build_api_schema artifact
    - changes: [docs/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
  services:
    - postgres:15-alpine
    - redis:7-alpine
  cache:
    key: "$CI_PROJECT_ID__pip_cache"
    paths:
      - "$PIP_CACHE_DIR"
  cache: *api_cache
  variables:
    DATABASE_URL: "postgresql://postgres@postgres/postgres"
    FUNKWHALE_URL: "https://funkwhale.ci"

@@ -262,8 +351,7 @@ build_openapi_schema:
    API_TYPE: "v1"
  before_script:
    - cd api
    - pip3 install poetry
    - poetry install
    - poetry install --all-extras
    - poetry run funkwhale-manage migrate
  script:
    - poetry run funkwhale-manage spectacular --file ../docs/schema.yml

@@ -272,162 +360,151 @@ build_openapi_schema:
    paths:
      - docs/schema.yml

build_documentation:
build_docs:
  stage: build
  image: python:3.11
  needs:
    - job: build_openapi_schema
    - job: build_api_schema
      artifacts: true
  variables:
    BUILD_PATH: "../public"
    GIT_STRATEGY: clone
    GIT_DEPTH: 0
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [docs/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-docs:3.11
  cache: *docs_cache
  before_script:
    - cd docs
    - apt-get update
    - apt-get install -y graphviz
    - pip install poetry
    - poetry install
    - git branch stable --track origin/stable || true
    - git branch develop --track origin/develop || true
    - make install
  script:
    - ./build_docs.sh
  cache:
    key: "$CI_PROJECT_ID__sphinx"
    paths:
      - "$PIP_CACHE_DIR"
    - make build-all BUILD_DIR=../public
  artifacts:
    expire_in: 2 weeks
    paths:
      - public
  rules:
    - if: $CI_COMMIT_BRANCH == "stable" || $CI_COMMIT_BRANCH == "develop"
      when: always
    - changes:
        - docs/**/*
      when: always

build_front:
  stage: build
  image: node:18-alpine
  needs:
    # The test_front job is currently disabled
    # - job: test_front
    - job: lint_front
      optional: true
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [front/**/*]

  image: $CI_REGISTRY/funkwhale/ci/node-python:18
  variables:
    <<: *keep_git_files_permissions
    NODE_OPTIONS: --max-old-space-size=4096
  cache: *front_cache
  before_script:
    - apk add --no-cache jq bash coreutils python3
    - cd front
    - yarn install --frozen-lockfile
  script:
    - yarn install
    # this is to ensure we don't have any errors in the output,
    # cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
    - yarn run build:deployment | tee /dev/stderr | (! grep -i 'ERROR in')
    - yarn run build:deployment
  artifacts:
    name: front_${CI_COMMIT_REF_NAME}
    paths:
      - front/dist/
  only:
    - tags@funkwhale/funkwhale
    - stable@funkwhale/funkwhale
    - develop@funkwhale/funkwhale
      - front/dist

build_api:
  stage: build
  image: bash
  needs:
    - job: test_api
      optional: true
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
    - changes: [api/**/*]

  image: $CI_REGISTRY/funkwhale/ci/python:3.11
  variables:
    <<: *keep_git_files_permissions
  script:
    - rm -rf api/tests
    - >
      if [ "$CI_COMMIT_REF_NAME" == "develop" ] || [ "$CI_COMMIT_REF_NAME" == "stable" ]; then
      if [[ -z "$CI_COMMIT_TAG" ]]; then
        ./scripts/set-api-build-metadata.sh $CI_COMMIT_SHORT_SHA;
      fi
  artifacts:
    name: api_${CI_COMMIT_REF_NAME}
    paths:
      - api
  only:
    - tags@funkwhale/funkwhale
    - stable@funkwhale/funkwhale
    - develop@funkwhale/funkwhale

deploy_documentation:
deploy_docs:
  interruptible: false
  extends: .ssh-agent
  stage: publish
  image: alpine
  needs:
    - job: build_documentation
    - job: build_docs
      artifacts: true
  before_script:
    - apk add openssh-client rsync
    - mkdir -p ~/.ssh
    - echo "$SSH_KNOWN_HOSTS" >> ~/.ssh/known_hosts
    - chmod 644 ~/.ssh/known_hosts
    - eval `ssh-agent -s`
    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
  script:
    - rsync -r --delete -e "ssh -p 2282" $CI_PROJECT_DIR/public/ docs@docs.funkwhale.audio:/htdocs/$CI_COMMIT_REF_NAME
  rules:
    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/

.docker_publish:
  stage: publish
  image: egon0/docker-with-buildx-and-git:bash
  parallel:
    matrix:
      - COMPONENT: ["api", "front"]
  image: $CI_REGISTRY/funkwhale/ci/python:3.11
  variables:
    GIT_STRATEGY: none
  script:
    - rsync -r --delete -e "ssh -p 2282" $CI_PROJECT_DIR/public/ docs@docs.funkwhale.audio:/htdocs/$CI_COMMIT_REF_NAME

docker:
  interruptible: false
  tags: [docker, privileged, multiarch]
  stage: build
  needs:
    - job: build_metadata
      artifacts: true
    - job: test_api
      optional: true
    - job: test_front
      optional: true
  rules:
    - if: $CI_COMMIT_TAG
      variables:
        BUILD_ARGS: >
          --set *.platform=linux/amd64,linux/arm64,linux/arm/v7
          --no-cache
          --push

    - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
      variables:
        BUILD_ARGS: >
          --set *.platform=linux/amd64,linux/arm64,linux/arm/v7
          --set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,oci-mediatypes=false
          --set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max,oci-mediatypes=false
          --push

    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      variables:
        BUILD_ARGS: >
          --set *.platform=linux/amd64
          --set *.cache-from=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_MERGE_REQUEST_TARGET_BRANCH_NAME,oci-mediatypes=false

  image: $CI_REGISTRY/funkwhale/ci/docker:20
  services:
    - docker:20-dind
  variables:
    <<: *keep_git_files_permissions

    IMAGE_NAME: funkwhale/$COMPONENT
    IMAGE: $IMAGE_NAME:$CI_COMMIT_REF_NAME
    IMAGE_LATEST: $IMAGE_NAME:latest

    DOCKER_HOST: tcp://docker:2375/
    DOCKER_DRIVER: overlay2
    DOCKER_TLS_CERTDIR: ""
    BUILD_PLATFORMS: linux/amd64,linux/arm64,linux/arm/v7
  tags:
    - multiarch
  services:
    - docker:20-dind
    BUILDKIT_PROGRESS: plain

    DOCKER_CACHE_IMAGE: $CI_REGISTRY/funkwhale/funkwhale/cache
  before_script:
    - docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
  cache:
    key: docker_public_${CI_COMMIT_REF_NAME}
    - >
      echo "$CI_REGISTRY_PASSWORD" | docker login --username "$CI_REGISTRY_USER" --password-stdin "$CI_REGISTRY";
      if [[ "$BUILD_ARGS" =~ "--push" ]]; then
        echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_LOGIN" --password-stdin docker.io;
      fi
  script:
    - >
      if [[ -z "$CI_COMMIT_TAG" ]]; then
        ./scripts/set-api-build-metadata.sh $CI_COMMIT_SHORT_SHA;
      fi
    - docker buildx create --use
    - make docker-build BUILD_ARGS="--metadata-file metadata.json $BUILD_ARGS"
    - cat metadata.json
  artifacts:
    name: docker_metadata_${CI_COMMIT_REF_NAME}
    paths:
      - ~/.cargo

docker_publish_stable_release:
  # Publish a docker image for releases
  extends: .docker_publish
  rules:
    - if: $CI_COMMIT_TAG && $CI_COMMIT_REF_NAME =~ /^[0-9]+(.[0-9]+){1,2}$/
  script:
    # Check if this is the latest release
    - ./docs/get-releases-json.py | scripts/is-docker-latest.py $CI_COMMIT_TAG - && export DOCKER_LATEST_TAG="-t $IMAGE_LATEST" || export DOCKER_LATEST_TAG=;
    - export major="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1)"
    - export minor="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1,2)"
    - cd $COMPONENT
    - docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
    - docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE $DOCKER_LATEST_TAG -t $IMAGE_NAME:$major -t $IMAGE_NAME:$minor .

docker_publish_unstable_release:
  # Publish a docker image for releases
  extends: .docker_publish
  rules:
    - if: $CI_COMMIT_TAG && $CI_COMMIT_REF_NAME !~ /^[0-9]+(.[0-9]+){1,2}$/
  script:
    # Check if this is the latest release
    - cd $COMPONENT
    - docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
    - docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .

docker_publish_non-release:
  # Publish a docker image for each commit on develop
  extends: .docker_publish
  only:
    - develop@funkwhale/funkwhale
    - stable@funkwhale/funkwhale
  script:
    - cd $COMPONENT
    - docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
    - docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .
      - metadata.json
renovate.json
@@ -35,12 +35,6 @@
      "matchBaseBranches": ["stable"],
      "enabled": false
    },
    {
      "matchUpdateTypes": ["patch", "pin", "digest"],
      "matchBaseBranches": ["develop"],
      "automerge": true,
      "automergeType": "branch"
    },
    {
      "matchManagers": ["npm"],
      "addLabels": ["Area::Frontend"]

@@ -70,6 +64,10 @@
      ],
      "fileFilters": ["changes/changelog.d/postgres.update"]
    }
  },
  {
    "matchPackageNames": ["python"],
    "rangeStrategy": "widen"
  }
]
}
.gitpod.yml (36 changed lines)
@@ -19,6 +19,7 @@ tasks:
      gp ports await 5432

      poetry run funkwhale-manage migrate
      poetry run funkwhale-manage fw users create --superuser --username gitpod --password funkwhale --email test@example.org
      poetry run funkwhale-manage gitpod init
    command: |
      echo "MEDIA_URL=`gp url 8000`/media/" >> ../.gitpod/.env

@@ -47,49 +48,66 @@ tasks:
      yarn install
    command: yarn dev --host 0.0.0.0 --base ./

  - name: Documentation
    before: cd docs
    init: make install
    command: make dev

  - name: Welcome to Funkwhale development!
    env:
      COMPOSE_FILE: /workspace/funkwhale/.gitpod/docker-compose.yml
      ENV_FILE: /workspace/funkwhale/.gitpod/.env
      VUE_EDITOR: code
      DJANGO_SETTINGS_MODULE: config.settings.local
    init: pre-commit install
    init: |
      pre-commit install
      pre-commit run --all
    command: |
      pre-commit run --all && clear
      echo ""
      echo -e " ⠀⠀⠸⣿⣷⣦⣄⣠⣶⣾⣿⠇⠀⠀ You can now start developing Funkwhale with gitpod!"
      echo -e " ⠀⠀⠀⠈⠉⠻⣿⣿⠟⠉⠁⠀⠀⠀"
      echo -e " \u1b[34m⣀⠀⢀⡀⢀⣀\u1b[0m⠹⠇\u1b[34m⣀⡀⢀⡀⠀⣀ \u1b[0mTo sign in to the superuser account,"
      echo -e " \u1b[34m⢻⣇⠘⣧⡈⠻⠶⠶⠟⢁⣾⠃⣸⡟ \u1b[0mplease use these credentials:"
      echo -e " \u1b[34m⠀⠻⣦⡈⠻⠶⣶⣶⠶⠟⢁⣴⠟⠀"
      echo -e " \u1b[34m⠀⠀⠈⠻⠷⣦⣤⣤⣴⠾⠟⠁⠀⠀ gitpod\u1b[0m:\u1b[34mgitpod"
      echo -e " \u1b[34m⠀⠀⠈⠻⠷⣦⣤⣤⣴⠾⠟⠁⠀⠀ gitpod\u1b[0m:\u1b[34mfunkwhale"
      echo ""

ports:
  - port: 8000
  - name: Funkwhale
    port: 8000
    visibility: public
    onOpen: notify

  - port: 5000
  - name: Funkwhale API
    port: 5000
    visibility: private
    onOpen: ignore

  - port: 5432
  - name: PostgreSQL
    port: 5432
    visibility: private
    onOpen: ignore

  - port: 5678
  - name: Debugpy
    port: 5678
    visibility: private
    onOpen: ignore

  - port: 6379
  - name: Redis
    port: 6379
    visibility: private
    onOpen: ignore

  - port: 8080
  - name: Frontend
    port: 8080
    visibility: private
    onOpen: ignore

  - name: Documentation
    port: 8001
    visibility: public
    onOpen: notify

vscode:
  extensions:
    - Vue.volar
.gitpod/Dockerfile
@@ -1,9 +1,11 @@
FROM gitpod/workspace-full:2022-11-15-17-00-18
FROM gitpod/workspace-full:2023-10-25-20-43-33
USER gitpod

RUN sudo apt update -y \
    && sudo apt install libsasl2-dev libldap2-dev libssl-dev ffmpeg gettext -y

RUN pip install poetry pre-commit \
RUN pyenv install 3.11 && pyenv global 3.11

RUN pip install poetry pre-commit jinja2 towncrier \
    && poetry config virtualenvs.create true \
    && poetry config virtualenvs.in-project true
.gitpod/docker-compose.yml
@@ -18,7 +18,6 @@ services:
      - 6379:6379

  nginx:
    command: /entrypoint.sh
    env_file:
      - ./.env
    image: nginx

@@ -29,15 +28,16 @@ services:
    environment:
      - "NGINX_MAX_BODY_SIZE=100M"
      - "FUNKWHALE_API_IP=host.docker.internal"
      - "FUNKWHALE_API_HOST=host.docker.internal"
      - "FUNKWHALE_API_PORT=5000"
      - "FUNKWHALE_FRONT_IP=host.docker.internal"
      - "FUNKWHALE_FRONT_PORT=8080"
      - "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
      - "FUNKWHALE_PROTOCOL=https"
    volumes:
      - ../data/media:/protected/media:ro
      - ../data/media:/workspace/funkwhale/data/media:ro
      - ../data/music:/music:ro
      - ../data/staticfiles:/staticfiles:ro
      - ../data/staticfiles:/usr/share/nginx/html/staticfiles/:ro
      - ../deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
      - ../docker/nginx/conf.dev:/etc/nginx/nginx.conf.template:ro
      - ../docker/nginx/entrypoint.sh:/entrypoint.sh:ro
      - ../docker/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
      - ../front:/frontend:ro
.pre-commit-config.yaml
@@ -24,11 +24,22 @@ repos:
      - id: mixed-line-ending
      - id: trailing-whitespace

  - repo: https://github.com/python-poetry/poetry
    rev: 1.5.1
    hooks:
      - id: poetry-check
        files: ^api/pyproject.toml$
        args: [--directory=api]

      - id: poetry-lock
        files: ^api/pyproject.toml$
        args: [--directory=api, --check]

  - repo: https://github.com/asottile/pyupgrade
    rev: v3.9.0
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
        args: [--py38-plus]
        exclude: ^(api/.*/migrations/.*)

  - repo: https://github.com/psf/black
Deleted file (a VS Code launch configuration):
@@ -1,36 +0,0 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Attach python debugger",
      "type": "python",
      "request": "attach",
      "connect": {
        "host": "localhost",
        "port": 5678
      },
      "django": true
    },
    {
      "name": "Debug python",
      "type": "python",
      "request": "launch",
      "module": "uvicorn",
      "cwd": "${workspaceFolder}/api",
      "envFile": "${workspaceFolder}/.gitpod/.env",
      "args": [
        "--reload",
        "config.asgi:application",
        "--host",
        "0.0.0.0",
        "--port",
        "5000",
        "--reload-dir",
        "config/",
        "--reload-dir",
        "funkwhale_api/"
      ],
      "django": true
    }
  ]
}
Deleted file (VS Code snippets for the MyST documentation):
@@ -1,108 +0,0 @@
{
  "Callout": {
    "prefix": "callout",
    "description": "Add a callout to highlight information",
    "body": [
      ":::{${1|attention,caution,danger,error,hint,important,note,seealso,tip,warning|}} ${2:Optional title}",
      "${0:${TM_SELECTED_TEXT}}",
      ":::"
    ],
  },
  "Code tabs": {
    "prefix": "code-tabs",
    "description": "Insert a set of code tabs",
    "body": [
      ":::{tab-set-code}",
      "",
      "$0",
      "",
      ":::"
    ]
  },
  "Tab set": {
    "prefix": "tab-set",
    "description": "Insert a group of generic tabs",
    "body": [
      "::::{tab-set}",
      ":::{tab-item} ${1:Tab title}",
      "$2",
      ":::",
      "",
      ":::{tab-item} ${3:Tab title}",
      "$0",
      ":::",
      "",
      "::::"
    ]
  },
  "Insert fragment": {
    "prefix": "insert fragment",
    "description": "Insert reusable text from another file",
    "body": [
      ":::{include} ${1:full path to file}",
      ":start-after: ${2:the text to start after}",
      ":end-before: ${0:the text to end before}",
      ":::"
    ]
  },
  "Dropdown": {
    "prefix": "insert dropdown",
    "description": "Insert a dropdown (accordion)",
    "body": [
      ":::{dropdown} ${1:title}",
      "${0:${TM_SELECTED_TEXT}}",
      ":::"
    ]
  },
  "Versioning": {
    "prefix": "version change",
    "description": "Specify when a feature was added, changed, or deprecated",
    "body": [
      ":::{${1|versionadded,versionchanged,deprecated|}} ${2:v4.32.0}",
      "${0:${TM_SELECTED_TEXT}}",
      ":::"
    ]
  },
  "List table": {
    "prefix": "list table",
    "description": "Insert a table defined as a set of lists",
    "body": [
      ":::{list-table} ${1:Optional title}",
      ":header-rows: ${2:Number of header rows}",
      "",
      "* - ${3: First row column 1}",
      "  - ${4: First row column 2}",
      "* - ${5: Second row column 1}",
      "  - ${0: Second row column 2}",
      ":::"
    ]
  },
  "Guilabel": {
    "prefix": "guilabel",
    "description": "Format text as a GUI label (e.g. a button label or interface label)",
    "body": [
      "{guilabel}`${0:${TM_SELECTED_TEXT}}`"
    ]
  },
  "File": {
    "prefix": "file",
    "description": "Format text as a file name or path",
    "body": [
      "{file}`${0:${TM_SELECTED_TEXT}}`"
    ]
  },
  "Increase indent": {
    "prefix": "increase indent",
    "description": "Increase the indentation of all selected colon or backtick fences",
    "body": [
      "${TM_SELECTED_TEXT/((?<c>[`:])\\k<c>{2,})/$1$2/gm}"
    ]
  },
  "Deprecation warning": {
    "prefix": "insert deprecation warning",
    "description": "Inserts an inline deprecation badge. Useful in tables of parameters",
    "body": [
      "{bdg-warning}`Deprecated`"
    ]
  }
}
Deleted file (VS Code workspace settings):
@@ -1,14 +0,0 @@
{
  "python.defaultInterpreterPath": "/workspace/funkwhale/api/.venv/bin/python",
  "python.testing.cwd": "/workspace/funkwhale/api",
  "python.envFile": "/workspace/funkwhale/.gitpod/.env",
  "python.testing.pytestArgs": ["--cov=funkwhale_api", "tests/"],
  "python.testing.unittestEnabled": false,
  "python.testing.pytestEnabled": true,
  "vitest.enable": true,
  "vitest.commandLine": "yarn vitest",
  "i18n-ally.localesPaths": ["front/src/locales"],
  "i18n-ally.pathMatcher": "*.json",
  "i18n-ally.enabledFrameworks": ["vue"],
  "i18n-ally.keystyle": "nested"
}
One file diff suppressed because it is too large.
New file (a CODEOWNERS rule):
@@ -0,0 +1 @@
*.md @funkwhale/documentation
New file (Makefile for the Docker build metadata):
@@ -0,0 +1,19 @@
SHELL := bash
CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))

BAKE_FILES = \
	docker-bake.json \
	docker-bake.api.json \
	docker-bake.front.json

docker-bake.%.json:
	./scripts/build_metadata.py --format bake --bake-target $* --bake-image docker.io/funkwhale/$* > $@

docker-metadata: $(BAKE_FILES)

docker-build: docker-metadata
	docker buildx bake $(foreach FILE,$(BAKE_FILES), --file $(FILE)) --print $(BUILD_ARGS)
	docker buildx bake $(foreach FILE,$(BAKE_FILES), --file $(FILE)) $(BUILD_ARGS)

build-metadata:
	./scripts/build_metadata.py --format env | tee build_metadata.env
@@ -23,4 +23,4 @@ If you find a security issue or vulnerability, please report it on our [GitLab i

## Code of conduct

The Funkwhale collective adheres to a [code of conduct](https://funkwhale.audio/en_US/code-of-conduct) in all our community spaces. Please familiarize yourself with this code and follow it when participating in discussions in our spaces.
The Funkwhale collective adheres to a [code of conduct](https://funkwhale.audio/code-of-conduct) in all our community spaces. Please familiarize yourself with this code and follow it when participating in discussions in our spaces.
api/Dockerfile
@@ -1,14 +1,20 @@
FROM alpine:3.17 as pre-build
FROM alpine:3.17 as requirements

# We need this additional step to avoid having poetry's deps interacting with our
# dependencies. This is only required until alpine 3.16 is released, since this
# allows us to install poetry as a package.

RUN apk add --no-cache python3 py3-cryptography py3-pip poetry
COPY pyproject.toml poetry.lock /
RUN poetry export --without-hashes > requirements.txt
RUN poetry export --with dev --without-hashes > dev-requirements.txt
RUN set -eux; \
    apk add --no-cache \
        poetry \
        py3-cryptography \
        py3-pip \
        python3

COPY pyproject.toml poetry.lock /
RUN set -eux; \
    poetry export --without-hashes --extras typesense > requirements.txt; \
    poetry export --without-hashes --with dev > dev-requirements.txt;

FROM alpine:3.17 as builder

@@ -22,6 +28,7 @@ RUN set -eux; \
        cargo \
        curl \
        gcc \
        g++ \
        git \
        jpeg-dev \
        libffi-dev \

@@ -41,15 +48,15 @@ RUN set -eux; \
        py3-watchfiles=0.18.1-r0 \
        python3-dev

# create virtual env for next stage
# Create virtual env
RUN python3 -m venv --system-site-packages /venv
# emulate activation by prefixing PATH
ENV PATH="/venv/bin:/root/.local/bin:$PATH" VIRTUAL_ENV=/venv
ENV PATH="/venv/bin:$PATH"

COPY --from=pre-build /requirements.txt /requirements.txt
COPY --from=pre-build /dev-requirements.txt /dev-requirements.txt
COPY --from=requirements /requirements.txt /requirements.txt
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt

RUN set -eux; \
RUN --mount=type=cache,target=~/.cache/pip; \
    set -eux; \
    pip3 install --upgrade pip; \
    pip3 install setuptools wheel; \
    # Currently we are unable to reliably build rust-based packages on armv7. This

@@ -65,7 +72,8 @@ RUN set -eux; \
        watchfiles==0.18.1

ARG install_dev_deps=0
RUN set -eux; \
RUN --mount=type=cache,target=~/.cache/pip; \
    set -eux; \
    if [ "$install_dev_deps" = "1" ] ; then \
        grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
        | pip3 install -r /dev/stdin \

@@ -76,7 +84,7 @@ RUN set -eux; \
        watchfiles==0.18.1; \
    fi

FROM alpine:3.17 as image
FROM alpine:3.17 as production

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

@@ -98,16 +106,17 @@ RUN set -eux; \
        py3-pillow=9.3.0-r0 \
        py3-psycopg2=2.9.5-r0 \
        py3-watchfiles=0.18.1-r0 \
        python3
        python3 \
        tzdata

COPY --from=builder /venv /venv
# emulate activation by prefixing PATH
ENV PATH="/venv/bin:$PATH"

COPY . /app
WORKDIR /app

RUN set -eux; \
RUN --mount=type=cache,target=~/.cache/pip; \
    set -eux; \
    pip3 install --no-deps --editable .

ENV IS_DOCKER_SETUP=true
New file (api/Makefile):
@@ -0,0 +1,14 @@
SHELL := bash
CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))

.PHONY: install lint

install:
	poetry install

lint:
	poetry run pylint \
		--jobs=$(CPU_CORES) \
		--output-format=colorized \
		--recursive=true \
		config funkwhale_api tests
Deleted file:
@@ -1,3 +0,0 @@
# loads what is required to generate the swagger docs
# https://matrix.to/#/!nNBDNverFlbfNpReEO:matrix.org/$16579878472182UmZUv:tchncs.de?via=tchncs.de&via=matrix.org&via=juniorjpdj.pl
import config.schema  # noqa: F401
Deleted file (the v1 API URL configuration):
@@ -1,97 +0,0 @@
from django.conf.urls import include, url
from rest_framework import routers
from rest_framework.urlpatterns import format_suffix_patterns

from funkwhale_api.activity import views as activity_views
from funkwhale_api.audio import views as audio_views
from funkwhale_api.common import routers as common_routers
from funkwhale_api.common import views as common_views
from funkwhale_api.music import views
from funkwhale_api.playlists import views as playlists_views
from funkwhale_api.subsonic.views import SubsonicViewSet
from funkwhale_api.tags import views as tags_views

router = common_routers.OptionalSlashRouter()
router.register(r"activity", activity_views.ActivityViewSet, "activity")
router.register(r"tags", tags_views.TagViewSet, "tags")
router.register(r"plugins", common_views.PluginViewSet, "plugins")
router.register(r"tracks", views.TrackViewSet, "tracks")
router.register(r"uploads", views.UploadViewSet, "uploads")
router.register(r"libraries", views.LibraryViewSet, "libraries")
router.register(r"listen", views.ListenViewSet, "listen")
router.register(r"stream", views.StreamViewSet, "stream")
router.register(r"artists", views.ArtistViewSet, "artists")
router.register(r"channels", audio_views.ChannelViewSet, "channels")
router.register(r"subscriptions", audio_views.SubscriptionsViewSet, "subscriptions")
router.register(r"albums", views.AlbumViewSet, "albums")
router.register(r"licenses", views.LicenseViewSet, "licenses")
router.register(r"playlists", playlists_views.PlaylistViewSet, "playlists")
router.register(r"mutations", common_views.MutationViewSet, "mutations")
router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
v1_patterns = router.urls

subsonic_router = routers.SimpleRouter(trailing_slash=False)
subsonic_router.register(r"subsonic/rest", SubsonicViewSet, basename="subsonic")


v1_patterns += [
    url(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
    url(
        r"^instance/",
        include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
    ),
    url(
        r"^manage/",
        include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
    ),
    url(
        r"^moderation/",
        include(
            ("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
        ),
    ),
    url(
        r"^federation/",
        include(
            ("funkwhale_api.federation.api_urls", "federation"), namespace="federation"
        ),
    ),
    url(
        r"^providers/",
        include(("funkwhale_api.providers.urls", "providers"), namespace="providers"),
    ),
    url(
        r"^favorites/",
        include(("funkwhale_api.favorites.urls", "favorites"), namespace="favorites"),
    ),
    url(r"^search$", views.Search.as_view(), name="search"),
    url(
        r"^radios/",
        include(("funkwhale_api.radios.urls", "radios"), namespace="radios"),
    ),
    url(
        r"^history/",
        include(("funkwhale_api.history.urls", "history"), namespace="history"),
    ),
    url(
        r"^",
        include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
    ),
    # XXX: remove if Funkwhale 1.1
    url(
        r"^users/",
        include(("funkwhale_api.users.api_urls", "users"), namespace="users-nested"),
    ),
    url(
        r"^oauth/",
        include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
    ),
    url(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
    url(
        r"^text-preview/?$", common_views.TextPreviewView.as_view(), name="text-preview"
    ),
]

urlpatterns = [
    url(r"^v1/", include((v1_patterns, "v1"), namespace="v1"))
] + format_suffix_patterns(subsonic_router.urls, allowed=["view"])
@@ -13,7 +13,29 @@ APPS_DIR = ROOT_DIR.path("funkwhale_api")

env = environ.Env()
ENV = env
LOGLEVEL = env("LOGLEVEL", default="info").upper()
# If DEBUG is `true`, we automatically set the loglevel to "DEBUG"
# If DEBUG is `false`, we try to read the level from the LOGLEVEL environment variable and default to "INFO"
LOGLEVEL = (
    "DEBUG" if env.bool("DEBUG", False) else env("LOGLEVEL", default="info").upper()
)
"""
Default logging level for the Funkwhale processes.

.. note::
    The `DEBUG` variable overrides the `LOGLEVEL` if it is set to `TRUE`.

    The `LOGLEVEL` value only applies if `DEBUG` is `false` or not present.

Available levels:

- ``debug``
- ``info``
- ``warning``
- ``error``
- ``critical``

"""

IS_DOCKER_SETUP = env.bool("IS_DOCKER_SETUP", False)

@@ -35,19 +57,6 @@ if env("FUNKWHALE_SENTRY_DSN", default=None) is not None:
    )
    sentry_sdk.set_tag("instance", env("FUNKWHALE_HOSTNAME"))

"""
Default logging level for the Funkwhale processes

Available levels:

- ``debug``
- ``info``
- ``warning``
- ``error``
- ``critical``

"""  # pylint: disable=W0105

LOGGING_CONFIG = None
logging.config.dictConfig(
    {

@@ -187,9 +196,7 @@ request errors related to this.
FUNKWHALE_SPA_HTML_CACHE_DURATION = env.int(
    "FUNKWHALE_SPA_HTML_CACHE_DURATION", default=60 * 15
)
FUNKWHALE_EMBED_URL = env(
    "FUNKWHALE_EMBED_URL", default=FUNKWHALE_URL + "/front/embed.html"
)
FUNKWHALE_EMBED_URL = env("FUNKWHALE_EMBED_URL", default=FUNKWHALE_URL + "/embed.html")
FUNKWHALE_SPA_REWRITE_MANIFEST = env.bool(
    "FUNKWHALE_SPA_REWRITE_MANIFEST", default=True
)

@@ -272,6 +279,7 @@ LOCAL_APPS = (
    "funkwhale_api.playlists",
    "funkwhale_api.subsonic",
    "funkwhale_api.tags",
    "funkwhale_api.typesense",
)

# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps

@@ -281,9 +289,9 @@ ADDITIONAL_APPS = env.list("ADDITIONAL_APPS", default=[])
List of Django apps to load in addition to Funkwhale plugins and apps.
"""
INSTALLED_APPS = (
    DJANGO_APPS
    LOCAL_APPS
    + DJANGO_APPS
    + THIRD_PARTY_APPS
    + LOCAL_APPS
    + tuple(ADDITIONAL_APPS)
    + tuple(plugins.trigger_filter(plugins.PLUGINS_APPS, [], enabled=True))
)

@@ -822,7 +830,7 @@ If you're using password auth (the extra slash is important)
.. note::

    If you want to use Redis over unix sockets, you also need to update
    :attr:`CELERY_BROKER_URL`, because the scheme differ from the one used by
    :attr:`CELERY_BROKER_URL`, because the scheme differs from the one used by
    :attr:`CACHE_URL`.

"""

@@ -873,7 +881,7 @@ to use a different server or use Redis sockets to connect.

Example:

- ``redis://127.0.0.1:6379/0``
- ``unix://127.0.0.1:6379/0``
- ``redis+socket:///run/redis/redis.sock?virtual_host=0``

"""

@@ -934,6 +942,11 @@ CELERY_BEAT_SCHEDULE = {
        ),
        "options": {"expires": 60 * 60},
    },
    "typesense.build_canonical_index": {
        "task": "typesense.build_canonical_index",
        "schedule": crontab(day_of_week="*/2", minute="0", hour="3"),
        "options": {"expires": 60 * 60 * 24},
    },
}

if env.bool("ADD_ALBUM_TAGS_FROM_TRACKS", default=True):

@@ -1459,3 +1472,22 @@ instead of request header.

HASHING_ALGORITHM = "sha256"
HASHING_CHUNK_SIZE = 1024 * 100

"""
Typesense settings
"""
TYPESENSE_API_KEY = env("TYPESENSE_API_KEY", default=None)
"""Typesense API key. This needs to be defined in the .env file for Typesense to be activated."""
TYPESENSE_PORT = env("TYPESENSE_PORT", default="8108")
"""Typesense listening port"""
TYPESENSE_PROTOCOL = env("TYPESENSE_PROTOCOL", default="http")
"""Typesense listening protocol"""
TYPESENSE_HOST = env(
    "TYPESENSE_HOST",
    default="typesense" if IS_DOCKER_SETUP else "localhost",
)
"""
Typesense hostname. Defaults to `localhost` on non-Docker deployments and to `typesense` on
Docker deployments.
"""
TYPESENSE_NUM_TYPO = env("TYPESENSE_NUM_TYPO", default=5)
@@ -76,7 +76,7 @@ DEBUG_TOOLBAR_PANELS = [

# django-extensions
# ------------------------------------------------------------------------------
# INSTALLED_APPS += ('django_extensions', )
INSTALLED_APPS += ("django_extensions",)

INSTALLED_APPS += ("drf_spectacular",)

@@ -149,3 +149,5 @@ MIDDLEWARE = (
    "funkwhale_api.common.middleware.ProfilerMiddleware",
    "funkwhale_api.common.middleware.PymallocMiddleware",
) + MIDDLEWARE

TYPESENSE_API_KEY = "apikey"
New file (a settings module):
@@ -0,0 +1,9 @@
import os

os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")

from .common import *  # noqa

DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"
@@ -8,7 +8,11 @@ v2_patterns = router.urls
v2_patterns += [
    url(
        r"^instance/",
        include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
        include(("funkwhale_api.instance.urls_v2", "instance"), namespace="instance"),
    ),
    url(
        r"^radios/",
        include(("funkwhale_api.radios.urls_v2", "radios"), namespace="radios"),
    ),
]
@@ -1,7 +1,4 @@
__version__ = "1.3.4"
__version_info__ = tuple(
    [
        int(num) if num.isdigit() else num
        for num in __version__.replace("-", ".", 1).split(".")
    ]
)
from importlib.metadata import version as get_version

version = get_version("funkwhale_api")
__version__ = version
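Editor's note: the hunk above drops the hand-maintained version constants in favour of importlib.metadata. A minimal sketch (not part of the diff) of how the removed __version_info__ tuple can still be derived from the packaged version string, should callers need it:

from importlib.metadata import version

__version__ = version("funkwhale_api")
# Same parsing the removed code performed: "1.3.4-rc1" -> (1, 3, 4, "rc1")
__version_info__ = tuple(
    int(num) if num.isdigit() else num
    for num in __version__.replace("-", ".", 1).split(".")
)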
@@ -1,10 +1,10 @@
import datetime
import logging
import sys
import time
import uuid

import feedparser
import pytz
import requests
from django.conf import settings
from django.db import transaction

@@ -33,6 +33,11 @@ from funkwhale_api.users import serializers as users_serializers

from . import categories, models

if sys.version_info < (3, 9):
    from backports.zoneinfo import ZoneInfo
else:
    from zoneinfo import ZoneInfo

logger = logging.getLogger(__name__)


@@ -769,7 +774,7 @@ class RssFeedItemSerializer(serializers.Serializer):
        if "published_parsed" in validated_data:
            track_defaults["creation_date"] = datetime.datetime.fromtimestamp(
                time.mktime(validated_data["published_parsed"])
            ).replace(tzinfo=pytz.utc)
            ).replace(tzinfo=ZoneInfo("UTC"))

        upload_defaults = {
            "source": validated_data["links"]["audio"]["source"],
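Editor's note: the hunks above replace pytz with the standard-library zoneinfo module (with a backport below Python 3.9). A minimal sketch (not part of the diff) showing that the swap is behaviour-preserving at this call site:

import datetime
import time
from zoneinfo import ZoneInfo  # backports.zoneinfo on Python < 3.9

published_parsed = time.gmtime(0)  # stand-in for a feedparser struct_time
creation_date = datetime.datetime.fromtimestamp(
    time.mktime(published_parsed)
).replace(tzinfo=ZoneInfo("UTC"))
# .replace(tzinfo=ZoneInfo("UTC")) attaches the same fixed UTC offset that
# .replace(tzinfo=pytz.utc) did, so downstream comparisons are unchanged.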
@@ -38,7 +38,7 @@ def handler_create_user(
    utils.logger.debug("Creating user…")
    user = serializer.save(request=request)
    utils.logger.debug("Setting permissions and other attributes…")
    user.is_staff = is_staff
    user.is_staff = is_staff or is_superuser  # Always set staff if superuser is set
    user.upload_quota = upload_quota
    user.is_superuser = is_superuser
    for permission in permissions:
@@ -1,5 +1,6 @@
from django import forms
from django.db.models import Q
from django.db.models.functions import Lower
from django_filters import rest_framework as filters
from django_filters import widgets
from drf_spectacular.utils import extend_schema_field

@@ -239,3 +240,19 @@ class ActorScopeFilter(filters.CharFilter):
            raise EmptyQuerySet()

        return Q(**{self.actor_field: actor})


class CaseInsensitiveNameOrderingFilter(filters.OrderingFilter):
    def filter(self, qs, value):
        order_by = []

        if value is None:
            return qs

        for param in value:
            if param == "name":
                order_by.append(Lower("name"))
            else:
                order_by.append(self.get_ordering_value(param))

        return qs.order_by(*order_by)
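Editor's note: a short usage sketch (not part of the diff) of the ordering behaviour the new filter produces; the Tag model here is a hypothetical stand-in for any model with a name column:

from django.db.models.functions import Lower

# Ordering by "name" goes through Lower(), so "Banana" and "banana" sort together:
queryset = Tag.objects.order_by(Lower("name"))
# Any other ordering parameter falls back to get_ordering_value(), i.e. the plain column.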
New file (an override of Django's createsuperuser command):
@@ -0,0 +1,22 @@
import os

from django.contrib.auth.management.commands.createsuperuser import (
    Command as BaseCommand,
)
from django.core.management.base import CommandError


class Command(BaseCommand):
    def handle(self, *apps_label, **options):
        """
        Creating Django superusers would bypass some of our username checks,
        which can lead to unexpected behaviour. We therefore prohibit the
        execution of the command.
        """
        if not os.environ.get("FORCE") == "1":
            raise CommandError(
                "Running createsuperuser on your Funkwhale instance bypasses some of our checks "
                "which can lead to unexpected behavior of your instance. We therefore suggest to "
                "run `funkwhale-manage fw users create --superuser` instead."
            )

        return super().handle(*apps_label, **options)
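Editor's note: a usage sketch (not part of the diff) for the guard above — the stock command only runs when FORCE=1 is present in the environment:

import os
from django.core.management import call_command

os.environ["FORCE"] = "1"  # deliberately bypass the Funkwhale guard
call_command(
    "createsuperuser", interactive=False, username="admin", email="admin@example.org"
)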
@@ -36,22 +36,7 @@ class Command(BaseCommand):
        self.stdout.write("")

    def init(self):
        try:
            user = User.objects.get(username="gitpod")
        except Exception:
            call_command(
                "createsuperuser",
                username="gitpod",
                email="gitpod@example.com",
                no_input=False,
            )
            user = User.objects.get(username="gitpod")

        user.set_password("gitpod")
        if not user.actor:
            user.create_actor()

        user.save()
        user = User.objects.get(username="gitpod")

        # Allow anonymous access
        preferences.set("common__api_authentication_required", False)
New file (a management command):
@@ -0,0 +1,95 @@
import pathlib
from argparse import RawTextHelpFormatter

from django.core.management.base import BaseCommand
from django.db import transaction

from funkwhale_api.music import models


class Command(BaseCommand):
    help = """
    Update the reference for Uploads that have been imported with --in-place and are now moved to s3.

    Please note: This does not move any file! Make sure you already moved the files to your s3 bucket.

    Specify --source to filter the references to update to files from a specific in-place directory. If no
    --source is given, all in-place imported track references will be updated.

    Specify --target to specify a subdirectory in the S3 bucket where you moved the files. If no --target is
    given, the file is expected to be stored in the same path as before.

    Examples:

    Music File: /music/Artist/Album/track.ogg
    --source: /music
    --target unset

    All files imported from /music will be updated and expected to be in the same folder structure in the bucket

    Music File: /music/Artist/Album/track.ogg
    --source: /music
    --target: /in_place

    The music file is expected to be stored in the bucket in the directory /in_place/Artist/Album/track.ogg
    """

    def create_parser(self, *args, **kwargs):
        parser = super().create_parser(*args, **kwargs)
        parser.formatter_class = RawTextHelpFormatter
        return parser

    def add_arguments(self, parser):
        parser.add_argument(
            "--no-dry-run",
            action="store_false",
            dest="dry_run",
            default=True,
            help="Disable dry run mode and apply updates for real on the database",
        )
        parser.add_argument(
            "--source",
            type=pathlib.Path,
            required=True,
            help="Specify the path of the directory where the files originally were stored to update their reference.",
        )
        parser.add_argument(
            "--target",
            type=pathlib.Path,
            help="Specify a subdirectory in the S3 bucket where you moved the files to.",
        )

    @transaction.atomic
    def handle(self, *args, **options):
        if options["dry_run"]:
            self.stdout.write("Dry-run on, will not touch the database")
        else:
            self.stdout.write("Dry-run off, *changing the database*")
        self.stdout.write("")

        prefix = f"file://{options['source']}"

        to_change = models.Upload.objects.filter(source__startswith=prefix)

        self.stdout.write(f"Found {to_change.count()} uploads to update.")

        target = options.get("target")
        if target is None:
            target = options["source"]

        for upl in to_change:
            upl.audio_file = str(upl.source).replace(str(prefix), str(target))
            upl.source = None
            self.stdout.write(f"Upload expected in {upl.audio_file}")
            if not options["dry_run"]:
                upl.save()

        self.stdout.write("")
        if options["dry_run"]:
            self.stdout.write(
                "Nothing was updated, rerun this command with --no-dry-run to apply the changes"
            )
        else:
            self.stdout.write("Updating completed!")

        self.stdout.write("")
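Editor's note: an invocation sketch (not part of the diff). The command's file name is not shown in this commit view, so update_s3_references below is a hypothetical name:

from django.core.management import call_command

# Dry run first (the default), then apply for real with --no-dry-run:
call_command("update_s3_references", "--source", "/music", "--target", "/in_place")
call_command(
    "update_s3_references", "--source", "/music", "--target", "/in_place", "--no-dry-run"
)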
@@ -26,7 +26,7 @@ class Command(BaseCommand):
            script = available_scripts[name]
        except KeyError:
            raise CommandError(
                "{} is not a valid script. Run funkwhale-manage for a "
                "{} is not a valid script. Run funkwhale-manage script for a "
                "list of available scripts".format(name)
            )

@@ -43,7 +43,7 @@ class Command(BaseCommand):
    def show_help(self):
        self.stdout.write("")
        self.stdout.write("Available scripts:")
        self.stdout.write("Launch with: funkwhale-manage <script_name>")
        self.stdout.write("Launch with: funkwhale-manage script <script_name>")
        available_scripts = self.get_scripts()
        for name, script in sorted(available_scripts.items()):
            self.stdout.write("")
@@ -0,0 +1,43 @@
from django.core.management.commands.migrate import Command as BaseCommand

from funkwhale_api.federation import factories
from funkwhale_api.federation.models import Actor


class Command(BaseCommand):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.help = "Helper to generate randomized testdata"
        self.type_choices = {"notifications": self.handle_notifications}
        self.missing_args_message = f"Please specify one of the following sub-commands: { *self.type_choices.keys(), }"

    def add_arguments(self, parser):
        subparsers = parser.add_subparsers(dest="subcommand")

        notification_parser = subparsers.add_parser("notifications")
        notification_parser.add_argument(
            "username", type=str, help="Username to send the notifications to"
        )
        notification_parser.add_argument(
            "--count", type=int, help="Number of elements to create", default=1
        )

    def handle(self, *args, **options):
        self.type_choices[options["subcommand"]](options)

    def handle_notifications(self, options):
        self.stdout.write(
            f"Create {options['count']} notification(s) for {options['username']}"
        )
        try:
            actor = Actor.objects.get(preferred_username=options["username"])
        except Actor.DoesNotExist:
            self.stdout.write(
                "The user you want to create notifications for does not exist"
            )
            return

        follow_activity = factories.ActivityFactory(type="Follow")
        for _ in range(options["count"]):
            factories.InboxItemFactory(actor=actor, activity=follow_activity)
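The sub-command parser can also be driven from code for manual testing; note that call_command support for subparsers varies across Django versions, so treat this as a sketch (the registered command name is not shown in this diff; "testdata" is a placeholder):

    from django.core.management import call_command

    # Creates 5 follow notifications for the actor "alice".
    call_command("testdata", "notifications", "alice", "--count", "5")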
@@ -150,7 +150,9 @@ def get_default_head_tags(path):
        {
            "tag": "meta",
            "property": "og:image",
            "content": utils.join_url(settings.FUNKWHALE_URL, "/front/favicon.png"),
            "content": utils.join_url(
                settings.FUNKWHALE_URL, "/android-chrome-512x512.png"
            ),
        },
        {
            "tag": "meta",
@@ -349,7 +349,7 @@ class ScopesSerializer(serializers.Serializer):

class IdentSerializer(serializers.Serializer):
    type = serializers.CharField()
    id = serializers.IntegerField()
    id = serializers.CharField()


class RateLimitSerializer(serializers.Serializer):
@@ -7,7 +7,7 @@ from rest_framework import throttling as rest_throttling

def get_ident(user, request):
    if user and user.is_authenticated:
        return {"type": "authenticated", "id": user.pk}
        return {"type": "authenticated", "id": f"{user.pk}"}
    ident = rest_throttling.BaseThrottle().get_ident(request)

    return {"type": "anonymous", "id": ident}
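The two changes above belong together: anonymous idents are client addresses, i.e. strings, so rendering the authenticated id as a string keeps both variants uniform for IdentSerializer. The two shapes, as Python literals (values invented):

    {"type": "authenticated", "id": "42"}
    {"type": "anonymous", "id": "198.51.100.7"}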
@@ -477,14 +477,13 @@ def monkey_patch_request_build_absolute_uri():
def get_file_hash(file, algo=None, chunk_size=None, full_read=False):
    algo = algo or settings.HASHING_ALGORITHM
    chunk_size = chunk_size or settings.HASHING_CHUNK_SIZE
    handler = getattr(hashlib, algo)
    hash = handler()
    hasher = hashlib.new(algo)
    file.seek(0)
    if full_read:
        for byte_block in iter(lambda: file.read(chunk_size), b""):
            hash.update(byte_block)
            hasher.update(byte_block)
    else:
        # sometimes, it's useful to only hash the beginning of the file, e.g
        # to avoid a lot of I/O when crawling large libraries
        hash.update(file.read(chunk_size))
    return f"{algo}:{hash.hexdigest()}"
        hasher.update(file.read(chunk_size))
    return f"{algo}:{hasher.hexdigest()}"
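hashlib.new() resolves any algorithm name known to the interpreter (see hashlib.algorithms_available), while getattr(hashlib, algo) only finds the guaranteed module-level constructors. A quick standalone check:

    import hashlib

    hasher = hashlib.new("sha256")  # equivalent to hashlib.sha256()
    hasher.update(b"funkwhale")
    print(f"sha256:{hasher.hexdigest()}")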
@@ -17,31 +17,40 @@ def submit_listen(listening, conf, **kwargs):
        return

    logger = PLUGIN["logger"]
    logger.info("Submitting listening to Majola at %s", server_url)
    payload = get_payload(listening, api_key)
    logger.debug("Majola payload: %r", payload)
    logger.info("Submitting listening to Maloja at %s", server_url)
    payload = get_payload(listening, api_key, conf)
    logger.debug("Maloja payload: %r", payload)
    url = server_url.rstrip("/") + "/apis/mlj_1/newscrobble"
    session = plugins.get_session()
    response = session.post(url, json=payload)
    response.raise_for_status()
    details = json.loads(response.text)
    if details["status"] == "success":
        logger.info("Majola listening submitted successfully")
        logger.info("Maloja listening submitted successfully")
    else:
        raise MalojaException(response.text)


def get_payload(listening, api_key):
def get_payload(listening, api_key, conf):
    track = listening.track

    # See https://github.com/krateng/maloja/blob/master/API.md
    payload = {
        "key": api_key,
        "artists": [track.artist.name],
        "title": track.title,
        "time": int(listening.creation_date.timestamp()),
        "nofix": bool(conf.get("nofix")),
    }

    if track.album:
        if track.album.title:
            payload["album"] = track.album.title
        if track.album.artist:
            payload["albumartists"] = [track.album.artist.name]

    upload = track.uploads.filter(duration__gte=0).first()
    if upload:
        payload["length"] = upload.duration

    return payload
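For reference, the body posted to /apis/mlj_1/newscrobble now looks roughly like this (values invented; keys per the Maloja API doc linked in the code):

    {
        "key": "<maloja api key>",
        "artists": ["Artist"],
        "title": "Track",
        "time": 1700000000,
        "nofix": False,
        "album": "Album",                  # only when the track has an album title
        "albumartists": ["Album Artist"],  # only when the album has an artist
        "length": 180,                     # only when an upload with a duration exists
    }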
@@ -5,10 +5,16 @@ PLUGIN = plugins.get_plugin_config(
    label="Maloja",
    description="A plugin that allows you to submit your listens to your Maloja server.",
    homepage="https://docs.funkwhale.audio/users/builtinplugins.html#maloja-plugin",
    version="0.1.1",
    version="0.2",
    user=True,
    conf=[
        {"name": "server_url", "type": "text", "label": "Maloja server URL"},
        {"name": "api_key", "type": "text", "label": "Your Maloja API key"},
        {
            "name": "nofix",
            "type": "boolean",
            "label": "Skip server-side metadata fixing",
            "default": False,
        },
    ],
)
@@ -1,8 +1,8 @@
import datetime
import logging
import sys

import cryptography.exceptions
import pytz
import requests
import requests_http_message_signatures
from django import forms

@@ -11,6 +11,11 @@ from django.utils.http import parse_http_date

from . import exceptions, utils

if sys.version_info < (3, 9):
    from backports.zoneinfo import ZoneInfo
else:
    from zoneinfo import ZoneInfo

logger = logging.getLogger(__name__)

# the request Date should be between now - 30s and now + 30s

@@ -26,7 +31,7 @@ def verify_date(raw_date):
    except ValueError as e:
        raise forms.ValidationError(str(e))
    dt = datetime.datetime.utcfromtimestamp(ts)
    dt = dt.replace(tzinfo=pytz.utc)
    dt = dt.replace(tzinfo=ZoneInfo("UTC"))
    delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
    now = timezone.now()
    if dt < now - delta or dt > now + delta:
@@ -1,3 +1,4 @@
import pycountry
from django.core.validators import FileExtensionValidator
from django.forms import widgets
from dynamic_preferences import types

@@ -170,3 +171,18 @@ class Banner(ImagePreference):
    default = None
    help_text = "This banner will be displayed on your pod's landing and about page. At least 600x100px recommended."
    field_kwargs = {"required": False}


@global_preferences_registry.register
class Location(types.ChoicePreference):
    show_in_api = True
    section = instance
    name = "location"
    verbose_name = "Server Location"
    default = ""
    choices = [(country.alpha_2, country.name) for country in pycountry.countries]
    help_text = (
        "The country or territory in which your server is located. This is displayed in the server's Nodeinfo "
        "endpoint."
    )
    field_kwargs = {"choices": choices, "required": False}
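pycountry wraps the ISO 3166 dataset, so the generated choices are (alpha-2 code, name) pairs; a quick look at what this produces:

    import pycountry

    choices = [(country.alpha_2, country.name) for country in pycountry.countries]
    print(choices[:2])  # e.g. [('AW', 'Aruba'), ('AF', 'Afghanistan')]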
@@ -12,6 +12,17 @@ class SoftwareSerializer(serializers.Serializer):
        return "funkwhale"


class SoftwareSerializer_v2(SoftwareSerializer):
    repository = serializers.SerializerMethodField()
    homepage = serializers.SerializerMethodField()

    def get_repository(self, obj):
        return "https://dev.funkwhale.audio/funkwhale/funkwhale"

    def get_homepage(self, obj):
        return "https://funkwhale.audio"


class ServicesSerializer(serializers.Serializer):
    inbound = serializers.ListField(child=serializers.CharField(), default=[])
    outbound = serializers.ListField(child=serializers.CharField(), default=[])

@@ -31,6 +42,8 @@ class UsersUsageSerializer(serializers.Serializer):

class UsageSerializer(serializers.Serializer):
    users = UsersUsageSerializer()
    localPosts = serializers.IntegerField(required=False)
    localComments = serializers.IntegerField(required=False)


class TotalCountSerializer(serializers.Serializer):

@@ -92,19 +105,14 @@ class MetadataSerializer(serializers.Serializer):
    private = serializers.SerializerMethodField()
    shortDescription = serializers.SerializerMethodField()
    longDescription = serializers.SerializerMethodField()
    rules = serializers.SerializerMethodField()
    contactEmail = serializers.SerializerMethodField()
    terms = serializers.SerializerMethodField()
    nodeName = serializers.SerializerMethodField()
    banner = serializers.SerializerMethodField()
    defaultUploadQuota = serializers.SerializerMethodField()
    library = serializers.SerializerMethodField()
    supportedUploadExtensions = serializers.ListField(child=serializers.CharField())
    allowList = serializers.SerializerMethodField()
    reportTypes = ReportTypeSerializer(source="report_types", many=True)
    funkwhaleSupportMessageEnabled = serializers.SerializerMethodField()
    instanceSupportMessage = serializers.SerializerMethodField()
    endpoints = EndpointsSerializer()
    usage = MetadataUsageSerializer(source="stats", required=False)

    def get_private(self, obj) -> bool:

@@ -116,15 +124,9 @@ class MetadataSerializer(serializers.Serializer):
    def get_longDescription(self, obj) -> str:
        return obj["preferences"].get("instance__long_description")

    def get_rules(self, obj) -> str:
        return obj["preferences"].get("instance__rules")

    def get_contactEmail(self, obj) -> str:
        return obj["preferences"].get("instance__contact_email")

    def get_terms(self, obj) -> str:
        return obj["preferences"].get("instance__terms")

    def get_nodeName(self, obj) -> str:
        return obj["preferences"].get("instance__name")

@@ -137,15 +139,6 @@ class MetadataSerializer(serializers.Serializer):
    def get_defaultUploadQuota(self, obj) -> int:
        return obj["preferences"].get("users__upload_quota")

    @extend_schema_field(NodeInfoLibrarySerializer)
    def get_library(self, obj):
        data = obj["stats"] or {}
        data["federationEnabled"] = obj["preferences"].get("federation__enabled")
        data["anonymousCanListen"] = not obj["preferences"].get(
            "common__api_authentication_required"
        )
        return NodeInfoLibrarySerializer(data).data

    @extend_schema_field(AllowListStatSerializer)
    def get_allowList(self, obj):
        return AllowListStatSerializer(

@@ -166,6 +159,62 @@ class MetadataSerializer(serializers.Serializer):
        return MetadataUsageSerializer(obj["stats"]).data


class Metadata20Serializer(MetadataSerializer):
    library = serializers.SerializerMethodField()
    reportTypes = ReportTypeSerializer(source="report_types", many=True)
    endpoints = EndpointsSerializer()
    rules = serializers.SerializerMethodField()
    terms = serializers.SerializerMethodField()

    def get_rules(self, obj) -> str:
        return obj["preferences"].get("instance__rules")

    def get_terms(self, obj) -> str:
        return obj["preferences"].get("instance__terms")

    @extend_schema_field(NodeInfoLibrarySerializer)
    def get_library(self, obj):
        data = obj["stats"] or {}
        data["federationEnabled"] = obj["preferences"].get("federation__enabled")
        data["anonymousCanListen"] = not obj["preferences"].get(
            "common__api_authentication_required"
        )
        return NodeInfoLibrarySerializer(data).data


class MetadataContentLocalSerializer(serializers.Serializer):
    artists = serializers.IntegerField()
    releases = serializers.IntegerField()
    recordings = serializers.IntegerField()
    hoursOfContent = serializers.IntegerField()


class MetadataContentCategorySerializer(serializers.Serializer):
    name = serializers.CharField()
    count = serializers.IntegerField()


class MetadataContentSerializer(serializers.Serializer):
    local = MetadataContentLocalSerializer()
    topMusicCategories = MetadataContentCategorySerializer(many=True)
    topPodcastCategories = MetadataContentCategorySerializer(many=True)


class Metadata21Serializer(MetadataSerializer):
    languages = serializers.ListField(child=serializers.CharField())
    location = serializers.CharField()
    content = MetadataContentSerializer()
    features = serializers.ListField(child=serializers.CharField())
    codeOfConduct = serializers.SerializerMethodField()

    def get_codeOfConduct(self, obj) -> str:
        return (
            full_url("/about/pod#rules")
            if obj["preferences"].get("instance__rules")
            else ""
        )


class NodeInfo20Serializer(serializers.Serializer):
    version = serializers.SerializerMethodField()
    software = SoftwareSerializer()

@@ -196,9 +245,36 @@ class NodeInfo20Serializer(serializers.Serializer):
        usage = {"users": {"total": 0, "activeMonth": 0, "activeHalfyear": 0}}
        return UsageSerializer(usage).data

    @extend_schema_field(MetadataSerializer)
    @extend_schema_field(Metadata20Serializer)
    def get_metadata(self, obj):
        return MetadataSerializer(obj).data
        return Metadata20Serializer(obj).data


class NodeInfo21Serializer(NodeInfo20Serializer):
    version = serializers.SerializerMethodField()
    software = SoftwareSerializer_v2()

    def get_version(self, obj) -> str:
        return "2.1"

    @extend_schema_field(UsageSerializer)
    def get_usage(self, obj):
        usage = None
        if obj["preferences"]["instance__nodeinfo_stats_enabled"]:
            usage = obj["stats"]
            usage["localPosts"] = 0
            usage["localComments"] = 0
        else:
            usage = {
                "users": {"total": 0, "activeMonth": 0, "activeHalfyear": 0},
                "localPosts": 0,
                "localComments": 0,
            }
        return UsageSerializer(usage).data

    @extend_schema_field(Metadata21Serializer)
    def get_metadata(self, obj):
        return Metadata21Serializer(obj).data


class SpaManifestIconSerializer(serializers.Serializer):
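Piecing the serializers together, the 2.1 metadata adds roughly the following on top of the 2.0 fields (a sketch with invented values, as a Python literal):

    {
        "languages": ["eng"],
        "location": "DE",
        "content": {
            "local": {"artists": 10, "releases": 20, "recordings": 200, "hoursOfContent": 12},
            "topMusicCategories": [{"name": "rock", "count": 42}],
            "topPodcastCategories": [{"name": "comedy", "count": 3}],
        },
        "features": ["channels", "podcasts", "federation"],
        "codeOfConduct": "https://pod.example/about/pod#rules",
    }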
@@ -1,6 +1,6 @@
import datetime

from django.db.models import Sum
from django.db.models import Count, F, Sum
from django.utils import timezone

from funkwhale_api.favorites.models import TrackFavorite

@@ -22,6 +22,39 @@ def get():
    }


def get_content():
    return {
        "local": {
            "artists": get_artists(),
            "releases": get_albums(),
            "recordings": get_tracks(),
            "hoursOfContent": get_music_duration(),
        },
        "topMusicCategories": get_top_music_categories(),
        "topPodcastCategories": get_top_podcast_categories(),
    }


def get_top_music_categories():
    return (
        models.Track.objects.filter(artist__content_category="music")
        .exclude(tagged_items__tag_id=None)
        .values(name=F("tagged_items__tag__name"))
        .annotate(count=Count("name"))
        .order_by("-count")[:3]
    )


def get_top_podcast_categories():
    return (
        models.Track.objects.filter(artist__content_category="podcast")
        .exclude(tagged_items__tag_id=None)
        .values(name=F("tagged_items__tag__name"))
        .annotate(count=Count("name"))
        .order_by("-count")[:3]
    )


def get_users():
    qs = User.objects.filter(is_active=True)
    now = timezone.now()
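Each category queryset evaluates to at most three rows of the form {"name": ..., "count": ...}, which is exactly what MetadataContentCategorySerializer above expects; e.g. (invented data):

    >>> list(get_top_music_categories())
    [{'name': 'rock', 'count': 42}, {'name': 'jazz', 'count': 17}, {'name': 'pop', 'count': 9}]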
@@ -8,7 +8,7 @@ admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")

urlpatterns = [
    url(r"^nodeinfo/2.0/?$", views.NodeInfo.as_view(), name="nodeinfo-2.0"),
    url(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
    url(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
    url(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
@@ -0,0 +1,7 @@
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
]
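With both routes registered, a client can find them through the standard NodeInfo discovery document; a minimal sketch (host invented):

    import requests

    links = requests.get("https://pod.example/.well-known/nodeinfo").json()["links"]
    # Each link pairs a schema "rel" with the pod's 2.0 or 2.1 "href".
    data = requests.get(links[-1]["href"]).json()
    print(data["version"])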
@@ -11,6 +11,7 @@ from dynamic_preferences.api import viewsets as preferences_viewsets
from dynamic_preferences.api.serializers import GlobalPreferenceSerializer
from dynamic_preferences.registries import global_preferences_registry
from rest_framework import generics, views
from rest_framework.renderers import JSONRenderer
from rest_framework.response import Response

from funkwhale_api import __version__ as funkwhale_version

@@ -58,9 +59,11 @@ class InstanceSettings(generics.GenericAPIView):


@method_decorator(ensure_csrf_cookie, name="dispatch")
class NodeInfo(views.APIView):
class NodeInfo20(views.APIView):
    permission_classes = []
    authentication_classes = []
    serializer_class = serializers.NodeInfo20Serializer
    renderer_classes = (JSONRenderer,)

    @extend_schema(
        responses=serializers.NodeInfo20Serializer, operation_id="getNodeInfo20"

@@ -81,6 +84,7 @@ class NodeInfo(views.APIView):

        data = {
            "software": {"version": funkwhale_version},
            "services": {"inbound": ["atom1.0"], "outbound": ["atom1.0"]},
            "preferences": pref,
            "stats": cache_memoize(600, prefix="memoize:instance:stats")(stats.get)()
            if pref["instance__nodeinfo_stats_enabled"]

@@ -112,7 +116,62 @@ class NodeInfo(views.APIView):
            data["endpoints"]["channels"] = reverse(
                "federation:index:index-channels"
            )
        serializer = serializers.NodeInfo20Serializer(data)
        serializer = self.serializer_class(data)
        return Response(
            serializer.data, status=200, content_type=NODEINFO_2_CONTENT_TYPE
        )


class NodeInfo21(NodeInfo20):
    serializer_class = serializers.NodeInfo21Serializer

    @extend_schema(
        responses=serializers.NodeInfo20Serializer, operation_id="getNodeInfo20"
    )
    def get(self, request):
        pref = preferences.all()
        if (
            pref["moderation__allow_list_public"]
            and pref["moderation__allow_list_enabled"]
        ):
            allowed_domains = list(
                Domain.objects.filter(allowed=True)
                .order_by("name")
                .values_list("name", flat=True)
            )
        else:
            allowed_domains = None

        data = {
            "software": {"version": funkwhale_version},
            "services": {"inbound": ["atom1.0"], "outbound": ["atom1.0"]},
            "preferences": pref,
            "stats": cache_memoize(600, prefix="memoize:instance:stats")(stats.get)()
            if pref["instance__nodeinfo_stats_enabled"]
            else None,
            "actorId": get_service_actor().fid,
            "supportedUploadExtensions": SUPPORTED_EXTENSIONS,
            "allowed_domains": allowed_domains,
            "languages": pref.get("moderation__languages"),
            "location": pref.get("instance__location"),
            "content": cache_memoize(600, prefix="memoize:instance:content")(
                stats.get_content
            )()
            if pref["instance__nodeinfo_stats_enabled"]
            else None,
            "features": [
                "channels",
                "podcasts",
            ],
        }

        if not pref.get("common__api_authentication_required"):
            data["features"].append("anonymousCanListen")

        if pref.get("federation__enabled"):
            data["features"].append("federation")

        serializer = self.serializer_class(data)
        return Response(
            serializer.data, status=200, content_type=NODEINFO_2_CONTENT_TYPE
        )
@@ -1,3 +1,4 @@
import pycountry
from dynamic_preferences import types
from dynamic_preferences.registries import global_preferences_registry
from rest_framework import serializers

@@ -92,3 +93,18 @@ class SignupFormCustomization(common_preferences.SerializedPreference):
    required = False
    default = {}
    data_serializer_class = CustomFormSerializer


@global_preferences_registry.register
class Languages(common_preferences.StringListPreference):
    show_in_api = True
    section = moderation
    name = "languages"
    default = ["en"]
    verbose_name = "Moderation languages"
    help_text = (
        "The language(s) spoken by the server moderator(s). Set this to inform users "
        "what languages they should write reports and requests in."
    )
    choices = [(lang.alpha_3, lang.name) for lang in pycountry.languages]
    field_kwargs = {"choices": choices, "required": False}
@@ -32,3 +32,18 @@ class MusicCacheDuration(types.IntPreference):
        "will be erased and retranscoded on the next listening."
    )
    field_kwargs = {"required": False}


@global_preferences_registry.register
class MbidTaggedContent(types.BooleanPreference):
    show_in_api = True
    section = music
    name = "only_allow_musicbrainz_tagged_files"
    verbose_name = "Only allow Musicbrainz tagged files"
    help_text = (
        "Requires uploaded files to be tagged with a MusicBrainz ID. "
        "Enabling this setting has no impact on previously uploaded files. "
        "You can use the CLI to clear files that don't contain an MBID "
        "or enable quality filtering to hide untagged content from API calls. "
    )
    default = False
@@ -151,8 +151,9 @@ class TrackFactory(
        if created:
            self.save()

    @factory.post_generation
    def license(self, created, extracted, **kwargs):
    # The @factory.post_generation decorator is not used because we must
    # not redefine the builtin `license` function.
    def _license_post_generation(self, created, extracted, **kwargs):
        if not created:
            return

@@ -160,6 +161,8 @@ class TrackFactory(
            self.license = LicenseFactory(code=extracted)
            self.save()

    license = factory.PostGeneration(_license_post_generation)


@registry.register
class UploadFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
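From the outside the factory keyword is unchanged, since factory.PostGeneration hooks are addressed by attribute name; illustrative test usage (license code invented):

    track = TrackFactory(license="cc-by-4.0")
    assert track.license.code == "cc-by-4.0"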
@@ -15,7 +15,10 @@ def create_data(count=25):
    )
    for album in albums:
        factories.UploadFactory.create_batch(
            track__album=album, size=random.randint(3, 18)
            track__album=album,
            size=random.randint(3, 18),
            playable=True,
            in_place=True,
        )
@@ -104,7 +104,7 @@ class ArtistFilter(
        distinct=True,
        library_field="tracks__uploads__library",
    )
    ordering = django_filters.OrderingFilter(
    ordering = common_filters.CaseInsensitiveNameOrderingFilter(
        fields=(
            ("id", "id"),
            ("name", "name"),
@@ -28,7 +28,7 @@ def load(data):

    for row in data:
        try:
            license = existing_by_code[row["code"]]
            license_ = existing_by_code[row["code"]]
        except KeyError:
            logger.debug("Loading new license: {}".format(row["code"]))
            to_create.append(

@@ -36,15 +36,15 @@ def load(data):
            )
        else:
            logger.debug("Updating license: {}".format(row["code"]))
            stored = [getattr(license, f) for f in MODEL_FIELDS]
            stored = [getattr(license_, f) for f in MODEL_FIELDS]
            wanted = [row[f] for f in MODEL_FIELDS]
            if wanted == stored:
                continue
            # the object in database needs an update
            for f in MODEL_FIELDS:
                setattr(license, f, row[f])
                setattr(license_, f, row[f])

            license.save()
            license_.save()

    models.License.objects.bulk_create(to_create)
    return sorted(models.License.objects.all(), key=lambda o: o.code)

@@ -78,12 +78,12 @@ def match(*values):
    else:
        existing = load(LICENSES)
        _cache = existing
    for license in existing:
        if license.conf is None:
    for license_ in existing:
        if license_.conf is None:
            continue
        for i in license.conf["identifiers"]:
        for i in license_.conf["identifiers"]:
            if match_urls(url, i):
                return license
                return license_


def match_urls(*urls):
@@ -0,0 +1,13 @@
from django.core.management.base import BaseCommand

from funkwhale_api.typesense import tasks


class Command(BaseCommand):
    help = """
    Trigger the generation of a new typesense index for canonical Funkwhale tracks metadata.
    This is used to resolve Funkwhale tracks to MusicBrainz ids"""

    def handle(self, *args, **kwargs):
        tasks.build_canonical_index.delay()
        self.stdout.write("Tasks launched in celery worker.")
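The command only enqueues a celery task, so a worker must be running; the equivalent trigger from code is simply:

    from funkwhale_api.typesense import tasks

    tasks.build_canonical_index.delay()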
@@ -247,6 +247,13 @@ def process_upload(upload, update_denormalization=True):
        return fail_import(
            upload, "invalid_metadata", detail=detail, file_metadata=metadata_dump
        )
    check_mbid = preferences.get("music__only_allow_musicbrainz_tagged_files")
    if check_mbid and not serializer.validated_data.get("mbid"):
        return fail_import(
            upload,
            "Only content tagged with a MusicBrainz ID is permitted on this pod.",
            detail="You can tag your files with MusicBrainz Picard",
        )

    final_metadata = collections.ChainMap(
        additional_data, serializer.validated_data, internal_config
@@ -583,7 +583,7 @@ def handle_serve(
        try:
            f.download_audio_from_remote(actor=actor)
        except requests.exceptions.RequestException:
            return Response({"detail": "Remove track is unavailable"}, status=503)
            return Response({"detail": "Remote track is unavailable"}, status=503)
    data = f.get_audio_data()
    if data:
        f.duration = data["duration"]
@@ -0,0 +1,148 @@
import logging
import time

import troi
import troi.core
from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music import models as music_models
from funkwhale_api.typesense import utils

logger = logging.getLogger(__name__)


patches = troi.utils.discover_patches()

SUPPORTED_PATCHES = patches.keys()


def run(config, **kwargs):
    """Validate the received config and run the queryset generation"""
    candidates = kwargs.pop("candidates", music_models.Track.objects.all())
    validate(config)
    return TroiPatch().get_queryset(config, candidates)


def validate(config):
    patch = config.get("patch")
    if patch not in SUPPORTED_PATCHES:
        raise ValidationError(
            'Invalid patch "{}". Supported patches: {}'.format(
                config["patch"], SUPPORTED_PATCHES
            )
        )

    return True


def build_radio_queryset(patch, config, radio_qs):
    """Take a troi patch and its args, match the missing mbids and then build a radio queryset"""

    logger.info("Config used for troi radio generation is " + str(config))

    start_time = time.time()
    try:
        recommendations = troi.core.generate_playlist(patch, config)
    except ConnectTimeout:
        raise ValueError(
            "Timed out while connecting to ListenBrainz. No candidates could be retrieved for the radio."
        )
    end_time_rec = time.time()
    logger.info("Troi fetch took: " + str(end_time_rec - start_time))

    if not recommendations:
        raise ValueError("No candidates found by troi")

    recommended_mbids = [
        recommended_recording.mbid
        for recommended_recording in recommendations.playlists[0].recordings
    ]

    logger.info("Searching for MusicBrainz ID in Funkwhale database")

    qs_recommended = (
        music_models.Track.objects.all()
        .filter(mbid__in=recommended_mbids)
        .order_by("mbid", "pk")
        .distinct("mbid")
    )
    qs_recommended_mbid = [str(i.mbid) for i in qs_recommended]

    recommended_mbids_not_qs = [
        mbid for mbid in recommended_mbids if mbid not in qs_recommended_mbid
    ]
    cached_match = cache.get_many(recommended_mbids_not_qs)
    cached_match_mbid = [str(i) for i in cached_match.keys()]

    if qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database and redis")
        qs_recommended_mbid.extend(cached_match_mbid)
        mbids_found = qs_recommended_mbid
    elif qs_recommended and not cached_match_mbid:
        logger.info("MusicBrainz IDs found in Funkwhale database")
        mbids_found = qs_recommended_mbid
    elif not qs_recommended and cached_match_mbid:
        logger.info("MusicBrainz IDs found in redis cache")
        mbids_found = cached_match_mbid
    else:
        logger.info(
            "Couldn't find any matches in Funkwhale database. Trying to match all"
        )
        mbids_found = []

    recommended_recordings_not_found = [
        i for i in recommendations.playlists[0].recordings if i.mbid not in mbids_found
    ]

    logger.info("Matching missing MusicBrainz ID to Funkwhale track")

    start_time_resolv = time.time()
    utils.resolve_recordings_to_fw_track(recommended_recordings_not_found)
    end_time_resolv = time.time()

    logger.info(
        "Resolving "
        + str(len(recommended_recordings_not_found))
        + " tracks in "
        + str(end_time_resolv - start_time_resolv)
    )

    cached_match = cache.get_many(recommended_mbids)

    if not mbids_found and not cached_match:
        raise ValueError("No candidates found for troi radio")

    mbids_found_pks = list(
        music_models.Track.objects.all()
        .filter(mbid__in=mbids_found)
        .order_by("mbid", "pk")
        .distinct("mbid")
        .values_list("pk", flat=True)
    )

    mbids_found_pks_unique = [
        i for i in mbids_found_pks if i not in cached_match.keys()
    ]

    if mbids_found and cached_match:
        return radio_qs.filter(
            Q(pk__in=mbids_found_pks_unique) | Q(pk__in=cached_match.values())
        )
    if mbids_found and not cached_match:
        return radio_qs.filter(pk__in=mbids_found_pks_unique)

    if not mbids_found and cached_match:
        return radio_qs.filter(pk__in=cached_match.values())


class TroiPatch:
    code = "troi-patch"
    label = "Troi Patch"

    def get_queryset(self, config, qs):
        patch_string = config.pop("patch")
        patch = patches[patch_string]
        return build_radio_queryset(patch(), config, qs)
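A sketch of how a caller drives this module; the patch name must be one of troi.utils.discover_patches(), and the values here are illustrative:

    from funkwhale_api.radios import lb_recommendations

    config = {"patch": "daily-jams", "user_name": "listenbrainz-user"}
    candidates = lb_recommendations.run(config)  # returns a filtered Track queryset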
@@ -54,10 +54,6 @@ class RadioSession(models.Model):
    CONFIG_VERSION = 0
    config = JSONField(encoder=DjangoJSONEncoder, blank=True, null=True)

    def save(self, **kwargs):
        self.radio.clean(self)
        super().save(**kwargs)

    @property
    def next_position(self):
        next_position = 1

@@ -68,16 +64,24 @@ class RadioSession(models.Model):

        return next_position

    def add(self, track):
        new_session_track = RadioSessionTrack.objects.create(
            track=track, session=self, position=self.next_position
        )
    def add(self, tracks):
        next_position = self.next_position
        radio_session_tracks = []
        for i, track in enumerate(tracks):
            radio_session_track = RadioSessionTrack(
                track=track, session=self, position=next_position + i
            )
            radio_session_tracks.append(radio_session_track)

        return new_session_track
        new_session_tracks = RadioSessionTrack.objects.bulk_create(radio_session_tracks)

    @property
    def radio(self):
        from .registries import registry
        return new_session_tracks

    def radio(self, api_version):
        if api_version == 2:
            from .registries_v2 import registry
        else:
            from .registries import registry

        return registry[self.radio_type](session=self)
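Call sites change accordingly: add() now takes an iterable and persists it with a single bulk_create, and the registry is chosen per API version at call time. Roughly:

    session.add(picked_tracks)            # one INSERT batch instead of one per track
    radio = session.radio(api_version=2)  # resolves against registries_v2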
@@ -1,4 +1,5 @@
import datetime
import json
import logging
import random
from typing import List, Optional, Tuple

@@ -14,7 +15,7 @@ from funkwhale_api.moderation import filters as moderation_filters
from funkwhale_api.music.models import Artist, Library, Track, Upload
from funkwhale_api.tags.models import Tag

from . import filters, models
from . import filters, lb_recommendations, models
from .registries import registry

logger = logging.getLogger(__name__)

@@ -61,11 +62,19 @@ class SessionRadio(SimpleRadio):
        return self.session

    def get_queryset(self, **kwargs):
        qs = Track.objects.all()
        if not self.session:
            return qs
        if not self.session.user:
            return qs
        if not self.session or not self.session.user:
            return (
                Track.objects.all()
                .with_playable_uploads(actor=None)
                .select_related("artist", "album__artist", "attributed_to")
            )
        else:
            qs = (
                Track.objects.all()
                .with_playable_uploads(self.session.user.actor)
                .select_related("artist", "album__artist", "attributed_to")
            )

        query = moderation_filters.get_filtered_content_query(
            config=moderation_filters.USER_FILTER_CONFIG["TRACK"],
            user=self.session.user,

@@ -75,6 +84,16 @@ class SessionRadio(SimpleRadio):
    def get_queryset_kwargs(self):
        return {}

    def filter_queryset(self, queryset):
        return queryset

    def filter_from_session(self, queryset):
        already_played = self.session.session_tracks.all().values_list(
            "track", flat=True
        )
        queryset = queryset.exclude(pk__in=already_played)
        return queryset

    def get_choices(self, **kwargs):
        kwargs.update(self.get_queryset_kwargs())
        queryset = self.get_queryset(**kwargs)

@@ -87,16 +106,6 @@ class SessionRadio(SimpleRadio):
        queryset = self.filter_queryset(queryset)
        return queryset

    def filter_queryset(self, queryset):
        return queryset

    def filter_from_session(self, queryset):
        already_played = self.session.session_tracks.all().values_list(
            "track", flat=True
        )
        queryset = queryset.exclude(pk__in=already_played)
        return queryset

    def pick(self, **kwargs):
        return self.pick_many(quantity=1, **kwargs)[0]

@@ -104,8 +113,7 @@ class SessionRadio(SimpleRadio):
        choices = self.get_choices(**kwargs)
        picked_choices = super().pick_many(choices=choices, quantity=quantity)
        if self.session:
            for choice in picked_choices:
                self.session.add(choice)
            self.session.add(picked_choices)
        return picked_choices

    def validate_session(self, data, **context):

@@ -405,3 +413,58 @@ class RecentlyAdded(SessionRadio):
            Q(artist__content_category="music"),
            Q(creation_date__gt=date),
        )


# Use this to experiment on the custom multiple radio with troi
@registry.register(name="troi")
class Troi(SessionRadio):
    """
    Receive a vuejs generated config and use it to launch a troi radio session.
    The config data should follow:
    {"patch": "troi_patch_name", "troi_arg1": "troi_arg_1", "troi_arg2": ...}
    Validation of the config (args) is done by troi during track fetch.
    Funkwhale only checks if the patch is implemented.
    """

    config = serializers.JSONField(required=True)

    def append_lb_config(self, data):
        if self.session.user.settings is None:
            logger.warning(
                "No lb_user_name set in user settings. Some troi patches will fail"
            )
            return data
        elif self.session.user.settings.get("lb_user_name") is None:
            logger.warning(
                "No lb_user_name set in user settings. Some troi patches will fail"
            )
        else:
            data["user_name"] = self.session.user.settings["lb_user_name"]

        if self.session.user.settings.get("lb_user_token") is None:
            logger.warning(
                "No lb_user_token set in user settings. Some troi patches will fail"
            )
        else:
            data["user_token"] = self.session.user.settings["lb_user_token"]

        return data

    def get_queryset_kwargs(self):
        kwargs = super().get_queryset_kwargs()
        kwargs["config"] = self.session.config
        return kwargs

    def validate_session(self, data, **context):
        data = super().validate_session(data, **context)
        if data.get("config") is None:
            raise serializers.ValidationError(
                "You must provide a configuration for this radio"
            )
        return data

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        config = self.append_lb_config(json.loads(kwargs["config"]))

        return lb_recommendations.run(config, candidates=qs)
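Starting such a session from Python would look roughly like this (values invented; the frontend normally posts the config, and the stored config is deserialized with json.loads when the queryset is built):

    from funkwhale_api.radios import models

    session = models.RadioSession.objects.create(
        user=user, radio_type="troi", config='{"patch": "daily-jams"}'
    )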
@@ -0,0 +1,510 @@
import datetime
import json
import logging
import pickle
import random
from typing import List, Optional, Tuple

from django.core.cache import cache
from django.core.exceptions import ValidationError
from django.db import connection
from django.db.models import Q
from rest_framework import serializers

from funkwhale_api.federation import fields as federation_fields
from funkwhale_api.federation import models as federation_models
from funkwhale_api.moderation import filters as moderation_filters
from funkwhale_api.music.models import Artist, Library, Track, Upload
from funkwhale_api.tags.models import Tag

from . import filters, lb_recommendations, models
from .registries_v2 import registry

logger = logging.getLogger(__name__)


class SimpleRadio:
    related_object_field = None

    def clean(self, instance):
        return

    def weighted_pick(
        self,
        choices: List[Tuple[int, int]],
        previous_choices: Optional[List[int]] = None,
    ) -> int:
        total = sum(weight for c, weight in choices)
        r = random.uniform(0, total)
        upto = 0
        for choice, weight in choices:
            if upto + weight >= r:
                return choice
            upto += weight


class SessionRadio(SimpleRadio):
    def __init__(self, session=None):
        self.session = session

    def start_session(self, user, **kwargs):
        self.session = models.RadioSession.objects.create(
            user=user, radio_type=self.radio_type, **kwargs
        )
        return self.session

    def get_queryset(self, **kwargs):
        actor = None
        try:
            actor = self.session.user.actor
        except KeyError:
            pass  # Maybe logging would be helpful

        qs = (
            Track.objects.all()
            .with_playable_uploads(actor=actor)
            .select_related("artist", "album__artist", "attributed_to")
        )

        query = moderation_filters.get_filtered_content_query(
            config=moderation_filters.USER_FILTER_CONFIG["TRACK"],
            user=self.session.user,
        )
        return qs.exclude(query)

    def get_queryset_kwargs(self):
        return {}

    def filter_queryset(self, queryset):
        return queryset

    def filter_from_session(self, queryset):
        already_played = self.session.session_tracks.all().values_list(
            "track", flat=True
        )
        queryset = queryset.exclude(pk__in=already_played)
        return queryset

    def cache_batch_radio_track(self, **kwargs):
        BATCH_SIZE = 100
        # get cached RadioTracks if any
        try:
            cached_evaluated_radio_tracks = pickle.loads(
                cache.get(f"radiotracks{self.session.id}")
            )
        except TypeError:
            cached_evaluated_radio_tracks = None

        # get the queryset and apply filters
        kwargs.update(self.get_queryset_kwargs())
        queryset = self.get_queryset(**kwargs)
        queryset = self.filter_from_session(queryset)

        if kwargs["filter_playable"] is True:
            queryset = queryset.playable_by(
                self.session.user.actor if self.session.user else None
            )
        queryset = self.filter_queryset(queryset)

        # select a random batch of the qs
        sliced_queryset = queryset.order_by("?")[:BATCH_SIZE]
        if len(sliced_queryset) <= 0 and not cached_evaluated_radio_tracks:
            raise ValueError("No more radio candidates")

        # create the radio session tracks into db in bulk
        self.session.add(sliced_queryset)

        # evaluate the queryset to save it in cache
        radio_tracks = list(sliced_queryset)

        if cached_evaluated_radio_tracks is not None:
            radio_tracks.extend(cached_evaluated_radio_tracks)
        logger.info(
            f"Setting redis cache for radio generation with radio id {self.session.id}"
        )
        cache.set(f"radiotracks{self.session.id}", pickle.dumps(radio_tracks), 3600)
        cache.set(f"radioqueryset{self.session.id}", sliced_queryset, 3600)

        return sliced_queryset

    def get_choices(self, quantity, **kwargs):
        if cache.get(f"radiotracks{self.session.id}"):
            cached_radio_tracks = pickle.loads(
                cache.get(f"radiotracks{self.session.id}")
            )
            logger.info("Using redis cache for radio generation")
            radio_tracks = cached_radio_tracks
            if len(radio_tracks) < quantity:
                logger.info(
                    "Not enough radio tracks in cache. Trying to generate new cache"
                )
                sliced_queryset = self.cache_batch_radio_track(**kwargs)
            sliced_queryset = cache.get(f"radioqueryset{self.session.id}")
        else:
            sliced_queryset = self.cache_batch_radio_track(**kwargs)

        return sliced_queryset[:quantity]

    def pick_many(self, quantity, **kwargs):
        if self.session:
            sliced_queryset = self.get_choices(quantity=quantity, **kwargs)
        else:
            logger.info(
                "No radio session. Can't track user playback. Won't cache queryset results"
            )
            sliced_queryset = self.get_choices(quantity=quantity, **kwargs)

        return sliced_queryset

    def validate_session(self, data, **context):
        return data


@registry.register(name="random")
class RandomRadio(SessionRadio):
    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        return qs.filter(artist__content_category="music").order_by("?")


@registry.register(name="random_library")
class RandomLibraryRadio(SessionRadio):
    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        tracks_ids = self.session.user.actor.attributed_tracks.all().values_list(
            "id", flat=True
        )
        query = Q(artist__content_category="music") & Q(pk__in=tracks_ids)
        return qs.filter(query).order_by("?")


@registry.register(name="favorites")
class FavoritesRadio(SessionRadio):
    def get_queryset_kwargs(self):
        kwargs = super().get_queryset_kwargs()
        if self.session:
            kwargs["user"] = self.session.user
        return kwargs

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        track_ids = kwargs["user"].track_favorites.all().values_list("track", flat=True)
        return qs.filter(pk__in=track_ids, artist__content_category="music")


@registry.register(name="custom")
class CustomRadio(SessionRadio):
    def get_queryset_kwargs(self):
        kwargs = super().get_queryset_kwargs()
        kwargs["user"] = self.session.user
        kwargs["custom_radio"] = self.session.custom_radio
        return kwargs

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        return filters.run(kwargs["custom_radio"].config, candidates=qs)

    def validate_session(self, data, **context):
        data = super().validate_session(data, **context)
        try:
            user = data["user"]
        except KeyError:
            user = context.get("user")
        try:
            assert data["custom_radio"].user == user or data["custom_radio"].is_public
        except KeyError:
            raise serializers.ValidationError("You must provide a custom radio")
        except AssertionError:
            raise serializers.ValidationError("You don't have access to this radio")
        return data


@registry.register(name="custom_multiple")
class CustomMultiple(SessionRadio):
    """
    Receive a vuejs generated config and use it to launch a radio session
    """

    config = serializers.JSONField(required=True)

    def get_config(self, data):
        return data["config"]

    def get_queryset_kwargs(self):
        kwargs = super().get_queryset_kwargs()
        kwargs["config"] = self.session.config
        return kwargs

    def validate_session(self, data, **context):
        data = super().validate_session(data, **context)
        try:
            data["config"] is not None
        except KeyError:
            raise serializers.ValidationError(
                "You must provide a configuration for this radio"
            )
        return data

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        return filters.run([kwargs["config"]], candidates=qs)


class RelatedObjectRadio(SessionRadio):
    """Abstract radio related to an object (tag, artist, user...)"""

    related_object_field = serializers.IntegerField(required=True)

    def clean(self, instance):
        super().clean(instance)
        if not instance.related_object:
            raise ValidationError(
                "Cannot start RelatedObjectRadio without related object"
            )
        if not isinstance(instance.related_object, self.model):
            raise ValidationError("Trying to start radio with bad related object")

    def get_related_object(self, pk):
        return self.model.objects.get(pk=pk)


@registry.register(name="tag")
class TagRadio(RelatedObjectRadio):
    model = Tag
    related_object_field = serializers.CharField(required=True)

    def get_related_object(self, name):
        return self.model.objects.get(name=name)

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        query = (
            Q(tagged_items__tag=self.session.related_object)
            | Q(artist__tagged_items__tag=self.session.related_object)
            | Q(album__tagged_items__tag=self.session.related_object)
        )
        return qs.filter(query)

    def get_related_object_id_repr(self, obj):
        return obj.name


def weighted_choice(choices):
    total = sum(w for c, w in choices)
    r = random.uniform(0, total)
    upto = 0
    for c, w in choices:
        if upto + w >= r:
            return c
        upto += w
    assert False, "Shouldn't get here"


class NextNotFound(Exception):
    pass


@registry.register(name="similar")
class SimilarRadio(RelatedObjectRadio):
    model = Track

    def filter_queryset(self, queryset):
        queryset = super().filter_queryset(queryset)
        seeds = list(
            self.session.session_tracks.all()
            .values_list("track_id", flat=True)
            .order_by("-id")[:3]
        ) + [self.session.related_object.pk]
        for seed in seeds:
            try:
                return queryset.filter(pk=self.find_next_id(queryset, seed))
            except NextNotFound:
                continue

        return queryset.none()

    def find_next_id(self, queryset, seed):
        with connection.cursor() as cursor:
            query = """
            SELECT next, count(next) AS c
            FROM (
                SELECT
                    track_id,
                    creation_date,
                    LEAD(track_id) OVER (
                        PARTITION by user_id order by creation_date asc
                    ) AS next
                FROM history_listening
                INNER JOIN users_user ON (users_user.id = user_id)
                WHERE users_user.privacy_level = 'instance' OR users_user.privacy_level = 'everyone' OR user_id = %s
                ORDER BY creation_date ASC
            ) t WHERE track_id = %s AND next != %s GROUP BY next ORDER BY c DESC;
            """
            cursor.execute(query, [self.session.user_id, seed, seed])
            next_candidates = list(cursor.fetchall())

        if not next_candidates:
            raise NextNotFound()

        matching_tracks = list(
            queryset.filter(pk__in=[c[0] for c in next_candidates]).values_list(
                "id", flat=True
            )
        )
        next_candidates = [n for n in next_candidates if n[0] in matching_tracks]
        if not next_candidates:
            raise NextNotFound()
        return random.choice([c[0] for c in next_candidates])


@registry.register(name="artist")
class ArtistRadio(RelatedObjectRadio):
    model = Artist

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        return qs.filter(artist=self.session.related_object)


@registry.register(name="less-listened")
class LessListenedRadio(SessionRadio):
    def clean(self, instance):
        instance.related_object = instance.user
        super().clean(instance)

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        listened = self.session.user.listenings.all().values_list("track", flat=True)
        return (
            qs.filter(artist__content_category="music")
            .exclude(pk__in=listened)
            .order_by("?")
        )


@registry.register(name="less-listened_library")
class LessListenedLibraryRadio(SessionRadio):
    def clean(self, instance):
        instance.related_object = instance.user
        super().clean(instance)

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        listened = self.session.user.listenings.all().values_list("track", flat=True)
        tracks_ids = self.session.user.actor.attributed_tracks.all().values_list(
            "id", flat=True
        )
        query = Q(artist__content_category="music") & Q(pk__in=tracks_ids)
        return qs.filter(query).exclude(pk__in=listened).order_by("?")


@registry.register(name="actor-content")
class ActorContentRadio(RelatedObjectRadio):
    """
    Play content from given actor libraries
    """

    model = federation_models.Actor
    related_object_field = federation_fields.ActorRelatedField(required=True)

    def get_related_object(self, value):
        return value

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        actor_uploads = Upload.objects.filter(
            library__actor=self.session.related_object,
        )
        return qs.filter(pk__in=actor_uploads.values("track"))

    def get_related_object_id_repr(self, obj):
        return obj.full_username


@registry.register(name="library")
class LibraryRadio(RelatedObjectRadio):
    """
    Play content from a given library
    """

    model = Library
    related_object_field = serializers.UUIDField(required=True)

    def get_related_object(self, value):
        return Library.objects.get(uuid=value)

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        actor_uploads = Upload.objects.filter(
            library=self.session.related_object,
        )
        return qs.filter(pk__in=actor_uploads.values("track"))

    def get_related_object_id_repr(self, obj):
        return obj.uuid


@registry.register(name="recently-added")
class RecentlyAdded(SessionRadio):
    def get_queryset(self, **kwargs):
        date = datetime.date.today() - datetime.timedelta(days=30)
        qs = super().get_queryset(**kwargs)
        return qs.filter(
            Q(artist__content_category="music"),
            Q(creation_date__gt=date),
        )


# Use this to experiment on the custom multiple radio with troi
@registry.register(name="troi")
class Troi(SessionRadio):
    """
    Receive a vuejs generated config and use it to launch a troi radio session.
    The config data should follow:
    {"patch": "troi_patch_name", "troi_arg1": "troi_arg_1", "troi_arg2": ...}
    Validation of the config (args) is done by troi during track fetch.
    Funkwhale only checks if the patch is implemented.
    """

    config = serializers.JSONField(required=True)

    def append_lb_config(self, data):
        if self.session.user.settings is None:
            logger.warning(
                "No lb_user_name set in user settings. Some troi patches will fail"
            )
            return data
        elif self.session.user.settings.get("lb_user_name") is None:
            logger.warning(
                "No lb_user_name set in user settings. Some troi patches will fail"
            )
        else:
            data["user_name"] = self.session.user.settings["lb_user_name"]

        if self.session.user.settings.get("lb_user_token") is None:
            logger.warning(
                "No lb_user_token set in user settings. Some troi patches will fail"
            )
        else:
            data["user_token"] = self.session.user.settings["lb_user_token"]

        return data

    def get_queryset_kwargs(self):
        kwargs = super().get_queryset_kwargs()
        kwargs["config"] = self.session.config
        return kwargs

    def validate_session(self, data, **context):
        data = super().validate_session(data, **context)
        if data.get("config") is None:
            raise serializers.ValidationError(
                "You must provide a configuration for this radio"
            )
        return data

    def get_queryset(self, **kwargs):
        qs = super().get_queryset(**kwargs)
        config = self.append_lb_config(json.loads(kwargs["config"]))

        return lb_recommendations.run(config, candidates=qs)
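The two per-session cache entries make the batching easy to inspect; a rough sketch (session id invented):

    import pickle
    from django.core.cache import cache

    session_id = 1
    tracks = pickle.loads(cache.get(f"radiotracks{session_id}"))  # evaluated Track list
    queryset = cache.get(f"radioqueryset{session_id}")            # last cached slice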
@@ -0,0 +1,10 @@
import persisting_theory


class RadioRegistry_v2(persisting_theory.Registry):
    def prepare_name(self, data, name=None):
        setattr(data, "radio_type", name)
        return name


registry = RadioRegistry_v2()
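
A small sketch of what the `prepare_name` hook buys: registering a class under a name stamps that name onto the class as `radio_type`. The `RandomRadio` stand-in below is hypothetical; real radios subclass `SessionRadio`.

```python
import persisting_theory


class RadioRegistry_v2(persisting_theory.Registry):
    def prepare_name(self, data, name=None):
        setattr(data, "radio_type", name)
        return name


registry = RadioRegistry_v2()


@registry.register(name="random")
class RandomRadio:  # stand-in; real radios subclass SessionRadio
    pass


# prepare_name stamped the registration name onto the class,
# and the registry maps that name back to the class.
assert RandomRadio.radio_type == "random"
assert registry["random"] is RandomRadio
```
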
@@ -40,9 +40,11 @@ class RadioSerializer(serializers.ModelSerializer):


class RadioSessionTrackSerializerCreate(serializers.ModelSerializer):
    count = serializers.IntegerField(required=False, allow_null=True)

    class Meta:
        model = models.RadioSessionTrack
        fields = ("session",)
        fields = ("session", "count")


class RadioSessionTrackSerializer(serializers.ModelSerializer):
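
For context, a sketch of how the new optional `count` field is consumed, mirroring the v2 view code later in this diff (the session pk is a placeholder):

```python
# Sketch: RadioSessionTrackSerializerCreate lives in funkwhale_api.radios.serializers.
serializer = RadioSessionTrackSerializerCreate(data={"session": 1, "count": 5})
serializer.is_valid(raise_exception=True)
count = int(serializer.validated_data.get("count") or 1)  # defaults to one track
```
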
@@ -5,7 +5,7 @@ from . import views

router = routers.OptionalSlashRouter()
router.register(r"sessions", views.RadioSessionViewSet, "sessions")
router.register(r"radios", views.RadioViewSet, "radios")
router.register(r"tracks", views.RadioSessionTrackViewSet, "tracks")
router.register(r"tracks", views.V1_RadioSessionTrackViewSet, "tracks")


urlpatterns = router.urls
@@ -0,0 +1,10 @@
from funkwhale_api.common import routers

from . import views

router = routers.OptionalSlashRouter()

router.register(r"sessions", views.V2_RadioSessionViewSet, "sessions")


urlpatterns = router.urls
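
Since the router registers the viewset under the `api:v2:radios` namespace, the new endpoints resolve with the URL names the tests later in this diff rely on:

```python
from django.urls import reverse

sessions_url = reverse("api:v2:radios:sessions-list")
tracks_url = reverse("api:v2:radios:sessions-tracks", kwargs={"pk": 42})
# roughly "/api/v2/radios/sessions/" and "/api/v2/radios/sessions/42/tracks/"
```
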
@@ -1,3 +1,6 @@
import pickle

from django.core.cache import cache
from django.db.models import Q
from drf_spectacular.utils import extend_schema
from rest_framework import mixins, status, viewsets
@@ -121,7 +124,7 @@ class RadioSessionViewSet(
        return context


class RadioSessionTrackViewSet(mixins.CreateModelMixin, viewsets.GenericViewSet):
class V1_RadioSessionTrackViewSet(mixins.CreateModelMixin, viewsets.GenericViewSet):
    serializer_class = serializers.RadioSessionTrackSerializer
    queryset = models.RadioSessionTrack.objects.all()
    permission_classes = []
@@ -133,21 +136,19 @@ class RadioSessionTrackViewSet(mixins.CreateModelMixin, viewsets.GenericViewSet)
        session = serializer.validated_data["session"]
        if not request.user.is_authenticated and not request.session.session_key:
            self.request.session.create()
        try:
            assert (request.user == session.user) or (
                request.session.session_key == session.session_key
                and session.session_key
            )
        except AssertionError:
        # Deny access unless the caller owns the session, either as the
        # session user or via a matching (non-empty) session key.
        if request.user != session.user and (
            request.session.session_key != session.session_key
            or not session.session_key
        ):
            return Response(status=status.HTTP_403_FORBIDDEN)

        try:
            session.radio.pick()
            session.radio(api_version=1).pick()
        except ValueError:
            return Response(
                "Radio doesn't have more candidates", status=status.HTTP_404_NOT_FOUND
            )
        session_track = session.session_tracks.all().latest("id")
        # self.perform_create(serializer)
        # dirty override here, since we use a different serializer for creation and detail
        serializer = self.serializer_class(
            instance=session_track, context=self.get_serializer_context()
@@ -161,3 +162,99 @@ class RadioSessionTrackViewSet(mixins.CreateModelMixin, viewsets.GenericViewSet)
        if self.action == "create":
            return serializers.RadioSessionTrackSerializerCreate
        return super().get_serializer_class(*args, **kwargs)


class V2_RadioSessionViewSet(
    mixins.CreateModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
    """Returns a list of RadioSessions"""

    serializer_class = serializers.RadioSessionSerializer
    queryset = models.RadioSession.objects.all()
    permission_classes = []

    @action(detail=True, serializer_class=serializers.RadioSessionTrackSerializerCreate)
    def tracks(self, request, pk, *args, **kwargs):
        data = {"session": pk}
        data["count"] = (
            request.query_params["count"]
            if "count" in request.query_params.keys()
            else 1
        )
        serializer = serializers.RadioSessionTrackSerializerCreate(data=data)
        serializer.is_valid(raise_exception=True)
        session = serializer.validated_data["session"]

        count = int(data["count"])
        # This is used for testing purposes.
        filter_playable = (
            request.query_params["filter_playable"]
            if "filter_playable" in request.query_params.keys()
            else True
        )
        if not request.user.is_authenticated and not request.session.session_key:
            self.request.session.create()

        # Deny access unless the caller owns the session, either as the
        # session user or via a matching (non-empty) session key.
        if request.user != session.user and (
            request.session.session_key != session.session_key
            or not session.session_key
        ):
            return Response(status=status.HTTP_403_FORBIDDEN)
        try:
            from . import radios_v2  # noqa

            session.radio(api_version=2).pick_many(
                count, filter_playable=filter_playable
            )
        except ValueError:
            return Response(
                "Radio doesn't have more candidates", status=status.HTTP_404_NOT_FOUND
            )

        # dirty override here, since we use a different serializer for creation and detail
        evaluated_radio_tracks = pickle.loads(cache.get(f"radiotracks{session.id}"))
        batch = evaluated_radio_tracks[:count]
        serializer = TrackSerializer(
            data=batch,
            many=True,
        )
        serializer.is_valid()

        # delete the tracks we sent from the cache
        new_cached_radiotracks = evaluated_radio_tracks[count:]
        cache.set(f"radiotracks{session.id}", pickle.dumps(new_cached_radiotracks))

        return Response(
            serializer.data,
            status=status.HTTP_201_CREATED,
        )

    def get_queryset(self):
        queryset = super().get_queryset()
        if self.request.user.is_authenticated:
            return queryset.filter(
                Q(user=self.request.user)
                | Q(session_key=self.request.session.session_key)
            )

        return queryset.filter(session_key=self.request.session.session_key).exclude(
            session_key=None
        )

    def perform_create(self, serializer):
        if (
            not self.request.user.is_authenticated
            and not self.request.session.session_key
        ):
            self.request.session.create()
        return serializer.save(
            user=self.request.user if self.request.user.is_authenticated else None,
            session_key=self.request.session.session_key,
        )

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["user"] = (
            self.request.user if self.request.user.is_authenticated else None
        )
        return context
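
A rough sketch of how a client would consume the new endpoint. The host and session id are placeholders, the query parameters match the view above, and the caller must be recognised as the session owner (authenticated user or matching session cookie):

```python
import requests

# Placeholders: adjust the host, session id, and authentication for a real pod.
resp = requests.get(
    "https://pod.example/api/v2/radios/sessions/42/tracks/",
    params={"count": 5, "filter_playable": True},
)
assert resp.status_code == 201  # the view returns HTTP 201 with the track batch
tracks = resp.json()
```
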
@@ -0,0 +1,111 @@
from troi import Artist, Element, Playlist, Recording
from troi.patch import Patch

recording_list = [
    Recording(
        name="I Want It That Way",
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
        artist=Artist(name="artist_name"),
    ),
    Recording(name="Untouchable", artist=Artist(name="Another lol")),
    Recording(
        name="The Perfect Kiss",
        mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
        artist=Artist(name="artist_name2"),
    ),
    Recording(
        name="Love Your Voice",
        mbid="93726547-f8c0-4efd-8e16-d2dee76500f6",
        artist=Artist(name="artist_name"),
    ),
    Recording(
        name="Hall of Fame",
        mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09",
        artist=Artist(name="artist_name_3"),
    ),
]


class DummyElement(Element):
    """Dummy element that returns a fixed playlist for testing"""

    @staticmethod
    def outputs():
        return [Playlist]

    def read(self, sources):
        recordings = recording_list

        return [
            Playlist(
                name="Test Export Playlist",
                description="A playlist to test exporting playlists to spotify",
                recordings=recordings,
            )
        ]


class DummyPatch(Patch):
    """Dummy patch that always returns a fixed set of recordings for testing"""

    @staticmethod
    def slug():
        return "test-patch"

    def create(self, inputs):
        return DummyElement()

    @staticmethod
    def outputs():
        return [Recording]


recommended_recording_mbids = [
    "87dfa566-21c3-45ed-bc42-1d345b8563fa",
    "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    "93726547-f8c0-4efd-8e16-d2dee76500f6",
    "395bd5a1-79cc-4e04-8869-ca9eabc78d09",
]

typesense_search_result = {
    "facet_counts": [],
    "found": 1,
    "out_of": 1,
    "page": 1,
    "request_params": {
        "collection_name": "canonical_fw_data",
        "per_page": 10,
        "q": "artist_nameiwantitthatway",
    },
    "search_time_ms": 1,
    "hits": [
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "1",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
        {
            "highlights": [
                {
                    "field": "combined",
                    "snippet": "string",
                    "matched_tokens": ["string"],
                }
            ],
            "document": {
                "pk": "2",
                "combined": "artist_nameiwantitthatway",
            },
            "text_match": 130916,
        },
    ],
}
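
These factories are consumed by the recommendation tests later in this diff; reading the dummy element directly shows the fixed playlist it yields, and the recordings with an mbid line up with `recommended_recording_mbids`:

```python
playlist = DummyElement().read(sources=[])[0]
assert playlist.name == "Test Export Playlist"
# "Untouchable" has no mbid, so filtering yields exactly the four known mbids.
assert [r.mbid for r in playlist.recordings if r.mbid] == recommended_recording_mbids
```
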
@@ -0,0 +1,108 @@
import logging

from django.conf import settings

from funkwhale_api.music import models as music_models
from funkwhale_api.taskapp import celery

from . import utils

logger = logging.getLogger(__name__)


class TypesenseNotActivate(Exception):
    pass


if not settings.TYPESENSE_API_KEY:
    logger.info(
        "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
    )
else:
    import typesense
    from typesense.exceptions import ObjectAlreadyExists


api_key = settings.TYPESENSE_API_KEY
host = settings.TYPESENSE_HOST
port = settings.TYPESENSE_PORT
protocol = settings.TYPESENSE_PROTOCOL

collection_name = "canonical_fw_data"
BATCH_SIZE = 10000


@celery.app.task(name="typesense.add_tracks_to_index")
def add_tracks_to_index(tracks_pk):
    """
    Add Funkwhale track data to the typesense index, concatenating the artist
    name and the track title into one string.
    """

    client = typesense.Client(
        {
            "api_key": api_key,
            "nodes": [{"host": host, "port": port, "protocol": protocol}],
            "connection_timeout_seconds": 2,
        }
    )

    try:
        logger.info(f"Updating index {collection_name}")
        tracks = music_models.Track.objects.all().filter(pk__in=tracks_pk)
        documents = []
        for track in tracks:
            document = dict()
            document["pk"] = track.pk
            document["combined"] = utils.delete_non_alnum_characters(
                track.artist.name + track.title
            )
            documents.append(document)

        client.collections[collection_name].documents.import_(
            documents, {"action": "upsert"}
        )

    except typesense.exceptions.TypesenseClientError as err:
        logger.error(f"Can't build index: {str(err)}")


@celery.app.task(name="typesense.build_canonical_index")
def build_canonical_index():
    if not settings.TYPESENSE_API_KEY:
        raise TypesenseNotActivate(
            "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
        )

    schema = {
        "name": collection_name,
        "fields": [
            {"name": "combined", "type": "string"},
            {"name": "pk", "type": "int32"},
        ],
        "default_sorting_field": "pk",
    }
    client = typesense.Client(
        {
            "api_key": api_key,
            "nodes": [{"host": host, "port": port, "protocol": protocol}],
            "connection_timeout_seconds": 2,
        }
    )
    try:
        client.collections.create(schema)
    except ObjectAlreadyExists:
        pass

    tracks = music_models.Track.objects.all().values_list("pk", flat=True)
    total_tracks = tracks.count()
    total_batches = (total_tracks - 1) // BATCH_SIZE + 1

    for i in range(total_batches):
        start_index = i * BATCH_SIZE
        end_index = (i + 1) * BATCH_SIZE
        batch_tracks = tracks[start_index:end_index]
        logger.info(
            f"Launching async task to add {len(batch_tracks)} track pks to the index"
        )
        add_tracks_to_index.delay(list(batch_tracks))
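
A quick worked example of the batching arithmetic used above, with consecutive slices tiling the queryset without gaps:

```python
BATCH_SIZE = 10000
total_tracks = 25000
total_batches = (total_tracks - 1) // BATCH_SIZE + 1  # -> 3

slices = [(i * BATCH_SIZE, (i + 1) * BATCH_SIZE) for i in range(total_batches)]
assert slices == [(0, 10000), (10000, 20000), (20000, 30000)]
# Python slicing clamps the last end index, so batch 3 holds tracks 20000..24999.
```
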
@@ -0,0 +1,92 @@
import logging
import re

import unidecode
from django.conf import settings
from django.core.cache import cache
from lb_matching_tools.cleaner import MetadataCleaner

from funkwhale_api.music import models as music_models

logger = logging.getLogger(__name__)

api_key = settings.TYPESENSE_API_KEY
host = settings.TYPESENSE_HOST
port = settings.TYPESENSE_PORT
protocol = settings.TYPESENSE_PROTOCOL
TYPESENSE_NUM_TYPO = settings.TYPESENSE_NUM_TYPO


class TypesenseNotActivate(Exception):
    pass


if not settings.TYPESENSE_API_KEY:
    logger.info(
        "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
    )
else:
    import typesense


def delete_non_alnum_characters(text):
    return unidecode.unidecode(re.sub(r"[^\w]+", "", text).lower())


def resolve_recordings_to_fw_track(recordings):
    """
    Try to match a troi recording entity to a fw track using the typesense index.
    The results are saved in the match_mbid attribute of the Track table.
    For test purposes: if multiple fw tracks are returned, we log the information
    but only keep the best result in db to avoid duplicates.
    """

    if not settings.TYPESENSE_API_KEY:
        raise TypesenseNotActivate(
            "Typesense is not activated. You can enable it by setting the TYPESENSE_API_KEY env variable."
        )

    client = typesense.Client(
        {
            "api_key": api_key,
            "nodes": [{"host": host, "port": port, "protocol": protocol}],
            "connection_timeout_seconds": 2,
        }
    )

    mc = MetadataCleaner()

    for recording in recordings:
        rec = mc.clean_recording(recording.name)
        artist = mc.clean_artist(recording.artist.name)
        canonical_name_for_track = delete_non_alnum_characters(artist + rec)

        logger.debug(f"Trying to resolve: {canonical_name_for_track}")

        search_parameters = {
            "q": canonical_name_for_track,
            "query_by": "combined",
            "num_typos": TYPESENSE_NUM_TYPO,
            "drop_tokens_threshold": 0,
        }
        matches = client.collections["canonical_fw_data"].documents.search(
            search_parameters
        )

        if matches["hits"]:
            hit = matches["hits"][0]
            pk = hit["document"]["pk"]
            logger.debug(f"Saving match for track with primary key {pk}")
            cache.set(recording.mbid, pk)

            if settings.DEBUG and len(matches["hits"]) > 1:
                for hit in matches["hits"][1:]:
                    pk = hit["document"]["pk"]
                    fw_track = music_models.Track.objects.get(pk=pk)
                    logger.info(
                        f"Duplicate match found for {fw_track.artist.name} "
                        f"{fw_track.title} and primary key {pk}. "
                        "Skipping because of better match."
                    )
        else:
            logger.debug("No match found in fw db")
    return cache.get_many([rec.mbid for rec in recordings])
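
A worked example of the canonicalisation used for index lookups: the regex strips non-word characters, then unidecode transliterates accents:

```python
import re

import unidecode


def delete_non_alnum_characters(text):
    return unidecode.unidecode(re.sub(r"[^\w]+", "", text).lower())


assert delete_non_alnum_characters("The Perfect Kiss!") == "theperfectkiss"
assert delete_non_alnum_characters("Beyoncé & Halo") == "beyoncehalo"
```
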
@@ -1,5 +1,6 @@
import sys

import factory
import pytz
from django.contrib.auth.models import Permission
from django.utils import timezone

@@ -7,6 +8,11 @@ from funkwhale_api.factories import ManyToManyFromList, NoUpdateOnCreate, regist

from . import models

if sys.version_info < (3, 9):
    from backports.zoneinfo import ZoneInfo
else:
    from zoneinfo import ZoneInfo


@registry.register
class GroupFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):

@@ -159,7 +165,7 @@ class GrantFactory(factory.django.DjangoModelFactory):
class AccessTokenFactory(factory.django.DjangoModelFactory):
    application = factory.SubFactory(ApplicationFactory)
    user = factory.SubFactory(UserFactory)
    expires = factory.Faker("future_datetime", tzinfo=pytz.UTC)
    expires = factory.Faker("future_datetime", tzinfo=ZoneInfo("UTC"))
    token = factory.Faker("uuid4")
    scope = "read"
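
The pytz-to-ZoneInfo migration in these factories is behaviour-preserving for UTC, which a quick equality check illustrates:

```python
import datetime
import sys

import pytz

if sys.version_info < (3, 9):
    from backports.zoneinfo import ZoneInfo
else:
    from zoneinfo import ZoneInfo

a = datetime.datetime(2020, 1, 30, 6, 0, 49, tzinfo=pytz.UTC)
b = datetime.datetime(2020, 1, 30, 6, 0, 49, tzinfo=ZoneInfo("UTC"))
assert a == b  # both represent the same UTC instant
```
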
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
[tool.poetry]
name = "funkwhale-api"
version = "1.2.9"
version = "1.4.0"
description = "Funkwhale API"

authors = ["Funkwhale Collective"]

@@ -25,7 +25,7 @@ exclude = ["tests"]
funkwhale-manage = 'funkwhale_api.main:main'

[tool.poetry.dependencies]
python = "^3.7"
python = "^3.8,<3.12"

# Django
dj-rest-auth = { extras = ["with_social"], version = "2.2.8" }

@@ -43,7 +43,7 @@ django-redis = "==5.2.0"
django-storages = "==1.13.2"
django-versatileimagefield = "==2.2"
djangorestframework = "==3.14.0"
drf-spectacular = "==0.25.1"
drf-spectacular = "==0.26.5"
markdown = "==3.4.4"
persisting-theory = "==1.0"
psycopg2 = "==2.9.9"

@@ -51,7 +51,7 @@ redis = "==4.5.5"

# Django LDAP
django-auth-ldap = "==4.1.0"
python-ldap = "==3.4.3"
python-ldap = "==3.4.4"

# Channels
channels = { extras = ["daphne"], version = "==4.0.0" }

@@ -68,6 +68,7 @@ uvicorn = { version = "==0.20.0", extras = ["standard"] }
# Libs
aiohttp = "==3.8.6"
arrow = "==1.2.3"
backports-zoneinfo = { version = "==0.2.1", python = "<3.9" }
bleach = "==5.0.1"
boto3 = "==1.26.161"
click = "==8.1.7"

@@ -79,23 +80,28 @@ pillow = "==9.3.0"
pydub = "==0.25.1"
pyld = "==2.0.3"
python-magic = "==0.4.27"
pytz = "==2022.7.1"
requests = "==2.28.2"
requests-http-message-signatures = "==0.3.1"
sentry-sdk = "==1.12.1"
sentry-sdk = "==1.19.1"
watchdog = "==2.2.1"
troi = { git = "https://github.com/metabrainz/troi-recommendation-playground.git", tag = "v-2023-10-30.0"}
lb-matching-tools = { git = "https://github.com/metabrainz/listenbrainz-matching-tools.git", branch = "main"}
unidecode = "==1.3.7"
pycountry = "22.3.5"

# Typesense
typesense = { version = "==0.15.1", optional = true }

# Dependencies pinning
importlib-metadata = { version = "==4.13.0", python = "^3.7" }
ipython = "==7.34.0"
pluralizer = "==1.2.0"
service-identity = "==21.1.0"
unicode-slugify = "==0.1.5"

[tool.poetry.dev-dependencies]
[tool.poetry.group.dev.dependencies]
aioresponses = "==0.7.6"
asynctest = "==0.13.0"
black = "==22.12.0"
black = "==23.3.0"
coverage = { version = "==6.5.0", extras = ["toml"] }
debugpy = "==1.6.7.post1"
django-coverage-plugin = "==3.0.0"

@@ -104,27 +110,30 @@ factory-boy = "==3.2.1"
faker = "==15.3.4"
flake8 = "==3.9.2"
ipdb = "==0.13.13"
pytest = "==7.4.3"
pytest-asyncio = "==0.21.0"
prompt-toolkit = "==3.0.41"
pytest = "==7.2.1"
pytest-asyncio = "==0.20.3"
pytest-cov = "==4.0.0"
pytest-django = "==4.5.2"
pytest-env = "==0.8.1"
pytest-env = "==0.8.2"
pytest-mock = "==3.10.0"
pytest-randomly = "==3.12.0"
pytest-sugar = "==0.9.7"
requests-mock = "==1.10.0"
pylint = "==2.17.7"
pylint-django = "==2.5.5"
django-extensions = "==3.2.3"

[tool.poetry.extras]
typesense = ["typesense"]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

[tool.pylint.master]
load-plugins = [
    "pylint_common",
    "pylint_django",
    "pylint_celery",
]
load-plugins = ["pylint_django"]
django-settings-module = "config.settings.testing"

[tool.pylint.messages_control]
disable = [

@@ -175,7 +184,7 @@ env = [

[tool.coverage.run]
plugins = ["django_coverage_plugin"]
include = ["funkwhale_api/*"]
source = ["funkwhale_api"]
omit = [
    "*migrations*",
    "*tests*",
@@ -1,9 +1,9 @@
import datetime
import sys
import uuid

import feedparser
import pytest
import pytz
from django.templatetags.static import static
from django.urls import reverse

@@ -14,6 +14,11 @@ from funkwhale_api.federation import actors
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.federation import utils as federation_utils

if sys.version_info < (3, 9):
    from backports.zoneinfo import ZoneInfo
else:
    from zoneinfo import ZoneInfo


def test_channel_serializer_create(factories, mocker):
    attributed_to = factories["federation.Actor"](local=True)

@@ -456,7 +461,7 @@ def test_rss_duration(seconds, expected):
    "dt, expected",
    [
        (
            datetime.datetime(2020, 1, 30, 6, 0, 49, tzinfo=pytz.UTC),
            datetime.datetime(2020, 1, 30, 6, 0, 49, tzinfo=ZoneInfo("UTC")),
            "Thu, 30 Jan 2020 06:00:49 +0000",
        ),
    ],

@@ -678,7 +683,7 @@ def test_rss_feed_item_serializer_create(factories):
    assert upload.track.position == 33
    assert upload.track.disc_number == 2
    assert upload.track.creation_date == datetime.datetime(2020, 3, 11, 16).replace(
        tzinfo=pytz.utc
        tzinfo=ZoneInfo("UTC")
    )
    assert upload.track.get_tags() == ["pop", "rock"]
    assert upload.track.attachment_cover.url == "https://image.url/"

@@ -748,7 +753,7 @@ def test_rss_feed_item_serializer_update(factories):
    assert upload.track.position == 33
    assert upload.track.disc_number == 2
    assert upload.track.creation_date == datetime.datetime(2020, 3, 11, 16).replace(
        tzinfo=pytz.utc
        tzinfo=ZoneInfo("UTC")
    )
    assert upload.track.get_tags() == ["pop", "rock"]
    assert upload.track.attachment_cover.url == "https://image.url/"
@@ -32,6 +32,22 @@ def test_user_create_handler(factories, mocker, now):
    assert user.all_permissions == expected_permissions


def test_user_implicit_staff():
    kwargs = {
        "username": "helloworld",
        "password": "securepassword",
        "is_superuser": True,
        "email": "hello@world.email",
        "upload_quota": 35,
        "permissions": ["moderation"],
    }
    user = users.handler_create_user(**kwargs)

    assert user.username == kwargs["username"]
    assert user.is_superuser == kwargs["is_superuser"]
    assert user.is_staff is True


def test_user_delete_handler_soft(factories, mocker, now):
    user1 = factories["federation.Actor"](local=True).user
    actor1 = user1.actor
@@ -1,5 +1,8 @@
import os

import pytest
from django.core.management import call_command
from django.core.management.base import CommandError

from funkwhale_api.federation import models as federation_models
from funkwhale_api.music import models as music_models

@@ -97,3 +100,31 @@ def test_load_test_data_skip_dependencies(factories):

    assert music_models.Artist.objects.count() == 5
    assert music_models.Album.objects.count() == 10


commands = ["createsuperuser", "makemigrations"]


@pytest.mark.parametrize("command", commands)
def test_blocked_commands(command):
    with pytest.raises(CommandError):
        call_command(command)


@pytest.mark.parametrize("command", commands)
def test_unblocked_commands(command, mocker):
    mocker.patch.dict(os.environ, {"FORCE": "1"})

    call_command(command)


def test_inplace_to_s3_without_source():
    with pytest.raises(CommandError):
        call_command("inplace_to_s3")


def test_inplace_to_s3_dryrun(factories):
    upload = factories["music.Upload"](in_place=True, source="file:///music/music.mp3")
    call_command("inplace_to_s3", "--source", "/music")
    assert upload.source == "file:///music/music.mp3"
    assert not upload.audio_file
@@ -108,7 +108,7 @@ def test_get_default_head_tags(preferences, settings):
        {
            "tag": "meta",
            "property": "og:image",
            "content": settings.FUNKWHALE_URL + "/front/favicon.png",
            "content": settings.FUNKWHALE_URL + "/android-chrome-512x512.png",
        },
        {"tag": "meta", "property": "og:url", "content": settings.FUNKWHALE_URL + "/"},
    ]
@@ -17,7 +17,7 @@ def test_get_ident_anonymous(api_request):
def test_get_ident_authenticated(api_request, factories):
    user = factories["users.User"]()
    request = api_request.get("/")
    expected = {"id": user.pk, "type": "authenticated"}
    expected = {"id": f"{user.pk}", "type": "authenticated"}
    assert throttling.get_ident(user, request) == expected


@@ -26,7 +26,7 @@ def test_get_ident_authenticated(api_request, factories):
    [
        (
            "create",
            {"id": 42, "type": "authenticated"},
            {"id": "42", "type": "authenticated"},
            "throttling:create:authenticated:42",
        ),
        (

@@ -160,7 +160,7 @@ def test_cannot_approve_reject_without_perm(


def test_rate_limit(logged_in_api_client, now_time, settings, mocker):
    expected_ident = {"type": "authenticated", "id": logged_in_api_client.user.pk}
    expected_ident = {"type": "authenticated", "id": f"{logged_in_api_client.user.pk}"}

    expected = {
        "ident": expected_ident,
@@ -6,7 +6,7 @@ from funkwhale_api import __version__ as api_version
from funkwhale_api.music.utils import SUPPORTED_EXTENSIONS


def test_nodeinfo_default(api_client):
def test_nodeinfo_20(api_client):
    url = reverse("api:v1:instance:nodeinfo-2.0")
    response = api_client.get(url)


@@ -14,7 +14,7 @@ def test_nodeinfo_default(api_client):
        "version": "2.0",
        "software": OrderedDict([("name", "funkwhale"), ("version", api_version)]),
        "protocols": ["activitypub"],
        "services": OrderedDict([("inbound", []), ("outbound", [])]),
        "services": OrderedDict([("inbound", ["atom1.0"]), ("outbound", ["atom1.0"])]),
        "openRegistrations": False,
        "usage": {
            "users": OrderedDict(

@@ -89,3 +89,74 @@ def test_nodeinfo_default(api_client):
    }

    assert response.data == expected


def test_nodeinfo_21(api_client):
    url = reverse("api:v2:instance:nodeinfo-2.1")
    response = api_client.get(url)

    expected = {
        "version": "2.1",
        "software": OrderedDict(
            [
                ("name", "funkwhale"),
                ("version", api_version),
                ("repository", "https://dev.funkwhale.audio/funkwhale/funkwhale"),
                ("homepage", "https://funkwhale.audio"),
            ]
        ),
        "protocols": ["activitypub"],
        "services": OrderedDict([("inbound", ["atom1.0"]), ("outbound", ["atom1.0"])]),
        "openRegistrations": False,
        "usage": {
            "users": OrderedDict(
                [("total", 0), ("activeHalfyear", 0), ("activeMonth", 0)]
            ),
            "localPosts": 0,
            "localComments": 0,
        },
        "metadata": {
            "actorId": "https://test.federation/federation/actors/service",
            "private": False,
            "shortDescription": "",
            "longDescription": "",
            "contactEmail": "",
            "nodeName": "",
            "banner": None,
            "defaultUploadQuota": 1000,
            "supportedUploadExtensions": SUPPORTED_EXTENSIONS,
            "allowList": {"enabled": False, "domains": None},
            "funkwhaleSupportMessageEnabled": True,
            "instanceSupportMessage": "",
            "usage": OrderedDict(
                [
                    ("favorites", OrderedDict([("tracks", {"total": 0})])),
                    ("listenings", OrderedDict([("total", 0)])),
                    ("downloads", OrderedDict([("total", 0)])),
                ]
            ),
            "location": "",
            "languages": ["en"],
            "features": ["channels", "podcasts", "federation"],
            "content": OrderedDict(
                [
                    (
                        "local",
                        OrderedDict(
                            [
                                ("artists", 0),
                                ("releases", 0),
                                ("recordings", 0),
                                ("hoursOfContent", 0),
                            ]
                        ),
                    ),
                    ("topMusicCategories", []),
                    ("topPodcastCategories", []),
                ]
            ),
            "codeOfConduct": "",
        },
    }

    assert response.data == expected
@@ -3,9 +3,32 @@ import pytest
from funkwhale_api.music import filters, models


def test_artist_filter_ordering(factories, mocker):
    # Test artist list
    artist1 = factories["music.Artist"](name="Anita Muller")
    artist2 = factories["music.Artist"](name="Jane Smith")
    artist3 = factories["music.Artist"](name="Adam Johnson")
    artist4 = factories["music.Artist"](name="anita iux")

    qs = models.Artist.objects.all()

    cf = factories["moderation.UserFilter"](for_artist=True)

    # Request with ordering applied
    filterset = filters.ArtistFilter(
        {"ordering": "name"}, request=mocker.Mock(user=cf.user), queryset=qs
    )

    expected_order = [artist3.name, artist4.name, artist1.name, artist2.name]
    actual_order = list(filterset.qs.values_list("name", flat=True))

    assert actual_order == expected_order


def test_album_filter_hidden(factories, mocker, queryset_equal_list):
    factories["music.Album"]()
    cf = factories["moderation.UserFilter"](for_artist=True)

    hidden_album = factories["music.Album"](artist=cf.target_artist)

    qs = models.Album.objects.all()
@@ -1400,3 +1400,53 @@ def test_fs_import(factories, cache, mocker, settings):
    }
    assert cache.get("fs-import:status") == "finished"
    assert "Pruning dangling tracks" in cache.get("fs-import:logs")[-1]


def test_upload_checks_mbid_tag(temp_signal, factories, mocker, preferences):
    preferences["music__only_allow_musicbrainz_tagged_files"] = True
    mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
    mocker.patch("funkwhale_api.music.tasks.populate_album_cover")
    mocker.patch("funkwhale_api.music.metadata.Metadata.get_picture")
    track = factories["music.Track"](album__attachment_cover=None, mbid=None)
    path = os.path.join(DATA_DIR, "with_cover.opus")

    upload = factories["music.Upload"](
        track=None,
        audio_file__from_path=path,
        import_metadata={"funkwhale": {"track": {"uuid": str(track.uuid)}}},
    )
    mocker.patch("funkwhale_api.music.models.TrackActor.create_entries")

    with temp_signal(signals.upload_import_status_updated):
        tasks.process_upload(upload_id=upload.pk)

    upload.refresh_from_db()

    assert upload.import_status == "errored"
    assert upload.import_details == {
        "error_code": "Only content tagged with a MusicBrainz ID is permitted on this pod.",
        "detail": "You can tag your files with MusicBrainz Picard",
    }


def test_upload_checks_mbid_tag_pass(temp_signal, factories, mocker, preferences):
    preferences["music__only_allow_musicbrainz_tagged_files"] = True
    mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
    mocker.patch("funkwhale_api.music.tasks.populate_album_cover")
    mocker.patch("funkwhale_api.music.metadata.Metadata.get_picture")
    track = factories["music.Track"](album__attachment_cover=None, mbid=None)
    path = os.path.join(DATA_DIR, "test.mp3")

    upload = factories["music.Upload"](
        track=None,
        audio_file__from_path=path,
        import_metadata={"funkwhale": {"track": {"uuid": str(track.uuid)}}},
    )
    mocker.patch("funkwhale_api.music.models.TrackActor.create_entries")

    with temp_signal(signals.upload_import_status_updated):
        tasks.process_upload(upload_id=upload.pk)

    upload.refresh_from_db()

    assert upload.import_status == "finished"
@@ -0,0 +1,116 @@
import pytest
import troi.core
from django.core.cache import cache
from django.db.models import Q
from requests.exceptions import ConnectTimeout

from funkwhale_api.music.models import Track
from funkwhale_api.radios import lb_recommendations
from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_can_build_radio_queryset_with_fw_db(factories, mocker):
    factories["music.Track"](
        title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    factories["music.Track"](
        title="The Perfect Kiss", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"
    )
    factories["music.Track"]()

    qs = Track.objects.all()

    mocker.patch("funkwhale_api.typesense.utils.resolve_recordings_to_fw_track")

    radio_qs = lb_recommendations.build_radio_queryset(
        custom_factories.DummyPatch(), {"min_recordings": 1}, qs
    )
    recommended_recording_mbids = [
        "87dfa566-21c3-45ed-bc42-1d345b8563fa",
        "ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
    ]

    assert list(
        Track.objects.all().filter(Q(mbid__in=recommended_recording_mbids))
    ) == list(radio_qs)


def test_build_radio_queryset_without_fw_db(mocker):
    resolve_recordings_to_fw_track = mocker.patch.object(
        utils, "resolve_recordings_to_fw_track", return_value=None
    )
    # mocker.patch.object(cache, "get_many", return_value=None)

    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    assert resolve_recordings_to_fw_track.called_once_with(
        custom_factories.recommended_recording_mbids
    )


def test_build_radio_queryset_with_redis_and_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"] = 2
    mocker.patch.object(cache, "get_many", return_value=redis_cache)

    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk__in=[1, 2]))


def test_build_radio_queryset_with_redis_and_without_fw_db(factories, mocker):
    factories["music.Track"](
        pk="1", title="Super title", mbid="87dfaaaa-2aaa-45ed-bc42-1d34aaaaaaaa"
    )
    mocker.patch.object(utils, "resolve_recordings_to_fw_track", return_value=None)
    redis_cache = {}
    redis_cache["87dfa566-21c3-45ed-bc42-1d345b8563fa"] = 1
    mocker.patch.object(cache, "get_many", return_value=redis_cache)
    qs = Track.objects.all()

    assert list(
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
    ) == list(Track.objects.all().filter(pk=1))


def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
        side_effect=ConnectTimeout,
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )


def test_build_radio_queryset_catch_troi_no_candidates(mocker):
    mocker.patch.object(
        troi.core,
        "generate_playlist",
    )
    qs = Track.objects.all()

    with pytest.raises(ValueError):
        lb_recommendations.build_radio_queryset(
            custom_factories.DummyPatch(), {"min_recordings": 1}, qs
        )
@@ -2,8 +2,8 @@ import json
import random

import pytest
from django.core.exceptions import ValidationError
from django.urls import reverse
from rest_framework.exceptions import ValidationError

from funkwhale_api.favorites.models import TrackFavorite
from funkwhale_api.radios import models, radios, serializers

@@ -98,7 +98,7 @@ def test_can_get_choices_for_custom_radio(factories):
    session = factories["radios.CustomRadioSession"](
        custom_radio__config=[{"type": "artist", "ids": [artist.pk]}]
    )
    choices = session.radio.get_choices(filter_playable=False)
    choices = session.radio(api_version=1).get_choices(filter_playable=False)

    expected = [t.pk for t in tracks]
    assert list(choices.values_list("id", flat=True)) == expected

@@ -191,16 +191,17 @@ def test_can_get_track_for_session_from_api(factories, logged_in_api_client):


def test_related_object_radio_validate_related_object(factories):
    user = factories["users.User"]()
    # cannot start without related object
    radio = radios.ArtistRadio()
    radio = {"radio_type": "tag"}
    serializer = serializers.RadioSessionSerializer()
    with pytest.raises(ValidationError):
        radio.start_session(user)
        serializer.validate(data=radio)

    # cannot start with bad related object type
    radio = radios.ArtistRadio()
    radio = {"radio_type": "tag", "related_object": "whatever"}
    serializer = serializers.RadioSessionSerializer()
    with pytest.raises(ValidationError):
        radio.start_session(user, related_object=user)
        serializer.validate(data=radio)


def test_can_start_artist_radio(factories):

@@ -391,7 +392,7 @@ def test_get_choices_for_custom_radio_exclude_artist(factories):
            {"type": "artist", "ids": [excluded_artist.pk], "not": True},
        ]
    )
    choices = session.radio.get_choices(filter_playable=False)
    choices = session.radio(api_version=1).get_choices(filter_playable=False)

    expected = [u.track.pk for u in included_uploads]
    assert list(choices.values_list("id", flat=True)) == expected

@@ -409,7 +410,7 @@ def test_get_choices_for_custom_radio_exclude_tag(factories):
            {"type": "tag", "names": ["rock"], "not": True},
        ]
    )
    choices = session.radio.get_choices(filter_playable=False)
    choices = session.radio(api_version=1).get_choices(filter_playable=False)

    expected = [u.track.pk for u in included_uploads]
    assert list(choices.values_list("id", flat=True)) == expected
@@ -0,0 +1,144 @@
import json
import logging
import pickle
import random

from django.core.cache import cache
from django.urls import reverse

from funkwhale_api.favorites.models import TrackFavorite
from funkwhale_api.radios import models, radios_v2


def test_can_get_track_for_session_from_api_v2(factories, logged_in_api_client):
    actor = logged_in_api_client.user.create_actor()
    track = factories["music.Upload"](
        library__actor=actor, import_status="finished"
    ).track
    url = reverse("api:v2:radios:sessions-list")
    response = logged_in_api_client.post(url, {"radio_type": "random"})
    session = models.RadioSession.objects.latest("id")

    url = reverse("api:v2:radios:sessions-tracks", kwargs={"pk": session.pk})
    response = logged_in_api_client.get(url, {"session": session.pk})
    data = json.loads(response.content.decode("utf-8"))

    assert data[0]["id"] == track.pk

    next_track = factories["music.Upload"](
        library__actor=actor, import_status="finished"
    ).track
    response = logged_in_api_client.get(url, {"session": session.pk})
    data = json.loads(response.content.decode("utf-8"))

    assert data[0]["id"] == next_track.id


def test_can_use_radio_session_to_filter_choices_v2(factories):
    factories["music.Upload"].create_batch(10)
    user = factories["users.User"]()
    radio = radios_v2.RandomRadio()
    session = radio.start_session(user)

    radio.pick_many(quantity=10, filter_playable=False)

    # ensure 10 different tracks have been suggested
    tracks_id = [
        session_track.track.pk for session_track in session.session_tracks.all()
    ]
    assert len(set(tracks_id)) == 10


def test_session_radio_excludes_previous_picks_v2(factories, logged_in_api_client):
    tracks = factories["music.Track"].create_batch(5)
    url = reverse("api:v2:radios:sessions-list")
    response = logged_in_api_client.post(url, {"radio_type": "random"})
    session = models.RadioSession.objects.latest("id")
    url = reverse("api:v2:radios:sessions-tracks", kwargs={"pk": session.pk})

    previous_choices = []

    for i in range(5):
        response = logged_in_api_client.get(
            url, {"session": session.pk, "filter_playable": False}
        )
        pick = json.loads(response.content.decode("utf-8"))
        assert pick[0]["title"] not in previous_choices
        assert pick[0]["title"] in [t.title for t in tracks]
        previous_choices.append(pick[0]["title"])

    response = logged_in_api_client.get(url, {"session": session.pk})
    assert (
        json.loads(response.content.decode("utf-8"))
        == "Radio doesn't have more candidates"
    )


def test_can_get_choices_for_favorites_radio_v2(factories):
    files = factories["music.Upload"].create_batch(10)
    tracks = [f.track for f in files]
    user = factories["users.User"]()
    for i in range(5):
        TrackFavorite.add(track=random.choice(tracks), user=user)

    radio = radios_v2.FavoritesRadio()
    session = radio.start_session(user=user)
    choices = session.radio(api_version=2).get_choices(
        quantity=100, filter_playable=False
    )

    assert len(choices) == user.track_favorites.all().count()

    for favorite in user.track_favorites.all():
        assert favorite.track in choices


def test_can_get_choices_for_custom_radio_v2(factories):
    artist = factories["music.Artist"]()
    files = factories["music.Upload"].create_batch(5, track__artist=artist)
    tracks = [f.track for f in files]
    factories["music.Upload"].create_batch(5)

    session = factories["radios.CustomRadioSession"](
        custom_radio__config=[{"type": "artist", "ids": [artist.pk]}]
    )
    choices = session.radio(api_version=2).get_choices(
        quantity=1, filter_playable=False
    )

    expected = [t.pk for t in tracks]
    for t in choices:
        assert t.id in expected


def test_can_cache_radio_track(factories):
    uploads = factories["music.Track"].create_batch(10)
    user = factories["users.User"]()
    radio = radios_v2.RandomRadio()
    session = radio.start_session(user)
    picked = session.radio(api_version=2).pick_many(quantity=1, filter_playable=False)
    assert len(picked) == 1
    for t in pickle.loads(cache.get(f"radiotracks{session.id}")):
        assert t in uploads


def test_regenerate_cache_if_not_enough_tracks_in_it(
    factories, caplog, logged_in_api_client
):
    logger = logging.getLogger("funkwhale_api.radios.radios_v2")
    caplog.set_level(logging.INFO)
    logger.addHandler(caplog.handler)

    factories["music.Track"].create_batch(10)
    factories["users.User"]()
    url = reverse("api:v2:radios:sessions-list")
    response = logged_in_api_client.post(url, {"radio_type": "random"})
    session = models.RadioSession.objects.latest("id")
    url = reverse("api:v2:radios:sessions-tracks", kwargs={"pk": session.pk})
    logged_in_api_client.get(url, {"count": 9, "filter_playable": False})
    response = logged_in_api_client.get(url, {"count": 10, "filter_playable": False})
    pick = json.loads(response.content.decode("utf-8"))
    assert (
        "Not enough radio tracks in cache. Trying to generate new cache" in caplog.text
    )
    assert len(pick) == 1
@@ -0,0 +1,5 @@
def test_version():
    from funkwhale_api import __version__, version

    assert isinstance(version, str)
    assert version == __version__
@@ -12,5 +12,5 @@ def test_can_resolve_subsonic():


def test_can_resolve_v2():
    path = reverse("api:v2:instance:nodeinfo-2.0")
    assert path == "/api/v2/instance/nodeinfo/2.0"
    path = reverse("api:v2:instance:nodeinfo-2.1")
    assert path == "/api/v2/instance/nodeinfo/2.1"
@@ -0,0 +1,58 @@
import logging

import requests_mock
import typesense

from funkwhale_api.typesense import tasks


def test_add_tracks_to_index_fails(mocker, caplog):
    logger = logging.getLogger("funkwhale_api.typesense.tasks")
    caplog.set_level(logging.INFO)
    logger.addHandler(caplog.handler)

    client = typesense.Client(
        {
            "api_key": "api_key",
            "nodes": [{"host": "host", "port": "port", "protocol": "protocol"}],
            "connection_timeout_seconds": 2,
        }
    )

    with requests_mock.Mocker() as r_mocker:
        r_mocker.post(
            "protocol://host:port/collections/canonical_fw_data/documents/import",
            json=[{"name": "data"}],
        )
        mocker.patch.object(typesense, "Client", return_value=client)
        mocker.patch.object(
            typesense.client.ApiCall,
            "post",
            side_effect=typesense.exceptions.TypesenseClientError("Hello"),
        )
        tasks.add_tracks_to_index([1, 2, 3])
        assert "Can't build index" in caplog.text


def test_build_canonical_index_success(mocker, caplog, factories):
    logger = logging.getLogger("funkwhale_api.typesense.tasks")
    caplog.set_level(logging.INFO)
    logger.addHandler(caplog.handler)

    client = typesense.Client(
        {
            "api_key": "api_key",
            "nodes": [{"host": "host", "port": "port", "protocol": "protocol"}],
            "connection_timeout_seconds": 2,
        }
    )

    factories["music.Track"].create_batch(size=5)

    with requests_mock.Mocker() as r_mocker:
        mocker.patch.object(typesense, "Client", return_value=client)

        r_mocker.post("protocol://host:port/collections", json={"name": "data"})

        tasks.build_canonical_index()
        assert "Launching async task to add " in caplog.text
@@ -0,0 +1,43 @@
import requests_mock
import typesense
from django.core.cache import cache

from funkwhale_api.typesense import factories as custom_factories
from funkwhale_api.typesense import utils


def test_resolve_recordings_to_fw_track(mocker, factories):
    artist = factories["music.Artist"](name="artist_name")
    factories["music.Track"](
        pk=1,
        title="I Want It That Way",
        artist=artist,
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
    )
    factories["music.Track"](
        pk=2,
        title="I Want It That Way",
        artist=artist,
    )

    client = typesense.Client(
        {
            "api_key": "api_key",
            "nodes": [{"host": "host", "port": "port", "protocol": "protocol"}],
            "connection_timeout_seconds": 2,
        }
    )
    with requests_mock.Mocker() as r_mocker:
        mocker.patch.object(typesense, "Client", return_value=client)
        mocker.patch.object(
            typesense.client.ApiCall,
            "post",
            return_value=custom_factories.typesense_search_result,
        )
        r_mocker.get(
            "protocol://host:port/collections/canonical_fw_data/documents/search",
            json=custom_factories.typesense_search_result,
        )

        utils.resolve_recordings_to_fw_track(custom_factories.recording_list)
        assert cache.get("87dfa566-21c3-45ed-bc42-1d345b8563fa") == "1"
@@ -0,0 +1,28 @@
## {{ versiondata.version }} ({{ versiondata.date }})

Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html

{% for section, _ in sections.items() %}
{% if sections[section] %}
{% for category, val in definitions.items() if category in sections[section] %}
{{ definitions[category]['name'] }}:

{% if definitions[category]['showcontent'] %}
{% for text in sections[section][category].keys() | sort() %}
- {{ text }}
{% endfor %}

{% else %}

- {{ sections[section][category][''] | join(', ') }}

{% endif %}
{% if sections[section][category] | length == 0 %}
No significant changes.
{% else %}
{% endif %}
{% endfor %}
{% else %}
No significant changes.
{% endif %}
{% endfor %}
@@ -1,33 +0,0 @@
{{ versiondata.version }} ({{ versiondata.date }})
{{ top_underline * ((versiondata.version + versiondata.date)|length + 3)}}

Upgrade instructions are available at
https://docs.funkwhale.audio/administrator/upgrade/index.html

{% for section, _ in sections.items() %}
{% if sections[section] %}
{% for category, val in definitions.items() if category in sections[section]%}
{{ definitions[category]['name'] }}:

{% if definitions[category]['showcontent'] %}
{% for text in sections[section][category].keys()|sort() %}
- {{ text }}
{% endfor %}

{% else %}
- {{ sections[section][category]['']|join(', ') }}

{% endif %}
{% if sections[section][category]|length == 0 %}
No significant changes.

{% else %}
{% endif %}

{% endfor %}
{% else %}
No significant changes.


{% endif %}
{% endfor %}
@@ -92,3 +92,14 @@ services:
    ports:
      # override those variables in your .env file if needed
      - "${FUNKWHALE_API_IP}:${FUNKWHALE_API_PORT}:80"

  typesense:
    restart: unless-stopped
    env_file:
      - .env
    image: typesense/typesense:0.24.0
    volumes:
      - ./typesense/data:/data
    command: --data-dir /data --enable-cors
    profiles:
      - typesense
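
To verify the new service is reachable once the typesense profile is enabled, a small probe like the one below should work; the `/health` endpoint and the API-key header follow Typesense's documented HTTP API, while the URL and key are placeholders to match your `.env`:

```python
import requests

# Placeholders: match these to your .env values (8108 is Typesense's default port).
TYPESENSE_URL = "http://localhost:8108"
TYPESENSE_API_KEY = "apikey"

resp = requests.get(
    f"{TYPESENSE_URL}/health",
    headers={"X-TYPESENSE-API-KEY": TYPESENSE_API_KEY},
)
assert resp.json() == {"ok": True}
```
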
@@ -1,7 +1,8 @@
upstream fw {
    # depending on your setup, you may want to update this
    server ${FUNKWHALE_API_IP}:${FUNKWHALE_API_PORT};
}

# Required for websocket support.
map $http_upgrade $connection_upgrade {
    default upgrade;
    '' close;

@@ -10,15 +11,31 @@ map $http_upgrade $connection_upgrade {
server {
    listen 80;
    listen [::]:80;
    # update this to match your instance name
    server_name ${FUNKWHALE_HOSTNAME};
    location / { return 301 https://$host$request_uri; }

    # useful for Let's Encrypt
    location /.well-known/acme-challenge/ {
        allow all;
    }

    location / {
        return 301 https://$host$request_uri;
    }
}

server {
    listen 443 ssl http2;
    listen [::]:443 ssl http2;

    server_name ${FUNKWHALE_HOSTNAME};

    # TLS
    # Feel free to use your own configuration for SSL here or simply remove the
    # lines and move the configuration to the previous server block if you
    # don't want to run funkwhale behind https (this is not recommended)
    # have a look here for let's encrypt configuration:
    # https://certbot.eff.org/all-instructions/#debian-9-stretch-nginx
    ssl_protocols TLSv1.2;
    ssl_ciphers HIGH:!MEDIUM:!LOW:!aNULL:!NULL:!SHA;
    ssl_prefer_server_ciphers on;

@@ -29,12 +46,10 @@ server {
    # HSTS
    add_header Strict-Transport-Security "max-age=31536000";

    # Security related headers

    # If you are using S3 to host your files, remember to add your S3 URL to the
    # media-src and img-src headers (e.g. img-src 'self' https://<your-S3-URL> data:)

    add_header Content-Security-Policy "default-src 'self'; script-src 'self'; style-src 'self' 'unsafe-inline'; img-src 'self' data:; font-src 'self' data:; object-src 'none'; media-src 'self' data:";
    # General configs
    client_max_body_size ${NGINX_MAX_BODY_SIZE};
    charset utf-8;

    # compression settings
    gzip on;

@@ -42,7 +57,6 @@ server {
    gzip_min_length 256;
    gzip_proxied any;
    gzip_vary on;

    gzip_types
        application/javascript
        application/vnd.geo+json

@@ -61,10 +75,11 @@ server {
        text/vtt
        text/x-component
        text/x-cross-domain-policy;
    # end of compression settings

    location / {
        include /etc/nginx/funkwhale_proxy.conf;
        client_max_body_size ${NGINX_MAX_BODY_SIZE};
        proxy_pass http://fw;
    }
}
@@ -15,6 +15,9 @@
# Additional options you may want to check:
# - MUSIC_DIRECTORY_PATH and MUSIC_DIRECTORY_SERVE_PATH if you plan to use
#   in-place import
# - TYPESENSE_API_KEY if you want to enable Typesense and experiment with
#   the recommendation system. You can generate a key using
#   `openssl rand -base64 45`, for example
#
# Docker only
# -----------

@@ -96,7 +99,7 @@ REVERSE_PROXY_TYPE=nginx
#
# If you want to use Redis over unix sockets, you'll actually need two variables:
# For the cache part:
# CACHE_URL=redis:///run/redis/redis.sock?db=0
# CACHE_URL=unix:///run/redis/redis.sock?db=0
# For the Celery/asynchronous tasks part:
# CELERY_BROKER_URL=redis+socket:///run/redis/redis.sock?virtual_host=0
|
@ -1,10 +1,15 @@
|
|||
# This file was generated from Funkwhale's nginx.template
|
||||
|
||||
upstream funkwhale-api {
|
||||
# depending on your setup, you may want to update this
|
||||
server ${FUNKWHALE_API_IP}:${FUNKWHALE_API_PORT};
|
||||
}
|
||||
|
||||
# Required for websocket support.
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
listen [::]:80;
|
||||
|
@ -21,16 +26,10 @@ server {
|
|||
}
|
||||
}
|
||||
|
||||
# Required for websocket support.
|
||||
map $http_upgrade $connection_upgrade {
|
||||
default upgrade;
|
||||
'' close;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl http2;
|
||||
listen [::]:443 ssl http2;
|
||||
charset utf-8;
|
||||
|
||||
server_name ${FUNKWHALE_HOSTNAME};
|
||||
|
||||
# TLS
|
||||
|
@@ -49,12 +48,11 @@ server {
# HSTS
add_header Strict-Transport-Security "max-age=31536000";

add_header Content-Security-Policy "default-src 'self'; connect-src https: wss: http: ws: 'self' 'unsafe-eval'; script-src 'self' 'wasm-unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; media-src https: http: 'self' data:; object-src 'none'";
add_header Referrer-Policy "strict-origin-when-cross-origin";
add_header X-Frame-Options "SAMEORIGIN" always;
add_header Service-Worker-Allowed "/";

# General configs
root ${FUNKWHALE_FRONTEND_PATH};
client_max_body_size ${NGINX_MAX_BODY_SIZE};
charset utf-8;

# compression settings
gzip on;

@@ -62,7 +60,6 @@ server {
gzip_min_length 256;
gzip_proxied any;
gzip_vary on;

gzip_types
application/javascript
application/vnd.geo+json

@@ -83,6 +80,12 @@ server {
text/x-cross-domain-policy;
# end of compression settings

# headers
add_header Content-Security-Policy "default-src 'self'; connect-src https: wss: http: ws: 'self' 'unsafe-eval'; script-src 'self' 'wasm-unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; media-src https: http: 'self' data:; object-src 'none'";
add_header Referrer-Policy "strict-origin-when-cross-origin";
add_header X-Frame-Options "SAMEORIGIN" always;
add_header Service-Worker-Allowed "/";

location /api/ {
include /etc/nginx/funkwhale_proxy.conf;
# This is needed if you have file import via upload enabled.
@@ -90,17 +93,39 @@ server {
proxy_pass http://funkwhale-api;
}

location / {
location ~ ^/library/(albums|tracks|artists|playlists)/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location /channels/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location ~ ^/@(vite-plugin-pwa|vite|id)/ {
include /etc/nginx/funkwhale_proxy.conf;
alias ${FUNKWHALE_FRONTEND_PATH}/;
try_files $uri $uri/ /index.html;
}

location /@ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location / {
expires 1d;
include /etc/nginx/funkwhale_proxy.conf;
alias ${FUNKWHALE_FRONTEND_PATH}/;
try_files $uri $uri/ /index.html;
}

location ~ "/(front/)?embed.html" {
alias ${FUNKWHALE_FRONTEND_PATH}/embed.html;
add_header Content-Security-Policy "connect-src https: http: 'self'; default-src 'self'; script-src 'self' unpkg.com 'unsafe-inline' 'unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; object-src 'none'; media-src https: http: 'self' data:";
add_header Referrer-Policy "strict-origin-when-cross-origin";

alias ${FUNKWHALE_FRONTEND_PATH}/embed.html;
expires 1d;
}
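
Once nginx reloads, the routing split can be smoke-tested (music.example.com is a placeholder; exact status codes depend on your data):

curl -sI https://music.example.com/api/v1/instance/nodeinfo/2.0/ | head -n 1   # answered by funkwhale-api
curl -sI https://music.example.com/library/albums/1 | head -n 1                # also proxied to the API, presumably so it can inject page metadata
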
@@ -158,7 +183,7 @@ server {
# has been checked on API side.
# Set this to the same value as your MUSIC_DIRECTORY_PATH setting.
internal;
alias ${MUSIC_DIRECTORY_SERVE_PATH}/;
alias ${MUSIC_DIRECTORY_PATH}/;
add_header Access-Control-Allow-Origin '*';
}
@@ -166,4 +191,8 @@ server {
# If the reverse proxy is terminating SSL, nginx gets confused and redirects to http, hence the full URL
return 302 ${FUNKWHALE_PROTOCOL}://${FUNKWHALE_HOSTNAME}/api/v1/instance/spa-manifest.json;
}

location /staticfiles/ {
alias ${STATIC_ROOT}/;
}
}

dev.yml (34 changed lines)

@@ -19,7 +19,7 @@ services:
- "./po:/po"
networks:
- internal
command: "yarn dev --host --base /front/"
command: "yarn dev --host"

postgres:
env_file:
@@ -71,7 +71,6 @@ services:
- "DATABASE_URL=postgresql://postgres@postgres/postgres"
- "CACHE_URL=redis://redis:6379/0"
- "STATIC_ROOT=/staticfiles"
- "MEDIA_ROOT=/data/media"

depends_on:
- postgres

@@ -103,7 +102,6 @@ services:
- "FUNKWHALE_PROTOCOL=${FUNKWHALE_PROTOCOL-http}"
- "DATABASE_URL=postgresql://postgres@postgres/postgres"
- "CACHE_URL=redis://redis:6379/0"
- "MEDIA_ROOT=/data/media"
volumes:
- ./api:/app
- "${MUSIC_DIRECTORY_SERVE_PATH-./data/music}:/music:ro"
@@ -117,7 +115,6 @@ services:
- "node3.funkwhale.test:172.17.0.1"

nginx:
command: /entrypoint.sh
env_file:
- .env.dev
- .env

@@ -136,8 +133,7 @@ services:
- api
- front
volumes:
- ./docker/nginx/conf.dev:/etc/nginx/nginx.conf.template:ro
- ./docker/nginx/entrypoint.sh:/entrypoint.sh:ro
- ./docker/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
- "${MUSIC_DIRECTORY_SERVE_PATH-./data/music}:/music:ro"
- ./deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
- "./front:/frontend:ro"
@@ -156,15 +152,6 @@ services:
traefik.frontend.passHostHeader: true
traefik.docker.network: federation

docs:
build: docs
command: python3 serve.py
volumes:
- ".:/app/"
ports:
- "35730:35730"
- "8001:8001"

api-docs:
image: swaggerapi/swagger-ui:v3.37.2
environment:
@@ -175,6 +162,19 @@ services:
- "./docs/swagger.yml:/usr/share/nginx/html/swagger.yml"
- "./docs/api:/usr/share/nginx/html/api"

typesense:
env_file:
- .env.dev
- .env
image: typesense/typesense:0.24.0
networks:
- internal
volumes:
- ./typesense/data:/data
command: --data-dir /data --enable-cors
profiles:
- typesense
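
Because the service is gated behind a compose profile, it stays down by default and only starts when the profile is requested explicitly, e.g.:

docker compose -f dev.yml --profile typesense up -d
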
# minio:
# image: minio/minio
# command: server /data

@@ -193,5 +193,5 @@ services:
networks:
internal:
federation:
external:
name: federation
name: federation
external: true

@@ -0,0 +1,17 @@
{
  "group": {
    "default": {
      "targets": ["api", "front"]
    }
  },
  "target": {
    "api": {
      "context": "api",
      "target": "production"
    },
    "front": {
      "context": "front",
      "target": "production"
    }
  }
}
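
This is a Buildx bake definition grouping the two production images; assuming it is saved as docker-bake.json, both targets build with:

docker buildx bake -f docker-bake.json   # builds the "default" group: api and front
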

@@ -1,170 +1,167 @@
user nginx;
worker_processes 1;
upstream funkwhale-api {
# depending on your setup, you may want to update this
server ${FUNKWHALE_API_HOST}:${FUNKWHALE_API_PORT};
}

error_log /var/log/nginx/error.log warn;
pid /var/run/nginx.pid;
upstream funkwhale-front {
server ${FUNKWHALE_FRONT_IP}:${FUNKWHALE_FRONT_PORT};
}

events {
worker_connections 1024;
# Required for websocket support.
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}

http {
include /etc/nginx/mime.types;
default_type application/octet-stream;
server {
listen 80;
listen [::]:80;

log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
server_name _;

access_log /var/log/nginx/access.log main;
# General configs
root /usr/share/nginx/html;
client_max_body_size ${NGINX_MAX_BODY_SIZE};
charset utf-8;

sendfile on;
# compression settings
gzip on;
gzip_comp_level 5;
gzip_min_length 256;
gzip_proxied any;
gzip_vary on;
gzip_types
application/javascript
application/vnd.geo+json
application/vnd.ms-fontobject
application/x-font-ttf
application/x-web-app-manifest+json
font/opentype
image/bmp
image/svg+xml
image/x-icon
text/cache-manifest
text/css
text/plain
text/vcard
text/vnd.rim.location.xloc
text/vtt
text/x-component
text/x-cross-domain-policy;
# end of compression settings
keepalive_timeout 65;
# headers
add_header Content-Security-Policy "default-src 'self'; connect-src https: wss: http: ws: 'self' 'unsafe-eval'; script-src 'self' 'wasm-unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; media-src https: http: 'self' data:; object-src 'none'";
add_header Referrer-Policy "strict-origin-when-cross-origin";
add_header X-Frame-Options "SAMEORIGIN" always;
add_header Service-Worker-Allowed "/";

upstream funkwhale-api {
server ${FUNKWHALE_API_IP}:${FUNKWHALE_API_PORT};
}
upstream funkwhale-front {
server ${FUNKWHALE_FRONT_IP}:${FUNKWHALE_FRONT_PORT};
}

# Required for websocket support.
map $http_upgrade $connection_upgrade {
default upgrade;
'' close;
}

server {
listen 80;
listen [::]:80;
charset utf-8;
client_max_body_size ${NGINX_MAX_BODY_SIZE};
location /api/ {
include /etc/nginx/funkwhale_proxy.conf;
# This is needed if you have file import via upload enabled.
client_max_body_size ${NGINX_MAX_BODY_SIZE};
proxy_pass http://funkwhale-api;
}

add_header Content-Security-Policy "default-src 'self'; connect-src https: wss: http: ws: 'self' 'unsafe-eval'; script-src 'self' 'wasm-unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; media-src https: http: 'self' data:; object-src 'none'";
location ~ ^/library/(albums|tracks|artists|playlists)/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location /channels/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location ~ ^/@(vite-plugin-pwa|vite|id)/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-front;
}

location /@ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location / {
expires 1d;
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-front;
}

location = /embed.html {
proxy_pass http://funkwhale-front;
add_header Content-Security-Policy "connect-src https: http: 'self'; default-src 'self'; script-src 'self' unpkg.com 'unsafe-inline' 'unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; object-src 'none'; media-src https: http: 'self' data:";
add_header Referrer-Policy "strict-origin-when-cross-origin";
add_header X-Frame-Options "SAMEORIGIN" always;
add_header Service-Worker-Allowed "/";

# compression settings
gzip on;
gzip_comp_level 5;
gzip_min_length 256;
gzip_proxied any;
gzip_vary on;
expires 1d;
}
gzip_types
application/javascript
application/vnd.geo+json
application/vnd.ms-fontobject
application/x-font-ttf
application/x-web-app-manifest+json
font/opentype
image/bmp
image/svg+xml
image/x-icon
text/cache-manifest
text/css
text/plain
text/vcard
text/vnd.rim.location.xloc
text/vtt
text/x-component
text/x-cross-domain-policy;
# end of compression settings
location /federation/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location /api/ {
include /etc/nginx/funkwhale_proxy.conf;
# This is needed if you have file import via upload enabled.
client_max_body_size ${NGINX_MAX_BODY_SIZE};
proxy_pass http://funkwhale-api;
}
# You can comment this if you do not plan to use the Subsonic API.
location /rest/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api/api/subsonic/rest/;
}

location / {
proxy_pass http://funkwhale-front;
expires 1d;
}
location /.well-known/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

location ~ "/(front/)?embed.html" {
add_header Content-Security-Policy "connect-src https: http: 'self'; default-src 'self'; script-src 'self' unpkg.com 'unsafe-inline' 'unsafe-eval'; style-src https: http: 'self' 'unsafe-inline'; img-src https: http: 'self' data:; font-src https: http: 'self' data:; object-src 'none'; media-src https: http: 'self' data:";
add_header Referrer-Policy "strict-origin-when-cross-origin";
# Allow direct access to only specific subdirectories in /media
location /media/__sized__/ {
alias ${MEDIA_ROOT}/__sized__/;
add_header Access-Control-Allow-Origin '*';
}

proxy_pass http://funkwhale-front/embed.html;
expires 1d;
}
# Allow direct access to only specific subdirectories in /media
location /media/attachments/ {
alias ${MEDIA_ROOT}/attachments/;
add_header Access-Control-Allow-Origin '*';
}

location /federation/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}
# Allow direct access to only specific subdirectories in /media
location /media/dynamic_preferences/ {
alias ${MEDIA_ROOT}/dynamic_preferences/;
add_header Access-Control-Allow-Origin '*';
}

# You can comment this if you do not plan to use the Subsonic API.
location /rest/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api/api/subsonic/rest/;
}

location /media/__sized__/ {
alias /protected/media/__sized__/;
}

location /media/attachments/ {
alias /protected/media/attachments/;
}

location /.well-known/ {
include /etc/nginx/funkwhale_proxy.conf;
proxy_pass http://funkwhale-api;
}

# Allow direct access to only specific subdirectories in /media
location /media/__sized__/ {
alias /protected/media/__sized__/;
add_header Access-Control-Allow-Origin '*';
}

# Allow direct access to only specific subdirectories in /media
location /media/attachments/ {
alias /protected/media/attachments/;
add_header Access-Control-Allow-Origin '*';
}

# Allow direct access to only specific subdirectories in /media
location /media/dynamic_preferences/ {
alias ${MEDIA_ROOT}/dynamic_preferences/;
add_header Access-Control-Allow-Origin '*';
}
# This is an internal location that is used to serve
# media (uploaded) files once correct permission / authentication
# has been checked on API side.
# Comment the "NON-S3" commented lines and uncomment "S3" commented lines
# if you're storing media files in a S3 bucket.
location ~ /_protected/media/(.+) {
internal;
alias ${MEDIA_ROOT}/$1; # NON-S3
# Needed to ensure DSub auth isn't forwarded to S3/Minio, see #932.
# proxy_set_header Authorization ""; # S3
# proxy_pass $1; # S3
add_header Access-Control-Allow-Origin '*';
}

location /_protected/music/ {
# This is an internal location that is used to serve
# media (uploaded) files once correct permission / authentication
# local music files once correct permission / authentication
# has been checked on API side.
# Comment the "NON-S3" commented lines and uncomment "S3" commented lines
# if you're storing media files in a S3 bucket.
location ~ /_protected/media/(.+) {
internal;
alias /protected/media/$1; # NON-S3
# Needed to ensure DSub auth isn't forwarded to S3/Minio, see #932.
# proxy_set_header Authorization ""; # S3
# proxy_pass $1; # S3
add_header Access-Control-Allow-Origin '*';
}
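
For reference, the S3 variant described by the comments above would read as follows (a sketch obtained by swapping the NON-S3 and S3 lines; nothing here goes beyond what the comments already state):

location ~ /_protected/media/(.+) {
internal;
# Needed to ensure DSub auth isn't forwarded to S3/Minio, see #932.
proxy_set_header Authorization "";
proxy_pass $1;
add_header Access-Control-Allow-Origin '*';
}
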
# Set this to the same value as your MUSIC_DIRECTORY_PATH setting.
internal;
alias ${MUSIC_DIRECTORY_PATH}/;
add_header Access-Control-Allow-Origin '*';
}

location /_protected/music/ {
# This is an internal location that is used to serve
# local music files once correct permission / authentication
# has been checked on API side.
# Set this to the same value as your MUSIC_DIRECTORY_PATH setting.
internal;
alias /music/;
add_header Access-Control-Allow-Origin '*';
}

location /manifest.json {
# If the reverse proxy is terminating SSL, nginx gets confused and redirects to http, hence the full URL
return 302 ${FUNKWHALE_PROTOCOL}://${FUNKWHALE_HOSTNAME}/api/v1/instance/spa-manifest.json;
}

location /manifest.json {
# If the reverse proxy is terminating SSL, nginx gets confused and redirects to http, hence the full URL
return 302 ${FUNKWHALE_PROTOCOL}://${FUNKWHALE_HOSTNAME}/api/v1/instance/spa-manifest.json;
}

location /staticfiles/ {
alias /usr/share/nginx/html/staticfiles/;
}
}

@@ -1,11 +0,0 @@
#!/bin/sh

set -eux

TEMPLATE_PATH="/etc/nginx/nginx.conf.template"
CONFIG_PATH="/etc/nginx/nginx.conf"

ALLOWED_VARS="$(env | cut -d '=' -f 1 | xargs printf "\${%s} ")"
envsubst "$ALLOWED_VARS" < "$TEMPLATE_PATH" | tee "$CONFIG_PATH"

nginx-debug -g 'daemon off;'
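
This custom entrypoint becomes unnecessary once dev.yml mounts the template under /etc/nginx/templates/ (see the nginx service change above): the official nginx image (1.19+) runs an equivalent envsubst pass over *.template files at startup. The substitution trick itself, standalone and with hypothetical file names:

ALLOWED_VARS="$(env | cut -d '=' -f 1 | xargs printf "\${%s} ")"
envsubst "$ALLOWED_VARS" < default.conf.template > default.conf
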
@@ -17,5 +17,5 @@ services:

networks:
federation:
external:
name: federation
name: federation
external: true

@@ -1,5 +0,0 @@
FROM python:3.11

RUN apt-get update && apt-get install -y graphviz
RUN pip install sphinx livereload sphinx_rtd_theme django-environ django myst-parser sphinx-design sphinx-multiversion sphinxcontrib-mermaid sphinx-copybutton
WORKDIR /app/docs

@@ -1,20 +1,85 @@
# Minimal makefile for Sphinx documentation
#
SHELL = bash
UNAME := $(shell uname)
ifeq ($(UNAME), Linux)
CPU_CORES = $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))
else
CPU_CORES = $(shell N=$$(sysctl -n hw.physicalcpu); echo $$(( $$N > 4 ? 4 : $$N )))
endif

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = python3 -msphinx
SPHINXPROJ = funkwhale
SOURCEDIR = .
BUILDDIR = _build
# Install
VENV = .venv
export POETRY_VIRTUALENVS_IN_PROJECT=true

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
$(VENV):
	$(MAKE) install

.PHONY: help Makefile
install:
	poetry install
	poetry run pip install --no-deps --editable ../api

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
clean:
	git clean -xdf .

# Sphinx
SPHINX_OPTS = -j $(CPU_CORES)
SOURCE_DIR = .
BUILD_DIR = _build

$(BUILD_DIR):
	mkdir -p $(BUILD_DIR)

# Dev
dev: $(VENV)
	poetry run sphinx-autobuild . /tmp/_build/ --port 8001

# I18n
LOCALES = en_GB en_US fr

locale-generate: $(VENV)
	poetry run sphinx-build -b gettext $(SOURCE_DIR) locales/gettext $(SPHINX_OPTS)

locale-update: $(VENV)
	poetry run sphinx-intl update -p locales/gettext $(foreach locale,$(LOCALES),-l $(locale))

locale-prune-untranslated: $(VENV)
	poetry run _scripts/locale-prune-untranslated.py
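
A typical localization pass therefore chains the targets above, run from the docs directory:

make locale-generate              # extract translatable strings into locales/gettext
make locale-update                # sync the per-locale .po files
make locale-prune-untranslated    # prune .po files via the helper script below
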
# Swagger
SWAGGER_VERSION = 5.1.2
SWAGGER_RELEASE_URL = https://github.com/swagger-api/swagger-ui/archive/refs/tags/v$(SWAGGER_VERSION).tar.gz
SWAGGER_BUILD_DIR = swagger

swagger:
	mkdir "$(SWAGGER_BUILD_DIR)"
	curl -sSL "$(SWAGGER_RELEASE_URL)" | \
		tar --extract \
			--gzip \
			--directory="$(SWAGGER_BUILD_DIR)" \
			--strip-components=2 \
			"swagger-ui-$(SWAGGER_VERSION)/dist"

	sed -i \
		"s#https://petstore.swagger.io/v2/swagger.json#schema.yml#g" \
		"$(SWAGGER_BUILD_DIR)/swagger-initializer.js"

	cp schema.yml "$(SWAGGER_BUILD_DIR)/schema.yml"

# Releases
$(BUILD_DIR)/releases.json: $(BUILD_DIR)
	../scripts/releases.py > "$@"

$(BUILD_DIR)/latest.txt: $(BUILD_DIR)
	../scripts/releases.py -r -q latest.id > "$@"

releases: $(BUILD_DIR)/releases.json $(BUILD_DIR)/latest.txt

# Build
build: $(VENV)
	poetry run sphinx-build $(SOURCE_DIR) $(BUILD_DIR) $(SPHINX_OPTS)

build-translated: $(VENV) locale-prune-untranslated
	for locale in $(LOCALES); do \
		poetry run sphinx-build $(SOURCE_DIR) $(BUILD_DIR)/$$locale $(SPHINX_OPTS) -D language=$$locale; \
	done

build-all: build build-translated releases swagger
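
So a full documentation build from a fresh checkout is presumably just:

make install     # poetry venv plus an editable install of ../api
make build-all   # default build, translated builds, release metadata and swagger UI
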

@@ -0,0 +1,19 @@
#!/usr/bin/env python3

import os
from subprocess import check_output


def main() -> int:
    output = check_output(["poetry", "run", "sphinx-intl", "stat"], text=True)
    for line in output.splitlines():
        path, _, comment = line.partition(":")
        if "0 untranslated." in comment:
            print(f"removing untranslated po file: {path}")
            os.unlink(path)

    return 0


if __name__ == "__main__":
    raise SystemExit(main())
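
As wired up in the Makefile above, the helper is meant to be run through poetry from the docs directory; it shells out to `sphinx-intl stat` (whose lines look like "<path>: N translated, N fuzzy, N untranslated.") and unlinks every .po file whose stat line matches its check:

poetry run _scripts/locale-prune-untranslated.py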