Merge branch 'pre-release/1.3.0' into stable
This commit is contained in:
commit
8612a921a0
|
@ -0,0 +1,12 @@
|
|||
followings
|
||||
inforce
|
||||
keypair
|
||||
nam
|
||||
nd
|
||||
readby
|
||||
serie
|
||||
upto
|
||||
|
||||
# Names
|
||||
nin
|
||||
noe
|
|
@ -1,29 +0,0 @@
|
|||
# http://editorconfig.org
|
||||
|
||||
root = true
|
||||
|
||||
[*]
|
||||
charset = utf-8
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
|
||||
[*.{py,rst,ini}]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.py]
|
||||
line_length=120
|
||||
known_first_party=funkwhale_api
|
||||
multi_line_output=3
|
||||
default_section=THIRDPARTY
|
||||
|
||||
[*.{html,js,vue,css,scss,json,yml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[Makefile]
|
||||
indent_style = tab
|
5
.env.dev
5
.env.dev
|
@ -1,4 +1,4 @@
|
|||
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1
|
||||
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
|
||||
DJANGO_SETTINGS_MODULE=config.settings.local
|
||||
DJANGO_SECRET_KEY=dev
|
||||
C_FORCE_ROOT=true
|
||||
|
@ -10,7 +10,7 @@ MUSIC_DIRECTORY_PATH=/music
|
|||
BROWSABLE_API_ENABLED=True
|
||||
FORWARDED_PROTO=http
|
||||
LDAP_ENABLED=False
|
||||
FUNKWHALE_SPA_HTML_ROOT=http://nginx/front/
|
||||
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
|
||||
PYTHONTRACEMALLOC=0
|
||||
|
||||
# Uncomment this if you're using traefik/https
|
||||
|
@ -18,3 +18,4 @@ PYTHONTRACEMALLOC=0
|
|||
|
||||
# Customize to your needs
|
||||
POSTGRES_VERSION=11
|
||||
DEBUG=true
|
||||
|
|
|
@ -0,0 +1,11 @@
|
|||
# Use .flake8 file until pyproject.toml is supported
|
||||
# See https://github.com/PyCQA/flake8/issues/234
|
||||
[flake8]
|
||||
max-line-length = 120
|
||||
extend-exclude =
|
||||
*/migrations/*
|
||||
extend-ignore =
|
||||
F405
|
||||
W503
|
||||
E203
|
||||
E741
|
|
@ -86,7 +86,7 @@ front/tests/e2e/reports
|
|||
front/selenium-debug.log
|
||||
docs/_build
|
||||
|
||||
data/
|
||||
/data/
|
||||
.env
|
||||
|
||||
po/*.po
|
||||
|
|
449
.gitlab-ci.yml
449
.gitlab-ci.yml
|
@ -1,41 +1,58 @@
|
|||
---
|
||||
include:
|
||||
- project: funkwhale/ci
|
||||
file: /templates/pre-commit.yml
|
||||
- project: funkwhale/ci
|
||||
file: /templates/lychee.yml
|
||||
|
||||
variables:
|
||||
IMAGE_NAME: funkwhale/funkwhale
|
||||
IMAGE: $IMAGE_NAME:$CI_COMMIT_REF_NAME
|
||||
IMAGE_LATEST: $IMAGE_NAME:latest
|
||||
ALL_IN_ONE_IMAGE_NAME: funkwhale/all-in-one
|
||||
ALL_IN_ONE_IMAGE: $ALL_IN_ONE_IMAGE_NAME:$CI_COMMIT_REF_NAME
|
||||
ALL_IN_ONE_IMAGE_LATEST: $ALL_IN_ONE_IMAGE_NAME:latest
|
||||
PIP_CACHE_DIR: "$CI_PROJECT_DIR/pip-cache"
|
||||
PYTHONDONTWRITEBYTECODE: "true"
|
||||
REVIEW_DOMAIN: preview.funkwhale.audio
|
||||
REVIEW_INSTANCE_URL: https://funkwhale.juniorjpdj.pl
|
||||
DOCKER_HOST: tcp://docker:2375/
|
||||
DOCKER_DRIVER: overlay2
|
||||
DOCKER_TLS_CERTDIR: ""
|
||||
BUILD_PLATFORMS: linux/amd64,linux/arm64,linux/arm/v7
|
||||
|
||||
PIP_CACHE_DIR: $CI_PROJECT_DIR/.cache/pip
|
||||
|
||||
.shared_variables:
|
||||
# Keep the git files permissions during job setup
|
||||
keep_git_files_permissions: &keep_git_files_permissions
|
||||
GIT_STRATEGY: clone
|
||||
GIT_DEPTH: "5"
|
||||
FF_DISABLE_UMASK_FOR_DOCKER_EXECUTOR: "true"
|
||||
|
||||
default:
|
||||
tags:
|
||||
- docker
|
||||
|
||||
workflow:
|
||||
rules:
|
||||
# Run for any event on the default branches in the funkwhale namespace
|
||||
- if: >
|
||||
$CI_PROJECT_NAMESPACE == "funkwhale" &&
|
||||
(
|
||||
$CI_COMMIT_BRANCH =~ /(stable|develop)/ ||
|
||||
$CI_COMMIT_TAG
|
||||
)
|
||||
# Run for merge requests from any repo or branches
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
|
||||
stages:
|
||||
- review
|
||||
- lint
|
||||
- build
|
||||
- test
|
||||
- deploy
|
||||
- deps
|
||||
- build
|
||||
- publish
|
||||
|
||||
review_front:
|
||||
interruptible: true
|
||||
stage: review
|
||||
image: node:16-buster
|
||||
image: node:18-alpine
|
||||
when: manual
|
||||
allow_failure: true
|
||||
variables:
|
||||
BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
|
||||
VUE_APP_ROUTER_BASE_URL: /-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/
|
||||
VUE_APP_INSTANCE_URL: $REVIEW_INSTANCE_URL
|
||||
VUE_APP_INSTANCE_URL: https://demo.funkwhale.audio
|
||||
NODE_ENV: review
|
||||
before_script:
|
||||
- curl -L -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
|
||||
- chmod +x /usr/local/bin/jq
|
||||
- apk add --no-cache jq bash coreutils python3
|
||||
- rm -rf front-review
|
||||
- mkdir front-review
|
||||
- cd front
|
||||
|
@ -43,7 +60,7 @@ review_front:
|
|||
- yarn install
|
||||
# this is to ensure we don't have any errors in the output,
|
||||
# cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
|
||||
- yarn run build | tee /dev/stderr | (! grep -i 'ERROR in')
|
||||
- yarn run build --base ./ | tee /dev/stderr | (! grep -i 'ERROR in')
|
||||
- cp -r dist/* ../front-review
|
||||
artifacts:
|
||||
expire_in: 2 weeks
|
||||
|
@ -54,10 +71,6 @@ review_front:
|
|||
paths:
|
||||
- front/node_modules
|
||||
- front/yarn.lock
|
||||
only:
|
||||
- branches
|
||||
tags:
|
||||
- docker
|
||||
environment:
|
||||
name: review/front/$CI_COMMIT_REF_NAME
|
||||
url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/front-review/index.html
|
||||
|
@ -65,9 +78,8 @@ review_front:
|
|||
review_docs:
|
||||
interruptible: true
|
||||
stage: review
|
||||
when: manual
|
||||
allow_failure: true
|
||||
image: python:3.10
|
||||
image: python:3.11
|
||||
variables:
|
||||
BUILD_PATH: "../docs-review"
|
||||
before_script:
|
||||
|
@ -82,7 +94,7 @@ review_docs:
|
|||
- git switch stable && git pull
|
||||
- git switch $CI_COMMIT_BRANCH && git pull
|
||||
script:
|
||||
- poetry run python -m sphinx . $BUILD_PATH
|
||||
- poetry run python3 -m sphinx . $BUILD_PATH
|
||||
cache:
|
||||
key: "$CI_PROJECT_ID__sphinx"
|
||||
paths:
|
||||
|
@ -91,77 +103,79 @@ review_docs:
|
|||
expire_in: 2 weeks
|
||||
paths:
|
||||
- docs-review
|
||||
only:
|
||||
- branches
|
||||
tags:
|
||||
- docker
|
||||
environment:
|
||||
name: review/docs/$CI_COMMIT_REF_NAME
|
||||
url: http://$CI_PROJECT_NAMESPACE.pages.funkwhale.audio/-/$CI_PROJECT_NAME/-/jobs/$CI_JOB_ID/artifacts/docs-review/index.html
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- docs/**/*
|
||||
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_REF_NAME != $CI_DEFAULT_BRANCH
|
||||
when: manual
|
||||
|
||||
black:
|
||||
interruptible: true
|
||||
image: python:3.6
|
||||
stage: lint
|
||||
variables:
|
||||
GIT_STRATEGY: fetch
|
||||
before_script:
|
||||
- pip install black==19.10b0
|
||||
script:
|
||||
- black --check --diff api/
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
changes:
|
||||
- api/**/*
|
||||
find_broken_links:
|
||||
allow_failure:
|
||||
exit_codes: 2
|
||||
|
||||
flake8:
|
||||
interruptible: true
|
||||
image: python:3.6
|
||||
stage: lint
|
||||
variables:
|
||||
GIT_STRATEGY: fetch
|
||||
before_script:
|
||||
- pip install 'flake8<3.7'
|
||||
extends: [.lychee]
|
||||
script:
|
||||
- flake8 -v api
|
||||
cache:
|
||||
key: "$CI_PROJECT_ID__flake8_pip_cache"
|
||||
paths:
|
||||
- "$PIP_CACHE_DIR"
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
changes:
|
||||
- api/**/*
|
||||
- >
|
||||
lychee
|
||||
--cache
|
||||
--no-progress
|
||||
--exclude-all-private
|
||||
--exclude-mail
|
||||
--exclude 'demo\.funkwhale\.audio'
|
||||
--exclude 'nginx\.com'
|
||||
--exclude-path 'docs/_templates/'
|
||||
-- . || exit $?
|
||||
|
||||
changelog_snippet:
|
||||
interruptible: true
|
||||
image: alpine:3.17
|
||||
stage: lint
|
||||
before_script:
|
||||
- apk add git
|
||||
- git fetch origin $CI_MERGE_REQUEST_TARGET_BRANCH_NAME
|
||||
script:
|
||||
- git diff --name-only FETCH_HEAD | grep "changes/changelog.d/*"
|
||||
rules:
|
||||
- if: $CI_COMMIT_AUTHOR == 'Renovate Bot <bot@dev.funkwhale.audio>'
|
||||
when: never
|
||||
- if: $CI_MERGE_REQUEST_TITLE =~ /NOCHANGELOG/
|
||||
when: never
|
||||
- if: $CI_PIPELINE_SOURCE == 'merge_request_event'
|
||||
|
||||
pre-commit:
|
||||
extends: [.pre-commit]
|
||||
|
||||
eslint:
|
||||
interruptible: true
|
||||
image: node:16-buster
|
||||
image: node:18-alpine
|
||||
stage: lint
|
||||
allow_failure: true
|
||||
before_script:
|
||||
- cd front
|
||||
- apk add --no-cache jq bash coreutils python3
|
||||
- yarn install
|
||||
script:
|
||||
- yarn lint --max-warnings 0
|
||||
- yarn lint:tsc
|
||||
cache:
|
||||
key: "$CI_PROJECT_ID__eslint_npm_cache"
|
||||
paths:
|
||||
- front/node_modules
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
changes:
|
||||
- front/**/*
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
changes:
|
||||
- front/**/*
|
||||
|
||||
test_api:
|
||||
interruptible: true
|
||||
services:
|
||||
- postgres:11
|
||||
- redis:5
|
||||
- postgres:15-alpine
|
||||
- redis:7-alpine
|
||||
stage: test
|
||||
image: $CI_REGISTRY/funkwhale/backend-test-docker:3.7
|
||||
retry: 1
|
||||
cache:
|
||||
key: "$CI_PROJECT_ID__pip_cache"
|
||||
paths:
|
||||
|
@ -172,96 +186,109 @@ test_api:
|
|||
DJANGO_SETTINGS_MODULE: config.settings.local
|
||||
POSTGRES_HOST_AUTH_METHOD: trust
|
||||
CACHE_URL: "redis://redis:6379/0"
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
changes:
|
||||
- api/**/*
|
||||
before_script:
|
||||
- cd api
|
||||
- pip3 install -r requirements/base.txt
|
||||
- pip3 install -r requirements/local.txt
|
||||
- pip3 install -r requirements/test.txt
|
||||
- poetry install --no-root
|
||||
script:
|
||||
- pytest --cov-report xml --cov-report term-missing:skip-covered --cov=funkwhale_api --junitxml=report.xml tests/
|
||||
tags:
|
||||
- docker
|
||||
- poetry run pytest --cov-report xml --cov-report term-missing:skip-covered --cov=funkwhale_api --junitxml=report.xml tests/
|
||||
artifacts:
|
||||
when: always
|
||||
expire_in: 2 weeks
|
||||
reports:
|
||||
junit: api/report.xml
|
||||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: api/coverage.xml
|
||||
parallel:
|
||||
matrix:
|
||||
- PY_VER: ["3.7", "3.8", "3.9", "3.10", "3.11"]
|
||||
image: $CI_REGISTRY/funkwhale/backend-test-docker:$PY_VER
|
||||
coverage: '/TOTAL\s*\d*\s*\d*\s*(\d*%)/'
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event" || $CI_PIPELINE_SOURCE == "push"
|
||||
changes:
|
||||
- api/**/*
|
||||
- if: $CI_COMMIT_REF_PROTECTED == "true"
|
||||
when: always
|
||||
|
||||
test_front:
|
||||
interruptible: true
|
||||
stage: test
|
||||
image: node:16-buster
|
||||
before_script:
|
||||
- cd front
|
||||
only:
|
||||
refs:
|
||||
- branches
|
||||
changes:
|
||||
- front/**/*
|
||||
script:
|
||||
- yarn install --check-files
|
||||
- yarn test:unit
|
||||
cache:
|
||||
key: "funkwhale__front_dependencies"
|
||||
paths:
|
||||
- front/node_modules
|
||||
- front/yarn.lock
|
||||
artifacts:
|
||||
name: "front_${CI_COMMIT_REF_NAME}"
|
||||
paths:
|
||||
- front/dist/
|
||||
reports:
|
||||
junit: front/test-results.xml
|
||||
tags:
|
||||
- docker
|
||||
# Those tests are disabled for now since no vitest dom emulation is providing
|
||||
# AudioContext, which is required for our HTML audio player
|
||||
#test_front:
|
||||
# interruptible: true
|
||||
# stage: test
|
||||
# image: node:18-alpine
|
||||
# before_script:
|
||||
# - cd front
|
||||
# - apk add --no-cache jq bash coreutils python3
|
||||
# script:
|
||||
# - yarn install --check-files
|
||||
# - yarn test:unit
|
||||
# cache:
|
||||
# key: "funkwhale__front_dependencies"
|
||||
# paths:
|
||||
# - front/node_modules
|
||||
# - front/yarn.lock
|
||||
# artifacts:
|
||||
# name: "front_${CI_COMMIT_REF_NAME}"
|
||||
# paths:
|
||||
# - front/dist/
|
||||
# reports:
|
||||
# junit: front/coverage/cobertura-coverage.xml
|
||||
# tags:
|
||||
# - docker
|
||||
# rules:
|
||||
# - if: $CI_PIPELINE_SOURCE == "merge_request_event" || $CI_PIPELINE_SOURCE == "push"
|
||||
# changes:
|
||||
# - front/**/*
|
||||
# - if: $CI_COMMIT_REF_PROTECTED == "true"
|
||||
# when: always
|
||||
|
||||
build_front:
|
||||
build_openapi_schema:
|
||||
stage: build
|
||||
image: node:16-buster
|
||||
before_script:
|
||||
- curl -L -o /usr/local/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
|
||||
- chmod +x /usr/local/bin/jq
|
||||
- cd front
|
||||
script:
|
||||
- yarn install
|
||||
- yarn run i18n-compile
|
||||
# this is to ensure we don't have any errors in the output,
|
||||
# cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
|
||||
- yarn build | tee /dev/stderr | (! grep -i 'ERROR in')
|
||||
- chmod -R 755 dist
|
||||
artifacts:
|
||||
name: "front_${CI_COMMIT_REF_NAME}"
|
||||
image: $CI_REGISTRY/funkwhale/backend-test-docker:3.11
|
||||
services:
|
||||
- postgres:15-alpine
|
||||
- redis:7-alpine
|
||||
cache:
|
||||
key: "$CI_PROJECT_ID__pip_cache"
|
||||
paths:
|
||||
- front/dist/
|
||||
only:
|
||||
- tags@funkwhale/funkwhale
|
||||
- master@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
- develop@funkwhale/funkwhale
|
||||
tags:
|
||||
- docker
|
||||
- "$PIP_CACHE_DIR"
|
||||
variables:
|
||||
DATABASE_URL: "postgresql://postgres@postgres/postgres"
|
||||
FUNKWHALE_URL: "https://funkwhale.ci"
|
||||
DJANGO_SETTINGS_MODULE: config.settings.local
|
||||
POSTGRES_HOST_AUTH_METHOD: trust
|
||||
CACHE_URL: "redis://redis:6379/0"
|
||||
API_TYPE: "v1"
|
||||
before_script:
|
||||
- cd api
|
||||
- pip3 install poetry
|
||||
- poetry install
|
||||
- poetry run funkwhale-manage migrate
|
||||
script:
|
||||
- poetry run funkwhale-manage spectacular --file ../docs/schema.yml
|
||||
artifacts:
|
||||
expire_in: 2 weeks
|
||||
paths:
|
||||
- docs/schema.yml
|
||||
|
||||
build_documentation:
|
||||
stage: build
|
||||
image: python:3.10
|
||||
image: python:3.11
|
||||
needs:
|
||||
- job: build_openapi_schema
|
||||
artifacts: true
|
||||
variables:
|
||||
BUILD_PATH: "../public"
|
||||
GIT_STRATEGY: clone
|
||||
GIT_DEPTH: 0
|
||||
before_script:
|
||||
- cd docs
|
||||
- apt-get update
|
||||
- apt-get install -y graphviz git
|
||||
- apt-get install -y graphviz
|
||||
- pip install poetry
|
||||
- poetry install
|
||||
- git switch develop && git pull
|
||||
- git switch stable && git pull
|
||||
- git switch $CI_COMMIT_BRANCH && git pull
|
||||
- git branch stable --track origin/stable || true
|
||||
- git branch develop --track origin/develop || true
|
||||
script:
|
||||
- ./build_docs.sh
|
||||
cache:
|
||||
|
@ -269,18 +296,64 @@ build_documentation:
|
|||
paths:
|
||||
- "$PIP_CACHE_DIR"
|
||||
artifacts:
|
||||
expire_in: 2 weeks
|
||||
paths:
|
||||
- public
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == "stable" || $CI_COMMIT_BRANCH == "develop"
|
||||
when: always
|
||||
- changes:
|
||||
- docs/**/*
|
||||
when: always
|
||||
|
||||
build_front:
|
||||
stage: build
|
||||
image: node:18-alpine
|
||||
variables:
|
||||
<<: *keep_git_files_permissions
|
||||
before_script:
|
||||
- apk add --no-cache jq bash coreutils python3
|
||||
- cd front
|
||||
script:
|
||||
- yarn install
|
||||
# this is to ensure we don't have any errors in the output,
|
||||
# cf https://dev.funkwhale.audio/funkwhale/funkwhale/issues/169
|
||||
- yarn run build:deployment | tee /dev/stderr | (! grep -i 'ERROR in')
|
||||
artifacts:
|
||||
name: front_${CI_COMMIT_REF_NAME}
|
||||
paths:
|
||||
- front/dist/
|
||||
only:
|
||||
- tags@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
tags:
|
||||
- docker
|
||||
- develop@funkwhale/funkwhale
|
||||
|
||||
build_api:
|
||||
stage: build
|
||||
image: bash
|
||||
variables:
|
||||
<<: *keep_git_files_permissions
|
||||
script:
|
||||
- rm -rf api/tests
|
||||
- >
|
||||
if [ "$CI_COMMIT_REF_NAME" == "develop" ] || [ "$CI_COMMIT_REF_NAME" == "stable" ]; then
|
||||
./scripts/set-api-build-metadata.sh $CI_COMMIT_SHORT_SHA;
|
||||
fi
|
||||
artifacts:
|
||||
name: api_${CI_COMMIT_REF_NAME}
|
||||
paths:
|
||||
- api
|
||||
only:
|
||||
- tags@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
- develop@funkwhale/funkwhale
|
||||
|
||||
deploy_documentation:
|
||||
stage: deploy
|
||||
stage: publish
|
||||
image: alpine
|
||||
dependencies:
|
||||
- build_documentation
|
||||
needs:
|
||||
- job: build_documentation
|
||||
artifacts: true
|
||||
before_script:
|
||||
- apk add openssh-client rsync
|
||||
- mkdir -p ~/.ssh
|
||||
|
@ -293,19 +366,33 @@ deploy_documentation:
|
|||
rules:
|
||||
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
|
||||
|
||||
|
||||
.docker_publish:
|
||||
stage: deploy
|
||||
stage: publish
|
||||
image: egon0/docker-with-buildx-and-git:bash
|
||||
parallel:
|
||||
matrix:
|
||||
- COMPONENT: ["api", "front"]
|
||||
variables:
|
||||
<<: *keep_git_files_permissions
|
||||
|
||||
IMAGE_NAME: funkwhale/$COMPONENT
|
||||
IMAGE: $IMAGE_NAME:$CI_COMMIT_REF_NAME
|
||||
IMAGE_LATEST: $IMAGE_NAME:latest
|
||||
|
||||
DOCKER_HOST: tcp://docker:2375/
|
||||
DOCKER_DRIVER: overlay2
|
||||
DOCKER_TLS_CERTDIR: ""
|
||||
BUILD_PLATFORMS: linux/amd64,linux/arm64,linux/arm/v7
|
||||
tags:
|
||||
- multiarch
|
||||
services:
|
||||
- docker:20-dind
|
||||
before_script:
|
||||
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
|
||||
- docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
|
||||
- cp -r front/dist api/frontend
|
||||
script:
|
||||
cache:
|
||||
key: docker_public_${CI_COMMIT_REF_NAME}
|
||||
paths:
|
||||
- ~/.cargo
|
||||
|
||||
docker_publish_stable_release:
|
||||
# Publish a docker image for releases
|
||||
|
@ -317,7 +404,7 @@ docker_publish_stable_release:
|
|||
- ./docs/get-releases-json.py | scripts/is-docker-latest.py $CI_COMMIT_TAG - && export DOCKER_LATEST_TAG="-t $IMAGE_LATEST" || export DOCKER_LATEST_TAG=;
|
||||
- export major="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1)"
|
||||
- export minor="$(echo $CI_COMMIT_REF_NAME | cut -d '.' -f 1,2)"
|
||||
- cd api
|
||||
- cd $COMPONENT
|
||||
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
|
||||
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE $DOCKER_LATEST_TAG -t $IMAGE_NAME:$major -t $IMAGE_NAME:$minor .
|
||||
|
||||
|
@ -328,67 +415,17 @@ docker_publish_unstable_release:
|
|||
- if: $CI_COMMIT_TAG && $CI_COMMIT_REF_NAME !~ /^[0-9]+(.[0-9]+){1,2}$/
|
||||
script:
|
||||
# Check if this is the latest release
|
||||
- cd api
|
||||
- cd $COMPONENT
|
||||
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
|
||||
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .
|
||||
|
||||
docker_published_non-release:
|
||||
docker_publish_non-release:
|
||||
# Publish a docker image for each commit on develop
|
||||
extends: .docker_publish
|
||||
only:
|
||||
- develop@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
script:
|
||||
- ./scripts/set-api-build-metadata.sh $CI_COMMIT_SHORT_SHA
|
||||
- cd api
|
||||
- cd $COMPONENT
|
||||
- docker buildx create --use --name A$CI_COMMIT_SHORT_SHA
|
||||
- docker buildx build --platform $BUILD_PLATFORMS --push -t $IMAGE .
|
||||
|
||||
docker_all_in_one_release:
|
||||
stage: deploy
|
||||
image: egon0/docker-with-buildx-and-git:bash
|
||||
services:
|
||||
- docker:20-dind
|
||||
variables:
|
||||
ALL_IN_ONE_REF: main
|
||||
ALL_IN_ONE_ARTIFACT_URL: https://dev.funkwhale.audio/funkwhale/funkwhale-docker-all-in-one/-/archive/$ALL_IN_ONE_REF/funkwhale-docker-all-in-one-$ALL_IN_ONE_REF.zip
|
||||
BUILD_PATH: all_in_one
|
||||
before_script:
|
||||
- docker login -u $DOCKER_LOGIN -p $DOCKER_PASSWORD
|
||||
- (if [ "$CI_COMMIT_REF_NAME" == "develop" ] || [ "$CI_COMMIT_REF_NAME" == "master" ] || [ "$CI_COMMIT_REF_NAME" == "stable" ]; then ./scripts/set-api-build-metadata.sh $(echo $CI_COMMIT_SHA | cut -c 1-8); fi);
|
||||
script:
|
||||
- if [[ ! -z "$CI_COMMIT_TAG" ]]; then (./docs/get-releases-json.py | scripts/is-docker-latest.py $CI_COMMIT_TAG -) && export DOCKER_LATEST_TAG="-t $ALL_IN_ONE_IMAGE_LATEST" || export DOCKER_LATEST_TAG=; fi
|
||||
- wget $ALL_IN_ONE_ARTIFACT_URL -O all_in_one.zip
|
||||
- unzip -o all_in_one.zip -d tmpdir
|
||||
- mv tmpdir/funkwhale-docker-all-in-one-$ALL_IN_ONE_REF $BUILD_PATH && rmdir tmpdir
|
||||
- cp -r api $BUILD_PATH/src/api
|
||||
- cp -r front $BUILD_PATH/src/front
|
||||
- cd $BUILD_PATH
|
||||
- ./scripts/download-nginx-template.sh src/ $CI_COMMIT_REF_NAME
|
||||
- docker build -t $ALL_IN_ONE_IMAGE $DOCKER_LATEST_TAG .
|
||||
- docker push $ALL_IN_ONE_IMAGE
|
||||
- if [[ ! -z "$DOCKER_LATEST_TAG" ]]; then docker push $ALL_IN_ONE_IMAGE_LATEST; fi
|
||||
only:
|
||||
- develop@funkwhale/funkwhale
|
||||
- master@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
- tags@funkwhale/funkwhale
|
||||
|
||||
build_api:
|
||||
# Simply publish a zip containing api/ directory
|
||||
stage: deploy
|
||||
image: bash
|
||||
artifacts:
|
||||
name: "api_${CI_COMMIT_REF_NAME}"
|
||||
paths:
|
||||
- api
|
||||
script:
|
||||
- rm -rf api/tests
|
||||
- (if [ "$CI_COMMIT_REF_NAME" == "develop" ] || [ "$CI_COMMIT_REF_NAME" == "stable" ] || [ "$CI_COMMIT_REF_NAME" == "master" ]; then ./scripts/set-api-build-metadata.sh $(echo $CI_COMMIT_SHA | cut -c 1-8); fi);
|
||||
- chmod -R 750 api
|
||||
- echo Done!
|
||||
only:
|
||||
- tags@funkwhale/funkwhale
|
||||
- master@funkwhale/funkwhale
|
||||
- stable@funkwhale/funkwhale
|
||||
- develop@funkwhale/funkwhale
|
||||
|
|
|
@ -0,0 +1,26 @@
|
|||
<!--
|
||||
|
||||
Hello and welcome to our issue tracker! We appreciate you taking the time to help us make Funkwhale even better :smile:
|
||||
|
||||
Before you begin, here are some helpful tips for submitting an issue:
|
||||
|
||||
* If you need help setting up or using Funkwhale, try asking in our
|
||||
[forum's support board](https://forum.funkwhale.audio/support) or our [support
|
||||
chat room](https://riot.im/app/#/room/#funkwhale-support:tchncs.de).
|
||||
* If you think you've found a bug but aren't sure, you can ask in one of the above channels
|
||||
first. Once you're confident it's a bug, go ahead and create an issue for us to investigate.
|
||||
Select the **Bug** template in the **Description** dropdown menu. This template contains
|
||||
helpful tips for creating your report.
|
||||
* For smaller additions and enhancements, please file a feature request.
|
||||
Select the **Feature Request** template in the **Description** dropdown
|
||||
menu. This template contains helpful tips for creating your request.
|
||||
* If you have ideas for new features or use cases, consider opening a thread in our
|
||||
[forum](https://forum.funkwhale.audio/t/development). This enables us to
|
||||
discuss the feature, its use cases, and the development effort it requires.
|
||||
|
||||
We always like hearing ideas from our community. If you're still not sure, click
|
||||
the **Create issue** button and we'll work with you to sort out the issue.
|
||||
|
||||
Happy listening! :whale:
|
||||
|
||||
-->
|
|
@ -1,4 +1,4 @@
|
|||
Please avoid merging the base branch into your feature branch. We are working with rebases and those merged tend to cause trouble.
|
||||
Please avoid merging the base branch into your feature branch. We are working with rebases and those merged tend to cause trouble.
|
||||
For further questions, join us at Matrix: https://matrix.to/#/#funkwhale-dev:matrix.org
|
||||
|
||||
If your contribution is fixing an issue by a small change, please consider a merge into `stable` by using it as target branch.
|
||||
|
|
|
@ -0,0 +1,75 @@
|
|||
{
|
||||
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
|
||||
"extends": [
|
||||
":dependencyDashboard",
|
||||
":maintainLockFilesWeekly",
|
||||
":enablePreCommit",
|
||||
":semanticCommits",
|
||||
":semanticCommitScopeDisabled",
|
||||
"group:monorepos",
|
||||
"group:recommended"
|
||||
],
|
||||
"baseBranches": ["stable", "develop"],
|
||||
"branchConcurrentLimit": 2,
|
||||
"prConcurrentLimit": 1,
|
||||
"rangeStrategy": "pin",
|
||||
"ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"],
|
||||
"packageRules": [
|
||||
{
|
||||
"matchPaths": ["api/*", "front/*", "docs/*"],
|
||||
"additionalBranchPrefix": "{{parentDir}}-",
|
||||
"semanticCommitScope": "{{parentDir}}"
|
||||
},
|
||||
{
|
||||
"matchUpdateTypes": ["lockFileMaintenance"],
|
||||
"branchConcurrentLimit": 0,
|
||||
"prConcurrentLimit": 0
|
||||
},
|
||||
{
|
||||
"matchUpdateTypes": ["major", "minor"],
|
||||
"matchBaseBranches": ["stable"],
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"matchDepTypes": ["devDependencies"],
|
||||
"matchBaseBranches": ["stable"],
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"matchUpdateTypes": ["patch", "pin", "digest"],
|
||||
"matchBaseBranches": ["develop"],
|
||||
"automerge": true,
|
||||
"automergeType": "branch"
|
||||
},
|
||||
{
|
||||
"matchManagers": ["npm"],
|
||||
"addLabels": ["Area::Frontend"]
|
||||
},
|
||||
{
|
||||
"matchManagers": ["poetry"],
|
||||
"addLabels": ["Area::Backend"]
|
||||
},
|
||||
{
|
||||
"matchPackagePatterns": ["^@vueuse/.*"],
|
||||
"groupName": "vueuse"
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["channels", "channels-redis", "daphne"],
|
||||
"groupName": "channels"
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["node"],
|
||||
"allowedVersions": "/\\d+[02468]$/"
|
||||
},
|
||||
{
|
||||
"matchFiles": ["deploy/docker-compose.yml"],
|
||||
"matchPackageNames": ["postgres"],
|
||||
"postUpgradeTasks": {
|
||||
"commands": [
|
||||
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
|
||||
],
|
||||
"fileFilters": ["changes/changelog.d/postgres.update"]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,103 @@
|
|||
image:
|
||||
file: .gitpod/Dockerfile
|
||||
|
||||
tasks:
|
||||
- name: Backend
|
||||
env:
|
||||
ENV_FILE: /workspace/funkwhale/.gitpod/.env
|
||||
COMPOSE_FILE: /workspace/funkwhale/.gitpod/docker-compose.yml
|
||||
before: |
|
||||
cp .gitpod/gitpod.env .gitpod/.env
|
||||
cd api
|
||||
init: |
|
||||
mkdir -p ../data/media/attachments ../data/music ../data/staticfiles
|
||||
docker-compose up -d
|
||||
|
||||
poetry env use python
|
||||
poetry install
|
||||
|
||||
gp ports await 5432
|
||||
|
||||
poetry run funkwhale-manage migrate
|
||||
poetry run funkwhale-manage gitpod init
|
||||
command: |
|
||||
echo "MEDIA_URL=`gp url 8000`/media/" >> ../.gitpod/.env
|
||||
echo "STATIC_URL=`gp url 8000`/staticfiles/" >> ../.gitpod/.env
|
||||
echo "FUNKWHALE_HOSTNAME=`gp url 8000 | sed 's#https://##'`" >> ../.gitpod/.env
|
||||
echo "FUNKWHALE_PROTOCOL=https" >> ../.gitpod/.env
|
||||
|
||||
docker-compose up -d
|
||||
gp ports await 5432
|
||||
poetry run funkwhale-manage collectstatic --no-input
|
||||
poetry run funkwhale-manage gitpod dev
|
||||
|
||||
- name: Celery Worker
|
||||
env:
|
||||
ENV_FILE: /workspace/funkwhale/.gitpod/.env
|
||||
before: cd api
|
||||
command: |
|
||||
gp ports await 5000
|
||||
poetry run celery -A funkwhale_api.taskapp worker -l debug -B --concurrency=0
|
||||
|
||||
- name: Frontend
|
||||
env:
|
||||
VUE_EDITOR: code
|
||||
before: cd front
|
||||
init: |
|
||||
yarn install
|
||||
command: yarn dev --host 0.0.0.0 --base ./
|
||||
|
||||
- name: Welcome to Funkwhale development!
|
||||
env:
|
||||
COMPOSE_FILE: /workspace/funkwhale/.gitpod/docker-compose.yml
|
||||
ENV_FILE: /workspace/funkwhale/.gitpod/.env
|
||||
VUE_EDITOR: code
|
||||
DJANGO_SETTINGS_MODULE: config.settings.local
|
||||
init: pre-commit install
|
||||
command: |
|
||||
pre-commit run --all && clear
|
||||
echo ""
|
||||
echo -e " ⠀⠀⠸⣿⣷⣦⣄⣠⣶⣾⣿⠇⠀⠀ You can now start developing Funkwhale with gitpod!"
|
||||
echo -e " ⠀⠀⠀⠈⠉⠻⣿⣿⠟⠉⠁⠀⠀⠀"
|
||||
echo -e " \u1b[34m⣀⠀⢀⡀⢀⣀\u1b[0m⠹⠇\u1b[34m⣀⡀⢀⡀⠀⣀ \u1b[0mTo sign in to the superuser account,"
|
||||
echo -e " \u1b[34m⢻⣇⠘⣧⡈⠻⠶⠶⠟⢁⣾⠃⣸⡟ \u1b[0mplease use these credentials:"
|
||||
echo -e " \u1b[34m⠀⠻⣦⡈⠻⠶⣶⣶⠶⠟⢁⣴⠟⠀"
|
||||
echo -e " \u1b[34m⠀⠀⠈⠻⠷⣦⣤⣤⣴⠾⠟⠁⠀⠀ gitpod\u1b[0m:\u1b[34mgitpod"
|
||||
echo ""
|
||||
|
||||
ports:
|
||||
- port: 8000
|
||||
visibility: public
|
||||
onOpen: notify
|
||||
|
||||
- port: 5000
|
||||
visibility: private
|
||||
onOpen: ignore
|
||||
|
||||
- port: 5432
|
||||
visibility: private
|
||||
onOpen: ignore
|
||||
|
||||
- port: 5678
|
||||
visibility: private
|
||||
onOpen: ignore
|
||||
|
||||
- port: 6379
|
||||
visibility: private
|
||||
onOpen: ignore
|
||||
|
||||
- port: 8080
|
||||
visibility: private
|
||||
onOpen: ignore
|
||||
|
||||
vscode:
|
||||
extensions:
|
||||
- Vue.volar
|
||||
- ms-python.python
|
||||
- ms-toolsai.jupyter
|
||||
- ms-toolsai.jupyter-keymap
|
||||
- ms-toolsai.jupyter-renderers
|
||||
- hbenl.vscode-test-explorer
|
||||
- hbenl.test-adapter-converter
|
||||
- littlefoxteam.vscode-python-test-adapter
|
||||
- ZixuanChen.vitest-explorer
|
|
@ -0,0 +1,9 @@
|
|||
FROM gitpod/workspace-full:2022-11-15-17-00-18
|
||||
USER gitpod
|
||||
|
||||
RUN sudo apt update -y \
|
||||
&& sudo apt install libsasl2-dev libldap2-dev libssl-dev ffmpeg gettext -y
|
||||
|
||||
RUN pip install poetry pre-commit \
|
||||
&& poetry config virtualenvs.create true \
|
||||
&& poetry config virtualenvs.in-project true
|
|
@ -0,0 +1,43 @@
|
|||
version: "3"
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15-alpine
|
||||
environment:
|
||||
- "POSTGRES_HOST_AUTH_METHOD=trust"
|
||||
volumes:
|
||||
- "../data/postgres:/var/lib/postgresql/data"
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
volumes:
|
||||
- "../data/redis:/data"
|
||||
ports:
|
||||
- 6379:6379
|
||||
|
||||
nginx:
|
||||
command: /entrypoint.sh
|
||||
env_file:
|
||||
- ./.env
|
||||
image: nginx
|
||||
ports:
|
||||
- 8000:80
|
||||
extra_hosts:
|
||||
- host.docker.internal:host-gateway
|
||||
environment:
|
||||
- "NGINX_MAX_BODY_SIZE=100M"
|
||||
- "FUNKWHALE_API_IP=host.docker.internal"
|
||||
- "FUNKWHALE_API_PORT=5000"
|
||||
- "FUNKWHALE_FRONT_IP=host.docker.internal"
|
||||
- "FUNKWHALE_FRONT_PORT=8080"
|
||||
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
|
||||
volumes:
|
||||
- ../data/media:/protected/media:ro
|
||||
- ../data/music:/music:ro
|
||||
- ../data/staticfiles:/staticfiles:ro
|
||||
- ../deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
|
||||
- ../docker/nginx/conf.dev:/etc/nginx/nginx.conf.template:ro
|
||||
- ../docker/nginx/entrypoint.sh:/entrypoint.sh:ro
|
||||
- ../front:/frontend:ro
|
|
@ -0,0 +1,26 @@
|
|||
# Dev Environment Variables
|
||||
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
|
||||
DJANGO_SETTINGS_MODULE=config.settings.local
|
||||
C_FORCE_ROOT=true
|
||||
BROWSABLE_API_ENABLED=True
|
||||
FORWARDED_PROTO=http
|
||||
LDAP_ENABLED=False
|
||||
FUNKWHALE_SPA_HTML_ROOT=http://localhost:8000/
|
||||
FUNKWHALE_URL=http://localhost:8000/
|
||||
MUSIC_DIRECTORY_PATH=/workspace/funkwhale/data/music
|
||||
STATIC_ROOT=/workspace/funkwhale/data/staticfiles/
|
||||
MEDIA_ROOT=/workspace/funkwhale/data/media/
|
||||
|
||||
PYTHONTRACEMALLOC=0
|
||||
PYTHONDONTWRITEBYTECODE=true
|
||||
|
||||
POSTGRES_VERSION=14
|
||||
DEBUG=true
|
||||
|
||||
|
||||
# Django Environment Variables
|
||||
DATABASE_URL=postgresql://postgres@localhost:5432/postgres
|
||||
DJANGO_SECRET_KEY=gitpod
|
||||
THROTTLING_ENABLED=False
|
||||
|
||||
# Gitpod Environment Variables
|
|
@ -0,0 +1,74 @@
|
|||
---
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
hooks:
|
||||
- id: check-added-large-files
|
||||
- id: check-case-conflict
|
||||
- id: check-executables-have-shebangs
|
||||
- id: check-shebang-scripts-are-executable
|
||||
- id: check-symlinks
|
||||
- id: destroyed-symlinks
|
||||
|
||||
- id: check-json
|
||||
- id: check-yaml
|
||||
- id: check-xml
|
||||
- id: check-toml
|
||||
|
||||
- id: check-vcs-permalinks
|
||||
- id: check-merge-conflict
|
||||
- id: end-of-file-fixer
|
||||
exclude: ^(docs/locales/.*/LC_MESSAGES)
|
||||
- id: mixed-line-ending
|
||||
- id: trailing-whitespace
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.3.1
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py37-plus]
|
||||
exclude: ^(api/.*/migrations/.*)
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.12.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/pycqa/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/pycqa/flake8
|
||||
rev: 6.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v2.7.1
|
||||
hooks:
|
||||
- id: prettier
|
||||
files: \.(md|yml|yaml|json)$
|
||||
|
||||
- repo: https://github.com/codespell-project/codespell
|
||||
rev: v2.2.2
|
||||
hooks:
|
||||
- id: codespell
|
||||
additional_dependencies: [tomli]
|
||||
|
||||
- repo: https://github.com/shellcheck-py/shellcheck-py
|
||||
rev: v0.8.0.4
|
||||
hooks:
|
||||
- id: shellcheck
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pwa-manifest.json
|
||||
name: pwa-manifest.json
|
||||
description: Sync pwa-manifest.json
|
||||
entry: scripts/sync-pwa-manifest.sh
|
||||
pass_filenames: false
|
||||
language: script
|
||||
files: pwa-manifest.json$
|
|
@ -0,0 +1,36 @@
|
|||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Attach python debugger",
|
||||
"type": "python",
|
||||
"request": "attach",
|
||||
"connect": {
|
||||
"host": "localhost",
|
||||
"port": 5678
|
||||
},
|
||||
"django": true
|
||||
},
|
||||
{
|
||||
"name": "Debug python",
|
||||
"type": "python",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"cwd": "${workspaceFolder}/api",
|
||||
"envFile": "${workspaceFolder}/.gitpod/.env",
|
||||
"args": [
|
||||
"--reload",
|
||||
"config.asgi:application",
|
||||
"--host",
|
||||
"0.0.0.0",
|
||||
"--port",
|
||||
"5000",
|
||||
"--reload-dir",
|
||||
"config/",
|
||||
"--reload-dir",
|
||||
"funkwhale_api/"
|
||||
],
|
||||
"django": true
|
||||
}
|
||||
]
|
||||
}
|
|
@ -0,0 +1,108 @@
|
|||
{
|
||||
"Callout": {
|
||||
"prefix": "callout",
|
||||
"description": "Add a callout to highlight information",
|
||||
"body": [
|
||||
":::{${1|attention,caution,danger,error,hint,important,note,seealso,tip,warning|}} ${2:Optional title}",
|
||||
"${0:${TM_SELECTED_TEXT}}",
|
||||
":::"
|
||||
],
|
||||
},
|
||||
"Code tabs": {
|
||||
"prefix": "code-tabs",
|
||||
"description": "Insert a set of code tabs",
|
||||
"body": [
|
||||
":::{tab-set-code}",
|
||||
"",
|
||||
"$0",
|
||||
"",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
"Tab set": {
|
||||
"prefix": "tab-set",
|
||||
"description": "Insert a group of generic tabs",
|
||||
"body": [
|
||||
"::::{tab-set}",
|
||||
":::{tab-item} ${1:Tab title}",
|
||||
"$2",
|
||||
":::",
|
||||
"",
|
||||
":::{tab-item} ${3:Tab title}",
|
||||
"$0",
|
||||
":::",
|
||||
"",
|
||||
"::::"
|
||||
]
|
||||
},
|
||||
"Insert fragment": {
|
||||
"prefix": "insert fragment",
|
||||
"description": "Insert reusable text from another file",
|
||||
"body": [
|
||||
":::{include} ${1:full path to file}",
|
||||
":start-after: ${2:the text to start after}",
|
||||
":end-before: ${0:the text to end before}",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
"Dropdown": {
|
||||
"prefix": "insert dropdown",
|
||||
"description": "Insert a dropdown (accordion)",
|
||||
"body": [
|
||||
":::{dropdown} ${1:title}",
|
||||
"${0:${TM_SELECTED_TEXT}}",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
"Versioning": {
|
||||
"prefix": "version change",
|
||||
"description": "Specify when a feature was added, changed, or deprecated",
|
||||
"body": [
|
||||
":::{${1|versionadded,versionchanged,deprecated|}} ${2:v4.32.0}",
|
||||
"${0:${TM_SELECTED_TEXT}}",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
"List table": {
|
||||
"prefix": "list table",
|
||||
"description": "Insert a table defined as a set of lists",
|
||||
"body": [
|
||||
":::{list-table} ${1:Optional title}",
|
||||
":header-rows: ${2:Number of header rows}",
|
||||
"",
|
||||
"* - ${3: First row column 1}",
|
||||
" - ${4: First row column 2}",
|
||||
"* - ${5: Second row column 1}",
|
||||
" - ${0: Second row column 2}",
|
||||
":::"
|
||||
]
|
||||
},
|
||||
"Guilabel": {
|
||||
"prefix": "guilabel",
|
||||
"description": "Format text as a GUI label (e.g. a button label or interface label",
|
||||
"body": [
|
||||
"{guilabel}`${0:${TM_SELECTED_TEXT}}`"
|
||||
]
|
||||
},
|
||||
"File": {
|
||||
"prefix": "file",
|
||||
"description": "Format text as a file name or path",
|
||||
"body": [
|
||||
"{file}`${0:${TM_SELECTED_TEXT}}`"
|
||||
]
|
||||
},
|
||||
"Increase indent": {
|
||||
"prefix": "increase indent",
|
||||
"description": "Increase the indentation of all selected colon or backtick fences",
|
||||
"body": [
|
||||
"${TM_SELECTED_TEXT/((?<c>[`:])\\k<c>{2,})/$1$2/gm}"
|
||||
]
|
||||
},
|
||||
"Deprecation warning": {
|
||||
"prefix": "insert deprecation warning",
|
||||
"description": "Inserts an inline deprecation badge. Useful in tables of parameters",
|
||||
"body": [
|
||||
"{bdg-warning}`Deprecated`"
|
||||
]
|
||||
}
|
||||
}
|
|
@ -0,0 +1,14 @@
|
|||
{
|
||||
"python.defaultInterpreterPath": "/workspace/funkwhale/api/.venv/bin/python",
|
||||
"python.testing.cwd": "/workspace/funkwhale/api",
|
||||
"python.envFile": "/workspace/funkwhale/.gitpod/.env",
|
||||
"python.testing.pytestArgs": ["--cov=funkwhale_api", "tests/"],
|
||||
"python.testing.unittestEnabled": false,
|
||||
"python.testing.pytestEnabled": true,
|
||||
"vitest.enable": true,
|
||||
"vitest.commandLine": "yarn vitest",
|
||||
"i18n-ally.localesPaths": ["front/src/locales"],
|
||||
"i18n-ally.pathMatcher": "*.json",
|
||||
"i18n-ally.enabledFrameworks": ["vue"],
|
||||
"i18n-ally.keystyle": "nested"
|
||||
}
|
505
CHANGELOG
505
CHANGELOG
|
@ -10,12 +10,410 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.
|
|||
|
||||
.. towncrier
|
||||
|
||||
1.2.10 (2023-03-17)
|
||||
-------------------
|
||||
1.3.0 (2023-06-01)
|
||||
------------------
|
||||
|
||||
Upgrade instructions are available at
|
||||
https://docs.funkwhale.audio/admin/upgrading.html
|
||||
|
||||
Update instructions:
|
||||
|
||||
- If you are running the docker deployment, make sure to update our compose file.
|
||||
In this small example we show you how to save the old config and update it
|
||||
correctly:
|
||||
|
||||
```
|
||||
export FUNKWHALE_VERSION="1.3.0"
|
||||
cd /srv/funkwhale
|
||||
docker-compose down
|
||||
mv docker-compose.yml docker-compose.bak
|
||||
curl -L -o /srv/funkwhale/docker-compose.yml "https://dev.funkwhale.audio/funkwhale/funkwhale/raw/${FUNKWHALE_VERSION}/deploy/docker-compose.yml"
|
||||
```
|
||||
|
||||
.. note::
|
||||
If you need to customize your nginx template, e.g. to work around `problems with
|
||||
Docker's resolver <https://docs.funkwhale.audio/admin/external-storages.html#no-resolver-found>`_, you can mount your
|
||||
custom nginx configuration into the container. Uncomment the commented volumes in the `nginx` section of your `docker-compose.yml`.
|
||||
Additionally you need to update the paths in `nginx/funkwhale.template`.
|
||||
Replace all occurrences of `/funkwhale` by `/usr/share/nginx/html`.
|
||||
This loads the templates from your `nginx` folder and overrides the template files in the Docker container.
|
||||
|
||||
```
|
||||
docker-compose up -d
|
||||
```
|
||||
- The Docker instructions now use the updated Docker compose plugin. If you previously used the ``docker-compose`` standalone installation, do the following while upgrading:
|
||||
|
||||
1. Download the `Docker compose plugin <https://docs.docker.com/compose/install/linux/#install-using-the-repository>`_
|
||||
2. Stop your containers using the **docker-compose** syntax.
|
||||
|
||||
.. code-block:: sh
|
||||
sudo docker-compose down
|
||||
|
||||
3. Bring the containers back up using the **docker compose** syntax.
|
||||
|
||||
.. code-block:: sh
|
||||
sudo docker compose up -d
|
||||
|
||||
After this you can continue to use the **docker compose** syntax for all Docker management tasks.
|
||||
- Upgrade Postgres to version 15. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)
|
||||
- With this update Funkwhale starts using poetry to maintain its dependencies. We therefore recommend
|
||||
removing the old virtualenv by running `rm -rf /srv/funkwhale/virtualenv`.
|
||||
|
||||
|
||||
Features:
|
||||
|
||||
- Add a management command to create a new library for a user
|
||||
- Add Gitpod configuration and guide
|
||||
- Add Sentry SDK to collect #1479
|
||||
- Prepare API for the upcoming version 2
|
||||
- Rewrite player to be based on Web Audio API
|
||||
|
||||
|
||||
Enhancements:
|
||||
|
||||
- Add a celery task to scan remote library (#1712)
|
||||
- Add coverage report for Frontend Tests
|
||||
- Add hint which serializer is used for OembedView (#1901)
|
||||
- Add music visualizer (#1135)
|
||||
- Add playable tracks to gitpod instance
|
||||
- Add playlists radio to search page (#1968)
|
||||
- Add proper serialization for TextPreviewView (#1903)
|
||||
- Add python debug and test support for gitpod
|
||||
- Add Serializer for SpaManifest endpoint
|
||||
- Add support for python 3.11
|
||||
- Added proper serializers for the rate-limit endpoint.
|
||||
- Added type hints to the API.
|
||||
- Adding support for play all radio in search result page (#1563)
|
||||
- All administrator documentation has been rewritten to improve clarity and update outdated information.
|
||||
- Allow arbitrary length names for artists, albums and tracks
|
||||
- Allow installing the funkwhale_api package
|
||||
- Allow using default browser dark mode and update UI dynamically on change
|
||||
- Apply migrations on API container start (!1879)
|
||||
- Automatically fetch next page of tracks (#1526)
|
||||
- Build frontend natively for cross-arch docker images
|
||||
- Change unmaintained PyMemoize library to django-cache-memoize
|
||||
to enable Python 3.10 support
|
||||
- Cleaned up frontend docker container
|
||||
- Cleanup Gitlab CI and Dockerfiles (!1796)
|
||||
- Create the funkwhale-manage entrypoint in the api package
|
||||
- Created migration guide for the deprecated all-in-one docker container.
|
||||
- Don't buffer python stdout/err in docker
|
||||
- Don't compile python byte code in docker
|
||||
- Don't use poetry in production deployments
|
||||
- Drop direct dependency on pyopenssl (#1975)
|
||||
- Exclude /api/v1/oauth/authorize from the specs since its not supported yet (#1899)
|
||||
- Fix openapi specs for user endpoints (#1892, #1894)
|
||||
- Fix Serializer for inline channel artists (#1833)
|
||||
- Fix specs for ListenViewSet (#1898)
|
||||
- Handle PWA correctly and provide better cache strategy for album covers (#1721)
|
||||
- Improve docker caching
|
||||
- Improve specification of LibraryFollowViewSet (#1896)
|
||||
- Install API python package in docker image
|
||||
- Make CI always run all tests on protected branches.
|
||||
- Make mutations endpoint appear in openapi specs
|
||||
- Make Python 3.10 tests in CI mandatory
|
||||
- Make sure ChannelViewSet always has a serializer (#1895)
|
||||
- Migrate to new queue system from old localStorage keys
|
||||
- Migrate to Vue 3
|
||||
- Migrate to vue-i18n (#1831)
|
||||
Fix locale changing (#1862)
|
||||
- Migrated to sphinx-design.
|
||||
- New task checking if remote instance is reachable to avoid playback latence (#1711)
|
||||
- OAuth Application client secrets are now hashed before storing them to the DB. Those are only displayed once from now on!
|
||||
- Parameterize the default S3 ACL when uploading objects. (#1319)
|
||||
- Pin Alpine package versions in API Dockerfile (fixes part of CI build issues).
|
||||
- Prefer using the funkwhale-manage entrypoint
|
||||
- Prevent running two pipelines for MRs
|
||||
- Random and less listened radio filter out un-owned content on library section (#2007)
|
||||
- Refactor node info endpoint to use proper serializers
|
||||
- Refactor SettingsView to use a proper serializer
|
||||
- Remove unnecessary or wrong `is` keyword usage from backend
|
||||
- Rename OpenAPI schema's operation ids for nicer API client method names.
|
||||
- Replace django-channels package with web socket implementation from @vueuse/core (#1715)
|
||||
- Retry fetching new radio track 5 times if error occurred before resetting radio session (#2022)
|
||||
- Rewrite embedded player to petite-vue
|
||||
- Split DATABASE_URL into multiple configuration variables
|
||||
- The ListenBrainz plugin submits the track duration
|
||||
- Update Django OAuth Toolkit to version 2, ref #1944
|
||||
- Update migration after django update (#1815)
|
||||
- Update upload status when import fails (#1999)
|
||||
- Updated the installation guides to make installation steps clearer.
|
||||
- Upgrade docker base image to alpine 3.17
|
||||
- Use proper serializer for Search endpoint (#1902)
|
||||
|
||||
|
||||
Refactoring:
|
||||
|
||||
- Instead of requesting the right spa content from the API using a middleware we now serve the
|
||||
Frontend directly with nginx and only proxy-forward for API endpoints
|
||||
- Replace django-rest-auth with dj-rest-auth (#1877)
|
||||
|
||||
|
||||
Bugfixes:
|
||||
|
||||
- Allow enabling systemd funkwhale.target
|
||||
- Allow playback of media from external frontend (#1937).
|
||||
- Allow summary field of actors to be blank. This leaves actors valid that have a blank (`""`) summary field and allows follows from those.
|
||||
- Catch ValueError on radio end (#1596)
|
||||
- Channel overview was displaying foreign tracks (#1773) (1773)
|
||||
- Docker setup: do not export the API port 5000 publicly
|
||||
- Fix adding same track multiple times (#1933)
|
||||
- Fix artist name submission in Maloja plugin
|
||||
- Fix changing visualizer CORS error (#1934).
|
||||
- Fix content form autofocus despite `autofocus` prop being set to `false` (#1924)
|
||||
- Fix CSP header issues
|
||||
- Fix CSP issue caused by django-channels package (#1752)
|
||||
- Fix docker API image building with removing autobahn workaround version pin
|
||||
- Fix docker builds on armv7
|
||||
- Fix docker nginx entrypoint
|
||||
- Fix editing playlist tracks (#1362)
|
||||
- Fix embedded player not working on social posts (1946)
|
||||
- Fix favorite button in queue
|
||||
- Fix fetching pages of albums in album detail view (#1927)
|
||||
- Fix front album tracks count translation
|
||||
- Fix global keyboard shortcuts firing when input is focused (#1876)
|
||||
- Fix lots of problems in debian installation guide
|
||||
- Fix media directory nginx routing error in front docker container introduced in !1897
|
||||
- Fix OAuth login (#1890)
|
||||
- Fix play button in albums with multi-page volumes (#1928)
|
||||
- Fix player closing when queue ends (#1931)
|
||||
- Fix postgres connection details in docker setup
|
||||
- Fix purging of dangling files #1929
|
||||
- Fix remote search (#1857)
|
||||
- Fix search by text in affected views (#1858)
|
||||
- Fix timeout on spa manifest requests
|
||||
- Fix track table showing all tracks and double pagination in some cases (#1923)
|
||||
- Fix user requests and reports filtering (#1924)
|
||||
- Fix validity issues in openapi/swagger spec files (#1171)
|
||||
- Fixed an issue which caused links in Markdown forms to not render correctly. (#2023)
|
||||
- Fixed login redirect (1736)
|
||||
- Fixed mobile player element widths (#2054)
|
||||
- Fixed remote subscription form in Podcast and search views (#1708)
|
||||
- Fixed upload form VUE errors (#1738) (1738)
|
||||
- Fixes an issue which made it possible to download all media files without access control (#2101)
|
||||
- Fixes channel page (#1729) (1729)
|
||||
- Fixes development environment set-up with docker (1726)
|
||||
- Fixes embed player (#1783) (1783)
|
||||
- Fixes service worker (#1634)
|
||||
- Fixes track listenings not being sent when tab is not focused
|
||||
- Hide create custom radio to un-authenticated users (#1720)
|
||||
- Improve signal handling for service and containers
|
||||
- Move api docker hardcoded env vars in the settings module
|
||||
- Prefer str over dict for the CACHE_URL api setting
|
||||
|
||||
This fix the ability to reuse the CACHE_URL with other settings such as
|
||||
CELERY_BROKER_URL.
|
||||
- Remove trailing slash from reverse proxy configuration
|
||||
- Remove unused Footer component (#1660)
|
||||
- Remove usage of deprecated Model and Serializer fields (#1663)
|
||||
- Resolved an issue where queue text with mouse over has dark text on dark background (#2058) (2058)
|
||||
- Skip refreshing local actors in celery federation.refresh_actor_data task - fixes disappearing avatars (!1873)
|
||||
|
||||
|
||||
Documentation:
|
||||
|
||||
- Add ability to translate documentation into multiple languages
|
||||
- Add generic upgrade instructions to Docker postgres documentation (#2049)
|
||||
- Add restore instructions to backup docs (#1627).
|
||||
- Add systemd update instructions to Debian upgrade instructions (#1966)
|
||||
- Added Nginx regeneration instructions to Debian update guide (#2050)
|
||||
- Added virtualenv upgrade instructions for Debian (#1562).
|
||||
- Cleaned up documentation
|
||||
- Document the new login flow of the CLI-tool (#1800)
|
||||
- Documented LOGLEVEL command (#1541).
|
||||
- Documented the `NGINX_MAX_BODY_SIZE` .env variable (#1624).
|
||||
- Fix broken links in CHANGELOG (#1976)
|
||||
- Harden security for debian install docs
|
||||
- Remove unnecessary postgres variable in Docker migration guide (#2124).
|
||||
- Rewrote documentation contributor guide.
|
||||
- Rewrote the architecture file (#1908)
|
||||
- Rewrote the federation developer documentation (#1911)
|
||||
- Rewrote the plugins documentation (#1910)
|
||||
- Rewrote translators file
|
||||
- Updated API developer documentation (#1912, #1909)
|
||||
- Updated CONTRIBUTING guide with up-to-date documentation. Created layout in documentation hub.
|
||||
|
||||
|
||||
Other:
|
||||
|
||||
- Add a CI job to check if changelog snippet is available
|
||||
- Add CI broken links checker
|
||||
- Add pre-commit hooks
|
||||
- flake8
|
||||
- black
|
||||
- isort
|
||||
- pyupgrade
|
||||
- prettier
|
||||
- codespell
|
||||
- Add pre-commit to development tools
|
||||
- Align the openapi spec to the actual API wherever possible
|
||||
- Cache lychee checked urls for 1 day in CI
|
||||
- Fix api tests warnings by renaming fixtures
|
||||
- Fix permissions for build artifacts
|
||||
- Fix shell scripts lint errors
|
||||
- Format api pyproject.toml
|
||||
- Format or fix files using pre-commit
|
||||
|
||||
- Upgrade code to >=python3.7
|
||||
- Fix flake8 warnings
|
||||
- Fix spelling errors
|
||||
- Format files using black
|
||||
- Format files using isort
|
||||
- Format files using prettier
|
||||
- Move api tools config to pyproject.toml
|
||||
- Move database url composition from custom script to django settings
|
||||
- Remove docker_all_in_one_release ci job
|
||||
- Rename api composer/django/ dir to docker/
|
||||
- Unpin asgiref in API dependencies
|
||||
- Use vite for building the frontend, #1644
|
||||
|
||||
|
||||
Deprecation:
|
||||
|
||||
- Deprecate the api manage.py script in favor of the funkwhale-manage entrypoint
|
||||
- That's the last minor version series that supports python3.7. Funkwhale 1.4 will remove support for
|
||||
it. #1693
|
||||
- The automatically generated 'DATABASE_URL' configuration in the docker setup is deprecated, please
|
||||
configure either the 'DATABASE_URL' environment variable or the 'DATABASE_HOST', 'DATABASE_USER' and
|
||||
'DATABASE_PASSWORD' environment variables instead.
|
||||
|
||||
|
||||
Removal:
|
||||
|
||||
- This release removes support for Python 3.6. Please make sure you update your python version before
|
||||
Updating Funkwhale!
|
||||
|
||||
Committers:
|
||||
|
||||
- Agate
|
||||
- Aina Hernàndez Campaña
|
||||
- AMoonRabbit
|
||||
- Anton
|
||||
- bruce diao
|
||||
- Bruno Talanski
|
||||
- ButterflyOfFire
|
||||
- Çağla Pickaxe
|
||||
- Ciarán Ainsworth
|
||||
- Dignified Silence
|
||||
- dignny
|
||||
- Éilias McTalún
|
||||
- EorlBruder
|
||||
- Fedi Funkers
|
||||
- Georg Krause
|
||||
- ghose
|
||||
- Henri Dickson
|
||||
- Jacek Pruciak
|
||||
- Jasper Bogers
|
||||
- Jhoan Sebastian Espinosa Borrero
|
||||
- jo
|
||||
- jooola
|
||||
- Julian Rademacher
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Keunes
|
||||
- Kisel1337
|
||||
- Laurin W
|
||||
- Marcos Peña
|
||||
- Matyáš Caras
|
||||
- Michael Long
|
||||
- nztvar
|
||||
- oki
|
||||
- Petitminion
|
||||
- Philipp Wolfer
|
||||
- poeppe
|
||||
- Porrumentzio
|
||||
- ppom
|
||||
- Reinhard Prechtl
|
||||
- Sky
|
||||
- Sporiff
|
||||
- Stuart Begley-Miller
|
||||
- @ta
|
||||
- Thomas
|
||||
- Till Robin Zickel
|
||||
- tobifroe
|
||||
- wvffle
|
||||
|
||||
Contributors to our Issues:
|
||||
|
||||
- AMoonRabbit
|
||||
- Agate
|
||||
- Artem Anufrij
|
||||
- ChengChung
|
||||
- Ciarán Ainsworth
|
||||
- Creak
|
||||
- Eric Mesa
|
||||
- Georg Krause
|
||||
- Hans Bauer
|
||||
- HurricaneDancer
|
||||
- Jakob Schürz
|
||||
- Jucgshu
|
||||
- Julian-Samuel Gebühr
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Keunes
|
||||
- Laser Lesbian
|
||||
- Laurin W
|
||||
- Marco
|
||||
- Marcos Peña
|
||||
- Martin Giger
|
||||
- Mathieu Jourdan
|
||||
- MattDHarding
|
||||
- Meliurwen
|
||||
- Micha Gläß-Stöcker
|
||||
- MichaelBechHansen
|
||||
- Nathan Mih
|
||||
- Nicolas Derive
|
||||
- Nolan Darilek
|
||||
- Philipp Wolfer
|
||||
- Porrumentzio
|
||||
- Rodion Borisov
|
||||
- Sam Birch
|
||||
- Sky Leite
|
||||
- TheSunCat
|
||||
- Thomas
|
||||
- Tobias Frölich
|
||||
- Tony Wasserka
|
||||
- Vincent Riquer
|
||||
- Virgile Robles
|
||||
- dddddd-mmmmmm
|
||||
- gerry_the_hat
|
||||
- getzze
|
||||
- heyarne
|
||||
- jake
|
||||
- jooola
|
||||
- jovuit
|
||||
- nouts
|
||||
- petitminion
|
||||
- ppom
|
||||
- pullopen
|
||||
- resister
|
||||
- silksow
|
||||
- troll
|
||||
|
||||
Contributors to our Merge Requests:
|
||||
|
||||
- AMoonRabbit
|
||||
- Ciarán Ainsworth
|
||||
- Georg Krause
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Laurin W
|
||||
- Marcos Peña
|
||||
- Mathieu Jourdan
|
||||
- Nicolas Derive
|
||||
- Philipp Wolfer
|
||||
- Rodion Borisov
|
||||
- Thomas
|
||||
- Tobias Frölich
|
||||
- getzze
|
||||
- jooola
|
||||
- mqus
|
||||
- petitminion
|
||||
- poeppe
|
||||
|
||||
1.2.10 (2023-03-17)
|
||||
-------------------
|
||||
|
||||
Bugfixes:
|
||||
|
||||
- Fixes a security vulnerability that allows to download all media files without access control #2101
|
||||
|
@ -31,7 +429,6 @@ Committers:
|
|||
|
||||
- JuniorJPDJ
|
||||
|
||||
|
||||
1.2.9 (2022-11-25)
|
||||
------------------
|
||||
|
||||
|
@ -461,11 +858,9 @@ nztvar
|
|||
Upgrade instructions are available at
|
||||
https://docs.funkwhale.audio/admin/upgrading.html
|
||||
|
||||
Due to a bug in our CI Pipeline frontend artifacts are not available at
|
||||
https://dev.funkwhale.audio/funkwhale/funkwhale/builds/artifacts/1.2.0/download?job=build_front as they would usually.
|
||||
Please use this URL to get your frontend build: https://dev.funkwhale.audio/funkwhale/funkwhale/-/jobs/81069/artifacts/download
|
||||
Due to a bug in our CI Pipeline, you need to download the frontend artifact here: https://dev.funkwhale.audio/funkwhale/funkwhale/-/jobs/81069/artifacts/download
|
||||
|
||||
If you are running the All-in-One-Container since a longer time, you probably need to manually migrate your database information. If thats the case, you will get a message like this:
|
||||
If you are running the All-in-One-Container since a longer time, you probably need to manually migrate your database information. If that's the case, you will get a message like this:
|
||||
|
||||
`DETAIL: The data directory was initialized by PostgreSQL version 11, which is not compatible with this version 13.5.`
|
||||
|
||||
|
@ -473,7 +868,7 @@ Make sure the Funkwhale version is set to `1.1.4` in `docker-compose.yml`. Now y
|
|||
|
||||
`docker-compose exec -T funkwhale pg_dump -c -U funkwhale > "db.dump"`
|
||||
|
||||
Now you can update the Funkwhale version in `docker-compose.yml` to `1.2.0`. Additionally you should save your `data` directory, eg by running `mv data data.bak && mkdir data`. Stop Funkwhale and start it again with the new version, by using `docker-compose down && docker-compose up -d`. This will initialize a fresh DB and applies all migrations. Now you can restore your database with the following command: `cat db.dump | docker-compose exec -T funkwhale psql -U funkwhale`. Thats it, enjoy!
|
||||
Now you can update the Funkwhale version in `docker-compose.yml` to `1.2.0`. Additionally you should save your `data` directory, eg by running `mv data data.bak && mkdir data`. Stop Funkwhale and start it again with the new version, by using `docker-compose down && docker-compose up -d`. This will initialize a fresh DB and applies all migrations. Now you can restore your database with the following command: `cat db.dump | docker-compose exec -T funkwhale psql -U funkwhale`. That's it, enjoy!
|
||||
|
||||
Features:
|
||||
|
||||
|
@ -510,7 +905,7 @@ Bugfixes:
|
|||
- Fix showing too long radio descriptions (#1556)
|
||||
- Fix X-Frame-Options HTTP header for embed and force it to SAMEORIGIN value for other pages (fix #1022)
|
||||
- Fix before last track starts playing when last track removed (#1485)
|
||||
- Fix delete account button is not disalbed when missing password (#1591)
|
||||
- Fix delete account button is not disabled when missing password (#1591)
|
||||
- Fix omputed properties already defined in components data (#1649)
|
||||
- Fix the all in one docker image building process, related to #1503
|
||||
- Fix crash in album moderation interface when missing cover (#1474)
|
||||
|
@ -522,7 +917,7 @@ Bugfixes:
|
|||
- Sanitize remote tracks' saving locations with slashes on their names (#1435)
|
||||
- Show embed option for channel tracks (#1278)
|
||||
- Store volume in logarithmic scale and convert when setting it to audio (fixes #1543)
|
||||
- Use global Howler volume instead of setting it separatly for each track (fixes #1542)
|
||||
- Use global Howler volume instead of setting it separately for each track (fixes #1542)
|
||||
|
||||
|
||||
Documentation:
|
||||
|
@ -980,7 +1375,7 @@ Features:
|
|||
- Allow users to hide compilation artists on the artist search page (#1053)
|
||||
- Can now launch server import from the UI (#1105)
|
||||
- Dedicated, advanced search page (#370)
|
||||
- Persist theme and language settings accross sessions (#996)
|
||||
- Persist theme and language settings across sessions (#996)
|
||||
|
||||
|
||||
Enhancements:
|
||||
|
@ -1072,13 +1467,13 @@ Enhancements:
|
|||
|
||||
Bugfixes:
|
||||
|
||||
- Fix embedded player not working on channel serie/album (#1175)
|
||||
- Fix embedded player not working on channel series/album (#1175)
|
||||
- Fixed broken mimetype detection during import (#1165)
|
||||
- Fixed crash when loading recent albums via Subsonic (#1158)
|
||||
- Fixed crash with null help text in admin (#1161)
|
||||
- Fixed invalid metadata when importing multi-artists tracks/albums (#1104)
|
||||
- Fixed player crash when using Funkwhale as a PWA (#1157)
|
||||
- Fixed wrong covert art displaying in some situations (#1138)
|
||||
- Fixed wrong convert art displaying in some situations (#1138)
|
||||
- Make channel card updated times more humanly readable, add internationalization (#1089)
|
||||
|
||||
Contributors to this release (development, documentation, reviews):
|
||||
|
@ -1105,7 +1500,7 @@ Enhancements:
|
|||
- Fix HTML <title> not including instance name in some situations (#1107)
|
||||
- Make URL-building logic more resilient against reverse proxy misconfiguration (#1085)
|
||||
- Removed unused masonry dependency (#1112)
|
||||
- Support for specifying itunes:email and itunes:name in channels for compatibiliy with third-party platforms (#1154)
|
||||
- Support for specifying itunes:email and itunes:name in channels for compatibility with third-party platforms (#1154)
|
||||
- Updated the /api/v1/libraries endpoint to support listing public libraries from other users/pods (#1151)
|
||||
|
||||
|
||||
|
@ -1298,7 +1693,7 @@ Upgrade from Postgres 10 to 11 [manual action required, docker all-in-one only]
|
|||
|
||||
With our upgrade to Alpine 3.10, the ``funkwhale/all-in-one`` image now includes PostgreSQL 11.
|
||||
|
||||
In order to update to Funkwhale 0.21, you will first need to uprade Funkwhale's PostgreSQL database, following the steps below::
|
||||
In order to update to Funkwhale 0.21, you will first need to upgrade Funkwhale's PostgreSQL database, following the steps below::
|
||||
|
||||
# open a shell as the Funkwhale user
|
||||
sudo -u funkwhale -H bash
|
||||
|
@ -1521,7 +1916,7 @@ Contributors to this release (translation, development, documentation, reviews,
|
|||
- Ciarán Ainsworth
|
||||
- Dag Stenstad
|
||||
- Daniele Lira Mereb
|
||||
- Eliot Berriot
|
||||
- Agate
|
||||
- Esteban
|
||||
- Johannes H.
|
||||
- knuxify
|
||||
|
@ -1675,7 +2070,7 @@ Then reload the configuration change with ``sudo systemctl daemon-reload`` and `
|
|||
Content-Security-Policy and additional security headers [manual action suggested]
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To improve the security and reduce the attack surface in case of a successfull exploit, we suggest
|
||||
To improve the security and reduce the attack surface in case of a successful exploit, we suggest
|
||||
you add the following Content-Security-Policy to your nginx configuration.
|
||||
|
||||
..note::
|
||||
|
@ -1728,7 +2123,7 @@ Then reload nginx with ``systemctl reload nginx``.
|
|||
# Simply copy-paste the /front/ location, but replace the following lines:
|
||||
location /front/embed.html {
|
||||
add_header X-Frame-Options "ALLOW";
|
||||
alias /frontent/embed.html;
|
||||
alias /frontend/embed.html;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1740,7 +2135,7 @@ Rate limiting
|
|||
With this release, rate-limiting on the API is enabled by default, with high enough limits to ensure
|
||||
regular users of the app aren't affected. Requests beyond allowed limits are answered with a 429 HTTP error.
|
||||
|
||||
For anonymous requests, the limit is applied to the IP adress of the client, and for authenticated requests, the limit
|
||||
For anonymous requests, the limit is applied to the IP address of the client, and for authenticated requests, the limit
|
||||
is applied to the corresponding user account. By default, anonymous requests get a lower limit than authenticated requests.
|
||||
|
||||
You can disable the rate-limiting feature by adding `THROTTLING_ENABLED=false` to your ``.env`` file and restarting the
|
||||
|
@ -1829,7 +2224,7 @@ Contributors to this release (translation, development, documentation, reviews,
|
|||
- Amaranthe
|
||||
- ButterflyOfFire
|
||||
- Ciarán Ainsworth
|
||||
- Eliot Berriot
|
||||
- Agate
|
||||
- Esteban
|
||||
- Francesc Galí
|
||||
- Freyja Wildes
|
||||
|
@ -1889,12 +2284,12 @@ Bugfixes:
|
|||
- Use ASCII filename before upload to S3 to avoid playback issues (#847)
|
||||
|
||||
|
||||
Contributors to this release (commiters and reviewers):
|
||||
Contributors to this release (committers and reviewers):
|
||||
|
||||
- Ciarán Ainsworth
|
||||
- Creak
|
||||
- ealgase
|
||||
- Eliot Berriot
|
||||
- Agate
|
||||
- Esteban
|
||||
- Freyja Wildes
|
||||
- hellekin
|
||||
|
@ -2050,7 +2445,7 @@ Enhancements:
|
|||
- Expose an instance-level actor (service@domain) in nodeinfo endpoint (#689)
|
||||
- Improved readability of logo (#385)
|
||||
- Keep persistent connections to the database instead of recreating a new one for each request
|
||||
- Labels for privacy levels are now consistently grabbed from a common source instead of being hardcoded everytime they are needed.
|
||||
- Labels for privacy levels are now consistently grabbed from a common source instead of being hardcoded every time they are needed.
|
||||
- Merged artist/album buttons with title text on artist and album pages (#725)
|
||||
- Now honor maxBitrate parameter in Subsonic API (#802)
|
||||
- Preload next track in queue (#572)
|
||||
|
@ -2072,7 +2467,7 @@ Bugfixes:
|
|||
- Do not consider tracks as duplicates during import if they have different positions (#740)
|
||||
- Ensure all our ActivityPub fetches are authenticated (#758)
|
||||
- Ensure correct track duration and playable status when browsing radios (#812)
|
||||
- Fixed alignement/size issue with some buttons (#702)
|
||||
- Fixed alignment/size issue with some buttons (#702)
|
||||
- Fixed an encoding issue with instance name on about page (#828)
|
||||
- Fixed cover not showing in queue/player when playing tracks from "albums" tab (#795)
|
||||
- Fixed crashing upload processing on invalid date format (#718)
|
||||
|
@ -2092,7 +2487,7 @@ Documentation:
|
|||
|
||||
- Document how to use Redis over unix sockets (#770)
|
||||
|
||||
Contributors to this release (commiters and translators):
|
||||
Contributors to this release (committers and translators):
|
||||
|
||||
- Ale London
|
||||
- Alexander
|
||||
|
@ -2101,7 +2496,7 @@ Contributors to this release (commiters and translators):
|
|||
- Ciarán Ainsworth
|
||||
- Damien Nicolas
|
||||
- Daniele Lira Mereb
|
||||
- Eliot Berriot
|
||||
- Agate
|
||||
- Elza Gelez
|
||||
- gerry_the_hat
|
||||
- gordon
|
||||
|
@ -2154,7 +2549,7 @@ Enhancements:
|
|||
Bugfixes:
|
||||
|
||||
- Avoid mixed content when deploying mono-container behind HTTPS proxy (thetarkus/docker-funkwhale#19)
|
||||
- Display new notifications immediatly on notifications page (#729)
|
||||
- Display new notifications immediately on notifications page (#729)
|
||||
- Ensure cover art from uploaded files is picked up properly on existing albums (#757)
|
||||
- Fixed a crash when federating a track with unspecified position
|
||||
- Fixed broken Activity and Actor modules in django admin (#767)
|
||||
|
@ -2296,7 +2691,7 @@ Bugfixes:
|
|||
- Fix transcoding of in-place imported tracks (#688)
|
||||
- Fixed celery worker defaulting to development settings instead of production
|
||||
- Fixed crashing Django admin when loading track detail page (#666)
|
||||
- Fixed list icon alignement on landing page (#668)
|
||||
- Fixed list icon alignment on landing page (#668)
|
||||
- Fixed overescaping issue in notifications and album page (#676)
|
||||
- Fixed wrong number of affected elements in bulk action modal (#683)
|
||||
- Fixed wrong URL in documentation for funkwhale_proxy.conf file when deploying using Docker
|
||||
|
@ -2321,7 +2716,7 @@ Many thanks to the dozens of people that contributed to this release: translator
|
|||
bug hunters, admins and backers. You made it possible!
|
||||
|
||||
Upgrade instructions are available at
|
||||
https://docs.funkwhale.audio/admin/upgrading.html, ensure you also execute the intructions
|
||||
https://docs.funkwhale.audio/admin/upgrading.html, ensure you also execute the instructions
|
||||
marked with ``[manual action required]`` and ``[manual action suggested]``.
|
||||
|
||||
See ``Full changelog`` below for an exhaustive list of changes!
|
||||
|
@ -2541,7 +2936,7 @@ Enhancements:
|
|||
- Load env file in config/.env automatically to avoid sourcing it by hand (#626)
|
||||
- More resilient date parsing during audio import, will not crash anymore on
|
||||
invalid dates (#622)
|
||||
- Now start radios immediatly, skipping any existing tracks in queue (#585)
|
||||
- Now start radios immediately, skipping any existing tracks in queue (#585)
|
||||
- Officially support connecting to a password protected redis server, with
|
||||
the redis://:password@localhost:6379/0 scheme (#640)
|
||||
- Performance improvement when fetching favorites, down to a single, small http request
|
||||
|
@ -2569,11 +2964,11 @@ Bugfixes:
|
|||
- Allow opus file upload (#598)
|
||||
- Do not display "view on MusicBrainz" button if we miss the mbid (#422)
|
||||
- Do not try to create unaccent extension if it's already present (#663)
|
||||
- Ensure admin links in sidebar are displayed for users with relavant permissions, and only them (#597)
|
||||
- Fix broken websocket connexion under Chrome browser (#589)
|
||||
- Ensure admin links in sidebar are displayed for users with relevant permissions, and only them (#597)
|
||||
- Fix broken websocket connection under Chrome browser (#589)
|
||||
- Fix play button not starting playback with empty queue (#632)
|
||||
- Fixed a styling inconsistency on about page when instance description was missing (#659)
|
||||
- Fixed a UI discrepency in playlist tracks count (#647)
|
||||
- Fixed a UI discrepancy in playlist tracks count (#647)
|
||||
- Fixed greyed tracks in radio builder and detail page (#637)
|
||||
- Fixed inconsistencies in subsonic error responses (#616)
|
||||
- Fixed incorrect icon for "next track" in player control (#613)
|
||||
|
@ -2581,7 +2976,7 @@ Bugfixes:
|
|||
- Fixed missing track count on various library cards (#581)
|
||||
- Fixed skipped track when appending multiple tracks to the queue under certain conditions (#209)
|
||||
- Fixed wrong album/track count on artist page (#599)
|
||||
- Hide unplayable/emtpy playlists in "Browse playlist" pages (#424)
|
||||
- Hide unplayable/empty playlists in "Browse playlist" pages (#424)
|
||||
- Initial UI render using correct language from browser (#644)
|
||||
- Invalid URI for reverse proxy websocket with apache (#617)
|
||||
- Properly encode Wikipedia and lyrics search urls (#470)
|
||||
|
@ -2630,7 +3025,7 @@ Bugfixes:
|
|||
or adding tracks to queue (#464)
|
||||
- Fix broken icons in playlist editor (#515)
|
||||
- Fixed a few untranslated strings (#559)
|
||||
- Fixed splitted album when importing from federation (#346)
|
||||
- Fixed split album when importing from federation (#346)
|
||||
- Fixed toggle mute in volume bar does not restore previous volume level (#514)
|
||||
- Fixed wrong env file URL and display bugs in deployment documentation (#520)
|
||||
- Fixed wrong title in PlayButton (#435)
|
||||
|
@ -2905,7 +3300,7 @@ Features:
|
|||
|
||||
- Complete redesign of the library home and playlist pages (#284)
|
||||
- Expose ActivityPub actors for users (#317)
|
||||
- Implemented a basic but functionnal Github-like search on federated tracks
|
||||
- Implemented a basic but functional Github-like search on federated tracks
|
||||
list (#344)
|
||||
- Internationalized interface as well as translations for Arabic, French,
|
||||
Esperanto, Italian, Occitan, Polish, Portuguese and Swedish (#161, #167)
|
||||
|
@ -3066,7 +3461,7 @@ This is the type of query you can run:
|
|||
- ``artist:"System of a Down" domain:instance.funkwhale`` search for results where artist name equals "System of a Down" and inside "instance.funkwhale" library
|
||||
|
||||
|
||||
Ensure MEDIA_ROOT, STATIC_ROOT and MUSIC_DIRECTORY_* are set explicitely [Manual action required]
|
||||
Ensure MEDIA_ROOT, STATIC_ROOT and MUSIC_DIRECTORY_* are set explicitly [Manual action required]
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In our default .env file, MEDIA_ROOT and STATIC_ROOT were commented by default, causing
|
||||
|
@ -3186,10 +3581,10 @@ Invitations generation and management requires the "settings" permission.
|
|||
Removed front-end and back-end coupling
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Eventhough Funkwhale's front-end has always been a Single Page Application,
|
||||
Even though Funkwhale's front-end has always been a Single Page Application,
|
||||
talking to an API, it was only able to talk to an API on the same domain.
|
||||
|
||||
There was no real technical justification behind this (only lazyness), and it was
|
||||
There was no real technical justification behind this (only laziness), and it was
|
||||
also blocking interesting use cases:
|
||||
|
||||
- Use multiple customized versions of the front-end with the same instance
|
||||
|
@ -3232,7 +3627,7 @@ Bugfixes:
|
|||
- Ensure radios can only be edited and deleted by their owners (#311)
|
||||
- Fixed admin menu not showing after login (#245)
|
||||
- Fixed broken pagination in Subsonic API (#295)
|
||||
- Fixed duplicated websocket connexion on timeline (#287)
|
||||
- Fixed duplicated websocket connection on timeline (#287)
|
||||
|
||||
|
||||
Documentation:
|
||||
|
@ -3468,7 +3863,7 @@ Bugfixes:
|
|||
(#106)
|
||||
- Fixed a few broken translations strings (#227)
|
||||
- Fixed broken ordering in front-end lists (#179)
|
||||
- Fixed ignored page_size paremeter on artist and favorites list (#240)
|
||||
- Fixed ignored page_size parameter on artist and favorites list (#240)
|
||||
- Read ID3Tag Tracknumber from TRCK (#220)
|
||||
- We now fetch album covers regardless of the import methods (#231)
|
||||
|
||||
|
@ -3490,7 +3885,7 @@ This is the first bit of an ongoing work that will span several releases, to
|
|||
bring more powerful library management features to Funkwhale. This iteration
|
||||
includes a basic file management interface where users with the "library"
|
||||
permission can list and search available files, order them using
|
||||
various criterias (size, bitrate, duration...) and delete them.
|
||||
various criteria (size, bitrate, duration...) and delete them.
|
||||
|
||||
New "upload" permission
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
@ -3598,10 +3993,10 @@ tend to be a lot bigger than other files, you may want to increase the
|
|||
``client_max_body_size`` value in your Nginx configuration if you plan
|
||||
to upload flac files.
|
||||
|
||||
Missing subsonic configuration bloc in vhost files
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
Missing subsonic configuration block in vhost files
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Because of a missing bloc in the sample Nginx and Apache configurations,
|
||||
Because of a missing block in the sample Nginx and Apache configurations,
|
||||
instances that were deployed after the 0.13 release are likely to be unable
|
||||
to answer to Subsonic clients (the missing bits were properly documented
|
||||
in the changelog).
|
||||
|
@ -3684,7 +4079,7 @@ additional information about audio files:
|
|||
- Duration
|
||||
|
||||
This change is not retroactive, meaning already imported files will lack those
|
||||
informations. The interface and API should work as before in such case, however,
|
||||
information. The interface and API should work as before in such case, however,
|
||||
we offer a command to deal with legacy files and populate the missing values.
|
||||
|
||||
On docker setups:
|
||||
|
@ -3787,8 +4182,8 @@ without developping our own alternative clients for each and every platform.
|
|||
Most advanced Subsonic clients support offline caching of music files,
|
||||
playlist management and search, which makes them well-suited for nomadic use.
|
||||
|
||||
Please head over :doc:`users/apps` for more informations about supported clients
|
||||
and user instructions.
|
||||
Please see `our list of supported apps <https://funkwhale.audio/en_US/apps>`_
|
||||
for more information about supported clients and user instructions.
|
||||
|
||||
At the instance-level, the Subsonic API is enabled by default, but requires
|
||||
an additional endpoint to be added in your reverse-proxy configuration.
|
||||
|
@ -3971,7 +4366,7 @@ of emails:
|
|||
- Password reset emails, enabling user to reset their password without an admin's intervention
|
||||
|
||||
Email sending is disabled by default, as it requires additional configuration.
|
||||
In this mode, emails are simply outputed on stdout.
|
||||
In this mode, emails are simply outputted on stdout.
|
||||
|
||||
If you want to actually send those emails to your users, you should edit your
|
||||
.env file and tweak the ``EMAIL_CONFIG`` variable. See :data:`EMAIL_CONFIG <config.settings.common.EMAIL_CONFIG>`
|
||||
|
@ -4110,7 +4505,7 @@ This is for real this time, and includes:
|
|||
|
||||
- Following other Funkwhale libraries
|
||||
- Importing tracks from remote libraries (tracks are hotlinked, and only cached for a short amount of time)
|
||||
- Searching accross federated catalogs
|
||||
- Searching across federated catalogs
|
||||
|
||||
Note that by default, federation is opt-in, on a per-instance basis:
|
||||
instances will request access to your catalog, and you can accept or refuse
|
||||
|
@ -4260,7 +4655,7 @@ Bugfixes:
|
|||
- Fixed broken import request admin (#115)
|
||||
- Fixed forced redirection to login event with
|
||||
API_AUTHENTICATION_REQUIRED=False (#119)
|
||||
- Fixed position not being reseted properly when playing the same track
|
||||
- Fixed position not being reset properly when playing the same track
|
||||
multiple times in a row
|
||||
- Fixed synchronized start/stop radio buttons for all custom radios (#103)
|
||||
- Fixed typo and missing icon on homepage (#96)
|
||||
|
@ -4333,7 +4728,7 @@ To prepare for new realtime features and enable websocket support in Funkwhale,
|
|||
we are now using django-channels and daphne to serve HTTP and websocket traffic.
|
||||
|
||||
This replaces gunicorn and the switch should be easy assuming you
|
||||
follow the upgrade process described bellow.
|
||||
follow the upgrade process described below.
|
||||
|
||||
If you are using docker, please remove the command instruction inside the
|
||||
api service, as the up-to-date command is now included directly in the image
|
||||
|
@ -4464,7 +4859,7 @@ Basic transcoding is now available to/from the following formats : ogg and mp3.
|
|||
|
||||
This relies internally on FFMPEG and can put some load on your server.
|
||||
It's definitely recommended you setup some caching for the transcoded files
|
||||
at your webserver level. Check the the exemple nginx file at deploy/nginx.conf
|
||||
at your webserver level. Check the example nginx file at deploy/nginx.conf
|
||||
for an implementation.
|
||||
|
||||
On the frontend, usage of transcoding should be transparent in the player.
|
||||
|
@ -4518,7 +4913,7 @@ an import is made.
|
|||
0.3.1
|
||||
------------------
|
||||
|
||||
- Revamped all import logic, everything is more tested and consistend
|
||||
- Revamped all import logic, everything is more tested and consistent
|
||||
- Can now use Acoustid in file imports to automatically grab metadata from musicbrainz
|
||||
- Brand new file import wizard
|
||||
|
||||
|
@ -4577,7 +4972,7 @@ Tech:
|
|||
|
||||
Features:
|
||||
|
||||
- Models: now store relese group mbid on Album model (#7)
|
||||
- Models: now store release group mbid on Album model (#7)
|
||||
- Models: now bind import job to track files (#44)
|
||||
|
||||
Bugfixes:
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
# Contribute to Funkwhale
|
||||
|
||||
We welcome contributions from across the community. Whether you are a designer, a translator, a technical writer, or a developer, we look forward to working with you to improve the Funkwhale project!
|
||||
|
||||
## Contribute to Funkwhale development
|
||||
|
||||
The Funkwhale software is the core of the community project. While we have a core team working on it, we are always excited to see new features, bugfixes, and refactorings from our community.
|
||||
|
||||
Funkwhale's backend is written in [Python](https://www.python.org/) using [Django](https://www.djangoproject.com) and [Django REST framework](https://www.django-rest-framework.org/). Our web app is written in [Vue.js](https://vuejs.org/) and [Typescript](https://typescriptlang.org).
|
||||
|
||||
Whether you're an experienced developer or you're just learning, check out our [developer guide](https://docs.funkwhale.audio/developers/index.html) to get started.
|
||||
|
||||
## Document Funkwhale
|
||||
|
||||
Funkwhale is a large project with a lot of moving parts. To help users and developers alike, we need to keep our documentation up-to-date and readable. If you have a knack for explaining technical concepts or you've noticed a gap, check out our [documentation guide](https://docs.funkwhale.audio/documentation/index.html) to see how to get involved.
|
||||
|
||||
## Translate Funkwhale
|
||||
|
||||
All Funkwhale content is written in American English, but our community speaks languages from all over the world. If you'd like to see Funkwhale in your language, check out the [translation guide](https://docs.funkwhale.audio/translators.html) to see how you can help out.
|
||||
|
||||
## Other contributions
|
||||
|
||||
We'll update this file and our documentation with more information and workflows as we expand our search for contributors. If you have some suggestions or want to see how else you can get involved, come join us on [our forum](https://forum.funkwhale.audio). We'd love to hear your ideas!
|
791
CONTRIBUTING.rst
791
CONTRIBUTING.rst
|
@ -1,791 +0,0 @@
|
|||
Contribute to Funkwhale development
|
||||
===================================
|
||||
|
||||
First of all, thank you for your interest in the project! We really
|
||||
appreciate the fact that you're about to take some time to read this
|
||||
and hack on the project.
|
||||
|
||||
This document will guide you through common operations such as:
|
||||
|
||||
- Setup your development environment
|
||||
- Working on your first issue
|
||||
- Writing unit tests to validate your work
|
||||
- Submit your work
|
||||
|
||||
A quick path to contribute on the front-end
|
||||
-------------------------------------------
|
||||
|
||||
The next sections of this document include a full installation guide to help
|
||||
you setup a local, development version of Funkwhale. If you only want to fix small things
|
||||
on the front-end, and don't want to manage a full development environment, there is another way.
|
||||
|
||||
As the front-end can work with any Funkwhale server, you can work with the front-end only,
|
||||
and make it talk with an existing instance (like the demo one, or your own instance, if you have one).
|
||||
|
||||
If even that is too much for you, you can also make your changes without any development environment,
|
||||
and open a merge request. We will be able to review your work easily by spawning automatically a
|
||||
live version of your changes, thanks to Gitlab Review apps.
|
||||
|
||||
Setup front-end only development environment
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
1. Clone the repository::
|
||||
|
||||
git clone ssh://git@dev.funkwhale.audio/funkwhale/funkwhale.git
|
||||
cd funkwhale
|
||||
cd front
|
||||
|
||||
2. Install `nodejs <https://nodejs.org/en/download/package-manager/>`_ and `yarn <https://yarnpkg.com/lang/en/docs/install/#debian-stable>`_
|
||||
|
||||
3. Install the dependencies::
|
||||
|
||||
yarn install
|
||||
|
||||
4. Compile the translations::
|
||||
|
||||
yarn i18n-compile
|
||||
|
||||
5. Launch the development server::
|
||||
|
||||
# this will serve the front-end on http://localhost:8000/front/
|
||||
VUE_PORT=8000 yarn serve
|
||||
|
||||
6. Make the front-end talk with an existing server (like https://demo.funkwhale.audio or https://open.audio),
|
||||
by clicking on the corresponding link in the footer
|
||||
|
||||
7. Start hacking!
|
||||
|
||||
Setup your development environment
|
||||
----------------------------------
|
||||
|
||||
If you want to fix a bug or implement a feature, you'll need
|
||||
to run a local, development copy of funkwhale.
|
||||
|
||||
We provide a docker based development environment, which should
|
||||
be both easy to setup and work similarly regardless of your
|
||||
development machine setup.
|
||||
|
||||
Instructions for bare-metal setup will come in the future (Merge requests
|
||||
are welcome).
|
||||
|
||||
Installing docker and docker-compose
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
This is already covered in the relevant documentation:
|
||||
|
||||
- https://docs.docker.com/install/
|
||||
- https://docs.docker.com/compose/install/
|
||||
|
||||
.. note::
|
||||
|
||||
If you are on Fedora, know that you can't use `podman` or `moby-engine` to set up the development environment.
|
||||
Stick to `docker-ce` and you'll be fine.
|
||||
|
||||
Cloning the project
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Visit https://dev.funkwhale.audio/funkwhale/funkwhale and clone the repository using SSH or HTTPS. Example using SSH::
|
||||
|
||||
git clone ssh://git@dev.funkwhale.audio/funkwhale/funkwhale.git
|
||||
cd funkwhale
|
||||
|
||||
.. note::
|
||||
|
||||
As of January 2020, the SSH fingerprints of our Gitlab server are the following::
|
||||
|
||||
$ ssh-keyscan dev.funkwhale.audio | ssh-keygen -lf -
|
||||
# dev.funkwhale.audio:22 SSH-2.0-OpenSSH_7.4p1 Debian-10+deb9u6
|
||||
# dev.funkwhale.audio:22 SSH-2.0-OpenSSH_7.4p1 Debian-10+deb9u6
|
||||
# dev.funkwhale.audio:22 SSH-2.0-OpenSSH_7.4p1 Debian-10+deb9u6
|
||||
2048 SHA256:WEZ546nkMhB9yV9lyDZZcEeN/IfriyhU8+mj7Cz/+sU dev.funkwhale.audio (RSA)
|
||||
256 SHA256:dEhAo+1ImjC98hSqVdnkwVleheCulV8xIsV1eKUcig0 dev.funkwhale.audio (ECDSA)
|
||||
256 SHA256:/AxZwOSP74hlNKCHzmu9Trlp9zVGTrsJOV+zet1hYyQ dev.funkwhale.audio (ED25519)
|
||||
|
||||
|
||||
A note about branches
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Next release development occurs on the "develop" branch, and releases are made on the "stable" branch. Therefore, when submitting Merge Requests, ensure you are merging on the develop branch.
|
||||
|
||||
|
||||
Working with docker
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
In development, we use the docker-compose file named ``dev.yml``, and this is why all our docker-compose commands will look like this::
|
||||
|
||||
docker-compose -f dev.yml logs
|
||||
|
||||
If you do not want to add the ``-f dev.yml`` snippet every time, you can run this command before starting your work::
|
||||
|
||||
export COMPOSE_FILE=dev.yml
|
||||
|
||||
|
||||
Creating your env file
|
||||
^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
We provide a working .env.dev configuration file that is suitable for
|
||||
development. However, to enable customization on your machine, you should
|
||||
also create a .env file that will hold your personal environment
|
||||
variables (those will not be committed to the project).
|
||||
|
||||
Create it like this::
|
||||
|
||||
touch .env
|
||||
|
||||
|
||||
Create docker network
|
||||
^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Create the federation network::
|
||||
|
||||
docker network create federation
|
||||
|
||||
|
||||
Building the containers
|
||||
^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
On your initial clone, or if there have been some changes in the
|
||||
app dependencies, you will have to rebuild your containers. This is done
|
||||
via the following command::
|
||||
|
||||
docker-compose -f dev.yml build
|
||||
|
||||
|
||||
Database management
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
To setup funkwhale's database schema, run this::
|
||||
|
||||
docker-compose -f dev.yml run --rm api python manage.py migrate
|
||||
|
||||
This will create all the tables needed for the API to run properly.
|
||||
You will also need to run this whenever changes are made on the database
|
||||
schema.
|
||||
|
||||
It is safe to run this command multiple times, so you can run it whenever
|
||||
you fetch develop.
|
||||
|
||||
|
||||
Development data
|
||||
^^^^^^^^^^^^^^^^
|
||||
|
||||
You'll need at least an admin user and some artists/tracks/albums to work
|
||||
locally.
|
||||
|
||||
Create an admin user with the following command::
|
||||
|
||||
docker-compose -f dev.yml run --rm api python manage.py createsuperuser
|
||||
|
||||
Injecting fake data is done by running the following script::
|
||||
|
||||
artists=25
|
||||
command="from funkwhale_api.music import fake_data; fake_data.create_data($artists)"
|
||||
echo $command | docker-compose -f dev.yml run --rm api python manage.py shell -i python
|
||||
|
||||
The previous command will create 25 artists with random albums, tracks
|
||||
and metadata.
|
||||
|
||||
|
||||
Launch all services
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Before the first Funkwhale launch, it is required to run this::
|
||||
|
||||
docker-compose -f dev.yml run --rm front yarn run i18n-compile
|
||||
|
||||
Then you can run everything with::
|
||||
|
||||
docker-compose -f dev.yml up front api nginx celeryworker
|
||||
|
||||
This will launch all services, and output the logs in your current terminal window.
|
||||
If you prefer to launch them in the background instead, use the ``-d`` flag, and access the logs when you need it via ``docker-compose -f dev.yml logs --tail=50 --follow``.
|
||||
|
||||
Once everything is up, you can access the various funkwhale's components:
|
||||
|
||||
- The Vue webapp, on http://localhost:8000
|
||||
- The API, on http://localhost:8000/api/v1/
|
||||
- The django admin, on http://localhost:8000/api/admin/
|
||||
|
||||
Stopping everything
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Once you're done with your work, you can stop running containers, if any, with::
|
||||
|
||||
docker-compose -f dev.yml stop
|
||||
|
||||
|
||||
Removing everything
|
||||
^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you want to wipe your development environment completely (e.g. if you want to start over from scratch), just run::
|
||||
|
||||
docker-compose -f dev.yml down -v
|
||||
|
||||
This will wipe your containers and data, so please be careful before running it.
|
||||
|
||||
You can keep your data by removing the ``-v`` flag.
|
||||
|
||||
|
||||
Working with federation locally
|
||||
-------------------------------
|
||||
|
||||
This is not needed unless you need to work on federation-related features.
|
||||
|
||||
To achieve that, you'll need:
|
||||
|
||||
1. to update your dns resolver to resolve all your .test hostnames locally
|
||||
2. a reverse proxy (such as traefik) to catch those .test requests and
|
||||
serve them with an https certificate
|
||||
3. two instances (or more) running locally, following the regular dev setup
|
||||
|
||||
Resolve .test names locally
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you use dnsmasq, this is as simple as doing::
|
||||
|
||||
echo "address=/test/172.17.0.1" | sudo tee /etc/dnsmasq.d/test.conf
|
||||
sudo systemctl restart dnsmasq
|
||||
|
||||
If you use NetworkManager with dnsmasq integration, use this instead::
|
||||
|
||||
echo "address=/test/172.17.0.1" | sudo tee /etc/NetworkManager/dnsmasq.d/test.conf
|
||||
sudo systemctl restart NetworkManager
|
||||
|
||||
Add wildcard certificate to the trusted certificates
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Simply copy bundled certificates::
|
||||
|
||||
sudo cp docker/ssl/test.crt /usr/local/share/ca-certificates/
|
||||
sudo update-ca-certificates
|
||||
|
||||
This certificate is a wildcard for ``*.funkwhale.test``
|
||||
|
||||
Run a reverse proxy for your instances
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
|
||||
Launch everything
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Launch the traefik proxy::
|
||||
|
||||
docker-compose -f docker/traefik.yml up -d
|
||||
|
||||
Then, in separate terminals, you can setup as many different instances as you
|
||||
need::
|
||||
|
||||
export COMPOSE_PROJECT_NAME=node2
|
||||
export VUE_PORT=1234 # this has to be unique for each instance
|
||||
docker-compose -f dev.yml run --rm api python manage.py migrate
|
||||
docker-compose -f dev.yml run --rm api python manage.py createsuperuser
|
||||
docker-compose -f dev.yml up nginx api front celeryworker
|
||||
|
||||
Note that by default, if you don't export the COMPOSE_PROJECT_NAME,
|
||||
we will default to node1 as the name of your instance.
|
||||
|
||||
Assuming your project name is ``node1``, your server will be reachable
|
||||
at ``https://node1.funkwhale.test/``. Note that you'll have to trust
|
||||
the SSL Certificate as it's self signed.
|
||||
|
||||
When working on federation with traefik, ensure you have this in your ``env``::
|
||||
|
||||
# This will ensure we don't bind any port on the host, and thus enable
|
||||
# multiple instances of funkwhale to be spawned concurrently.
|
||||
VUE_PORT_BINDING=
|
||||
# This disable certificate verification
|
||||
EXTERNAL_REQUESTS_VERIFY_SSL=false
|
||||
# this ensure you don't have incorrect urls pointing to http resources
|
||||
FUNKWHALE_PROTOCOL=https
|
||||
# Disable host ports binding for the nginx container, as traefik is serving everything
|
||||
NGINX_PORTS_MAPPING=80
|
||||
|
||||
Typical workflow for a contribution
|
||||
-----------------------------------
|
||||
|
||||
0. Fork the project if you did not already or if you do not have access to the main repository
|
||||
1. Checkout the development branch and pull most recent changes: ``git checkout develop && git pull``
|
||||
2. If working on an issue, assign yourself to the issue. Otherwise, consider opening an issue before starting to work on something, especially for new features.
|
||||
3. Create a dedicated branch for your work ``42-awesome-fix``. It is good practice to prefix your branch name with the ID of the issue you are solving.
|
||||
4. Work on your stuff
|
||||
5. [Optional] Consider running ``yarn lint`` in ``front`` if you changed something there. Consider fixing some
|
||||
linting errors in the files you touched.
|
||||
6. Commit small, atomic changes to make it easier to review your contribution
|
||||
7. Add a changelog fragment to summarize your changes: ``echo "Implemented awesome stuff (#42)" > changes/changelog.d/42.feature``
|
||||
8. Push your branch
|
||||
9. Create your merge request
|
||||
10. Take a step back and enjoy, we're really grateful you did all of this and took the time to contribute!
|
||||
|
||||
Changelog management
|
||||
--------------------
|
||||
|
||||
To ensure we have extensive and well-structured changelog, any significant
|
||||
work such as closing an issue must include a changelog fragment. Small changes
|
||||
may include a changelog fragment as well but this is not mandatory. If you're not
|
||||
sure about what to do, do not panic, open your merge request normally and we'll
|
||||
figure everything out during the review ;)
|
||||
|
||||
Changelog fragments are text files that can contain one or multiple lines
|
||||
that describe the changes occurring in a bunch of commits. Those files reside
|
||||
in ``changes/changelog.d``.
|
||||
|
||||
Content
|
||||
^^^^^^^
|
||||
|
||||
A typical fragment looks like that:
|
||||
|
||||
Fixed broken audio player on Chrome 42 for ogg files (#567)
|
||||
|
||||
If the work fixes one or more issues, the issue number should be included at the
|
||||
end of the fragment (``(#567)`` is the issue number in the previous example).
|
||||
|
||||
If your work is not related to a specific issue, use the merge request
|
||||
identifier instead, like this:
|
||||
|
||||
Fixed a typo in landing page copy (!342)
|
||||
|
||||
Naming
|
||||
^^^^^^
|
||||
|
||||
Fragment files should respect the following naming pattern: ``changes/changelog.d/<name>.<category>``.
|
||||
Name can be anything describing your work, or simply the identifier of the issue number you are fixing.
|
||||
Category can be one of:
|
||||
|
||||
- ``feature``: for new features
|
||||
- ``enhancement``: for enhancements on existing features
|
||||
- ``bugfix``: for bugfixes
|
||||
- ``doc``: for documentation
|
||||
- ``i18n``: for internationalization-related work
|
||||
- ``misc``: for anything else
|
||||
|
||||
Shortcuts
|
||||
^^^^^^^^^
|
||||
|
||||
Here is a shortcut you can use/adapt to easily create new fragments from command-line:
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
issue="42"
|
||||
content="Fixed an overflowing issue on small resolutions (#$issue)"
|
||||
category="bugfix"
|
||||
echo "$content" > changes/changelog.d/$issue.$category
|
||||
|
||||
You can of course create fragments by hand in your text editor, or from Gitlab's
|
||||
interface as well.
|
||||
|
||||
Internationalization
|
||||
--------------------
|
||||
|
||||
We're using https://github.com/Polyconseil/vue-gettext to manage i18n in the project.
|
||||
|
||||
When working on the front-end, any end-user string should be marked as a translatable string,
|
||||
with the proper context, as described below.
|
||||
|
||||
Translations in HTML
|
||||
^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Translations in HTML use the ``<translate>`` tag::
|
||||
|
||||
<template>
|
||||
<div>
|
||||
<h1><translate translate-context="Content/Profile/Header">User profile</translate></h1>
|
||||
<p>
|
||||
<translate
|
||||
translate-context="Content/Profile/Paragraph"
|
||||
:translate-params="{username: 'alice'}">
|
||||
You are logged in as %{ username }
|
||||
</translate>
|
||||
</p>
|
||||
<p>
|
||||
<translate
|
||||
translate-context="Content/Profile/Paragraph"
|
||||
translate-plural="You have %{ count } new messages, that's a lot!"
|
||||
:translate-n="unreadMessagesCount"
|
||||
:translate-params="{count: unreadMessagesCount}">
|
||||
You have 1 new message
|
||||
</translate>
|
||||
</p>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
Anything between the `<translate>` and `</translate>` delimiters will be considered as a translatable string.
|
||||
You can use variables in the translated string via the ``:translate-params="{var: 'value'}"`` directive, and reference them like this:
|
||||
``var value is %{ value }``.
|
||||
|
||||
For pluralization, you need to use ``translate-params`` in conjunction with ``translate-plural`` and ``translate-n``:
|
||||
|
||||
- ``translate-params`` should contain the variable you're using for pluralization (which is usually shown to the user)
|
||||
- ``translate-n`` should match the same variable
|
||||
- The ``<translate>`` delimiters contain the non-pluralized version of your string
|
||||
- The ``translate-plural`` directive contains the pluralized version of your string
|
||||
|
||||
|
||||
Translations in javascript
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
Translations in javascript work by calling the ``this.$*gettext`` functions::
|
||||
|
||||
export default {
|
||||
computed: {
|
||||
strings () {
|
||||
let tracksCount = 42
|
||||
let playButton = this.$pgettext('Sidebar/Player/Button/Verb, Short', 'Play')
|
||||
let loginMessage = this.$pgettext('*/Login/Message', 'Welcome back %{ username }')
|
||||
let addedMessage = this.$npgettext('*/Player/Message', 'One track was queued', '%{ count } tracks were queued', tracksCount)
|
||||
console.log(this.$gettextInterpolate(addedMessage, {count: tracksCount}))
|
||||
console.log(this.$gettextInterpolate(loginMessage, {username: 'alice'}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
The first argument of the ``$pgettext`` and ``$npgettext`` functions is the string context.
|
||||
|
||||
Contextualization
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Translation contexts provided via the ``translate-context`` directive and the ``$pgettext`` and ``$npgettext`` functions are never shown to end users
|
||||
but are visible to Funkwhale translators. They help translators understand where and how the strings are used,
|
||||
especially with short or ambiguous strings, like ``May``, which can refer to a month or a verb.
|
||||
|
||||
While we could in theory use free form context, like ``This string is inside a button, in the main page, and is a call to action``,
|
||||
Funkwhale uses a hierarchical structure to write contexts and keep them short and consistent across the app. The previous context,
|
||||
rewritten correctly would be: ``Content/Home/Button/Call to action``.
|
||||
|
||||
This hierarchical structure is made of several parts:
|
||||
|
||||
- The location part, which is required and refers to the big blocks found in Funkwhale UI where the translated string is displayed:
|
||||
- ``Content``
|
||||
- ``Footer``
|
||||
- ``Head``
|
||||
- ``Menu``
|
||||
- ``Popup``
|
||||
- ``Sidebar``
|
||||
- ``*`` for strings that are not tied to a specific location
|
||||
|
||||
- The feature part, which is required, and refers to the feature associated with the translated string:
|
||||
- ``About``
|
||||
- ``Admin``
|
||||
- ``Album``
|
||||
- ``Artist``
|
||||
- ``Embed``
|
||||
- ``Home``
|
||||
- ``Login``
|
||||
- ``Library``
|
||||
- ``Moderation``
|
||||
- ``Player``
|
||||
- ``Playlist``
|
||||
- ``Profile``
|
||||
- ``Favorites``
|
||||
- ``Notifications``
|
||||
- ``Radio``
|
||||
- ``Search``
|
||||
- ``Settings``
|
||||
- ``Signup``
|
||||
- ``Track``
|
||||
- ``Queue``
|
||||
- ``*`` for strings that are not tied to a specific feature
|
||||
|
||||
- The component part, which is required and refers to the type of element that contain the string:
|
||||
- ``Button``
|
||||
- ``Card``
|
||||
- ``Checkbox``
|
||||
- ``Dropdown``
|
||||
- ``Error message``
|
||||
- ``Form``
|
||||
- ``Header``
|
||||
- ``Help text``
|
||||
- ``Hidden text``
|
||||
- ``Icon``
|
||||
- ``Input``
|
||||
- ``Image``
|
||||
- ``Label``
|
||||
- ``Link``
|
||||
- ``List item``
|
||||
- ``Menu``
|
||||
- ``Message``
|
||||
- ``Paragraph``
|
||||
- ``Placeholder``
|
||||
- ``Tab``
|
||||
- ``Table``
|
||||
- ``Title``
|
||||
- ``Tooltip``
|
||||
- ``*`` for strings that are not tied to a specific component
|
||||
|
||||
The detail part, which is optional and refers to the contents of the string itself, such as:
|
||||
- ``Adjective``
|
||||
- ``Call to action``
|
||||
- ``Noun``
|
||||
- ``Short``
|
||||
- ``Unit``
|
||||
- ``Verb``
|
||||
|
||||
Here are a few examples of valid context hierarchies:
|
||||
|
||||
- ``Sidebar/Player/Button``
|
||||
- ``Content/Home/Button/Call to action``
|
||||
- ``Footer/*/Help text``
|
||||
- ``*/*/*/Verb, Short``
|
||||
- ``Popup/Playlist/Button``
|
||||
- ``Content/Admin/Table.Label/Short, Noun (Value is a date)``
|
||||
|
||||
It's possible to nest multiple component parts to reach a higher level of detail. The component parts are then separated by a dot:
|
||||
|
||||
- ``Sidebar/Queue/Tab.Title``
|
||||
- ``Content/*/Button.Title``
|
||||
- ``Content/*/Table.Header``
|
||||
- ``Footer/*/List item.Link``
|
||||
- ``Content/*/Form.Help text``
|
||||
|
||||
Collecting translatable strings
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you want to ensure your translatable strings are correctly marked for translation,
|
||||
you can try to extract them.
|
||||
|
||||
Extraction is done by calling ``yarn run i18n-extract``, which
|
||||
will pull all the strings from source files and put them in a PO files.
|
||||
|
||||
You can then inspect the PO files to ensure everything is fine (but don't commit them, it's not needed).
|
||||
|
||||
Contributing to the API
|
||||
-----------------------
|
||||
|
||||
Project structure
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
tree api -L 2 -d
|
||||
api
|
||||
├── config # configuration directory (settings, urls, wsgi server)
|
||||
│ └── settings # Django settings files
|
||||
├── funkwhale_api # project directory, all funkwhale logic is here
|
||||
├── requirements # python requirements files
|
||||
└── tests # test files, matches the structure of the funkwhale_api directory
|
||||
|
||||
.. note::
|
||||
|
||||
Unless trivial, API contributions must include unittests to ensure
|
||||
your fix or feature is working as expected and won't break in the future
|
||||
|
||||
Running tests
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
To run the pytest test suite, use the following command::
|
||||
|
||||
docker-compose -f dev.yml run --rm api pytest
|
||||
|
||||
This is regular pytest, so you can use any arguments/options that pytest usually accept::
|
||||
|
||||
# get some help
|
||||
docker-compose -f dev.yml run --rm api pytest -h
|
||||
# Stop on first failure
|
||||
docker-compose -f dev.yml run --rm api pytest -x
|
||||
# Run a specific test file
|
||||
docker-compose -f dev.yml run --rm api pytest tests/music/test_models.py
|
||||
|
||||
Writing tests
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
Although teaching you how to write unit tests is outside of the scope of this
|
||||
document, you'll find below a collection of tips, snippets and resources
|
||||
you can use if you want to learn on that subject.
|
||||
|
||||
Useful links:
|
||||
|
||||
- `A quick introduction to unit test writing with pytest <https://semaphoreci.com/community/tutorials/testing-python-applications-with-pytest>`_
|
||||
- `A complete guide to Test-Driven Development (although not using Pytest) <https://www.obeythetestinggoat.com/>`_
|
||||
- `pytest <https://docs.pytest.org/en/latest/>`_: documentation of our testing engine and runner
|
||||
- `pytest-mock <https://pypi.org/project/pytest-mock/>`_: project page of our mocking engine
|
||||
- `factory-boy <http://factoryboy.readthedocs.io/>`_: documentation of factory-boy, which we use to easily generate fake objects and data
|
||||
|
||||
Recommendations:
|
||||
|
||||
- Test files must target a module and mimic ``funkwhale_api`` directory structure: if you're writing tests for ``funkwhale_api/myapp/views.py``, you should put those tests in ``tests/myapp/test_views.py``
|
||||
- Tests should be small and test one thing. If you need to test multiple things, write multiple tests.
|
||||
|
||||
We provide a lot of utils and fixtures to make the process of writing tests as
|
||||
painless as possible. You'll find some usage examples below.
|
||||
|
||||
Use factories to create arbitrary objects:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# funkwhale_api/myapp/users.py
|
||||
|
||||
def downgrade_user(user):
|
||||
"""
|
||||
A simple function that remove superuser status from users
|
||||
and return True if user was actually downgraded
|
||||
"""
|
||||
downgraded = user.is_superuser
|
||||
user.is_superuser = False
|
||||
user.save()
|
||||
return downgraded
|
||||
|
||||
# tests/myapp/test_users.py
|
||||
from funkwhale_api.myapp import users
|
||||
|
||||
def test_downgrade_superuser(factories):
|
||||
user = factories['users.User'](is_superuser=True)
|
||||
downgraded = users.downgrade_user(user)
|
||||
|
||||
assert downgraded is True
|
||||
assert user.is_superuser is False
|
||||
|
||||
def test_downgrade_normal_user_does_nothing(factories):
|
||||
user = factories['users.User'](is_superuser=False)
|
||||
downgraded = users.downgrade_user(user)
|
||||
|
||||
assert downgraded is False
|
||||
assert user.is_superuser is False
|
||||
|
||||
.. note::
|
||||
|
||||
We offer factories for almost if not all models. Factories are located
|
||||
in a ``factories.py`` file inside each app.
|
||||
|
||||
Mocking: mocking is the process of faking some logic in our code. This is
|
||||
useful when testing components that depend on each other:
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
# funkwhale_api/myapp/notifications.py
|
||||
|
||||
def notify(email, message):
|
||||
"""
|
||||
A function that sends an e-mail to the given recipient
|
||||
with the given message
|
||||
"""
|
||||
|
||||
# our e-mail sending logic here
|
||||
# ...
|
||||
|
||||
# funkwhale_api/myapp/users.py
|
||||
from . import notifications
|
||||
|
||||
def downgrade_user(user):
|
||||
"""
|
||||
A simple function that remove superuser status from users
|
||||
and return True if user was actually downgraded
|
||||
"""
|
||||
downgraded = user.is_superuser
|
||||
user.is_superuser = False
|
||||
user.save()
|
||||
if downgraded:
|
||||
notifications.notify(user.email, 'You have been downgraded!')
|
||||
return downgraded
|
||||
|
||||
# tests/myapp/test_users.py
|
||||
def test_downgrade_superuser_sends_email(factories, mocker):
|
||||
"""
|
||||
Your downgrade logic is already tested, however, we want to ensure
|
||||
an e-mail is sent when user is downgraded, but we don't have any e-mail
|
||||
server available in our testing environment. Thus, we need to mock
|
||||
the e-mail sending process.
|
||||
"""
|
||||
mocked_notify = mocker.patch('funkwhale_api.myapp.notifications.notify')
|
||||
user = factories['users.User'](is_superuser=True)
|
||||
users.downgrade_user(user)
|
||||
|
||||
# here, we ensure our notify function was called with proper arguments
|
||||
mocked_notify.assert_called_once_with(user.email, 'You have been downgraded!')
|
||||
|
||||
|
||||
def test_downgrade_not_superuser_skips_email(factories, mocker):
|
||||
mocked_notify = mocker.patch('funkwhale_api.myapp.notifications.notify')
|
||||
user = factories['users.User'](is_superuser=False)
|
||||
users.downgrade_user(user)
|
||||
|
||||
# here, we ensure no e-mail was sent
|
||||
mocked_notify.assert_not_called()
|
||||
|
||||
Views: you can find some readable views tests in file: ``api/tests/users/test_views.py``
|
||||
|
||||
.. note::
|
||||
|
||||
A complete list of available-fixtures is available by running
|
||||
``docker-compose -f dev.yml run --rm api pytest --fixtures``
|
||||
|
||||
|
||||
Contributing to the front-end
|
||||
-----------------------------
|
||||
|
||||
Styles and themes
|
||||
^^^^^^^^^^^^^^^^^
|
||||
|
||||
Our UI framework is Fomantic UI (https://fomantic-ui.com/), and Funkwhale's custom styles are written in SCSS. All the styles are configured in ``front/src/styles/_main.scss``,
|
||||
including importing of Fomantic UI styles and components.
|
||||
|
||||
We're applying several changes on top of the Fomantic CSS files, before they are imported:
|
||||
|
||||
1. Many hardcoded color values are replaced by CSS vars: e.g ``color: orange`` is replaced by ``color: var(--vibrant-color)``. This makes theming way easier.
|
||||
2. Unused components variations and icons are stripped from the source files, in order to reduce the final size of our CSS files
|
||||
|
||||
These changes are applied automatically when running ``yarn install``, through a ``postinstall`` hook. Internally, ``front/scripts/fix-fomantic-css.py`` is called
|
||||
and handles both kinds of modifications. Please refer to this script if you need to add new icons to the project, or restore some component variations that
|
||||
were stripped in order to use them.
|
||||
|
||||
Running tests
|
||||
^^^^^^^^^^^^^
|
||||
|
||||
To run the front-end test suite, use the following command::
|
||||
|
||||
docker-compose -f dev.yml run --rm front yarn test:unit
|
||||
|
||||
We also support a "watch and test" mode where we continually relaunch
|
||||
tests when changes are recorded on the file system::
|
||||
|
||||
docker-compose -f dev.yml run --rm front yarn test:unit -w
|
||||
|
||||
The latter is especially useful when you are debugging failing tests.
|
||||
|
||||
.. note::
|
||||
|
||||
The front-end test suite coverage is still pretty low
|
||||
|
||||
|
||||
Making a release
|
||||
----------------
|
||||
|
||||
To make a new 3.4 release::
|
||||
|
||||
# setup
|
||||
export NEXT_RELEASE=3.4 # replace with the next release number
|
||||
export PREVIOUS_RELEASE=3.3 # replace with the previous release number
|
||||
|
||||
# ensure you have an up-to-date repo
|
||||
git checkout develop # use stable if you're doing a hotfix release
|
||||
git pull
|
||||
|
||||
# compile changelog
|
||||
towncrier build --version $NEXT_RELEASE
|
||||
|
||||
# polish changelog
|
||||
# - update the date
|
||||
# - look for typos
|
||||
# - add list of contributors via `python3 scripts/get-contributions-stats.py $NEXT_RELEASE`
|
||||
git log $PREVIOUS_RELEASE.. --format="- %aN" --reverse | sort | uniq # Get all commit authors since last release
|
||||
nano CHANGELOG
|
||||
|
||||
# Set the `__version__` variable to $NEXT_RELEASE
|
||||
nano api/funkwhale_api/__init__.py
|
||||
|
||||
# commit
|
||||
git add .
|
||||
git commit -m "Version bump and changelog for $NEXT_RELEASE"
|
||||
|
||||
# tag
|
||||
git tag $NEXT_RELEASE
|
||||
|
||||
# publish
|
||||
git push --tags && git push
|
||||
|
||||
# if you're doing a hotfix release from stable
|
||||
git checkout develop && git merge stable && git push
|
||||
|
||||
# if you're doing a non-hotfix release (a real release, not a release candidate) from develop
|
||||
git checkout stable && git merge develop && git push
|
||||
|
||||
Then, visit https://dev.funkwhale.audio/funkwhale/funkwhale/-/tags, copy-paste the changelog on the corresponding
|
||||
tag, and announce the good news ;)
|
|
@ -0,0 +1,26 @@
|
|||
# Funkwhale
|
||||
|
||||
[](https://funkwhale.audio)
|
||||
|
||||
Funkwhale is a platform for uploading, sharing, and publishing audio content across the federated web. Curate your music library, listen to podcasts, or create your own content and share it with the world.
|
||||
|
||||
## Contribute
|
||||
|
||||
Want to help make Funkwhale even better? We welcome contributions from across the community. Whether you are a designer, a translator, a technical writer, or a developer, we look forward to seeing your work!
|
||||
|
||||
You can find contribution information in our [documentation hub](https://docs.funkwhale.audio).
|
||||
|
||||
- [Developer guides](https://docs.funkwhale.audio/developers/index.html)
|
||||
- [Contributor guides](https://docs.funkwhale.audio/contributing.html)
|
||||
|
||||
## Get help
|
||||
|
||||
Got a question or need help? Head over to our [forum](https://forum.funkwhale.audio/t/support) and open up a discussion.
|
||||
|
||||
## Report a security issue
|
||||
|
||||
If you find a security issue or vulnerability, please report it on our [GitLab instance](https://dev.funkwhale.audio/funkwhale/funkwhale/-/issues). When you open your issue, select the **This issue is confidential and should only be visible to team members with at least Reporter access** option. This ensures developers can verify and patch the issue before disclosing it.
|
||||
|
||||
## Code of conduct
|
||||
|
||||
The Funkwhale collective adheres to a [code of conduct](https://funkwhale.audio/en_US/code-of-conduct) in all our community spaces. Please familiarize yourself with this code and follow it when participating in discussions in our spaces.
|
41
README.rst
41
README.rst
|
@ -1,41 +0,0 @@
|
|||
Funkwhale
|
||||
=============
|
||||
|
||||
.. image:: ./front/src/assets/logo/logo-full-500.png
|
||||
:alt: Funkwhale logo
|
||||
:target: https://funkwhale.audio
|
||||
|
||||
A self-hosted tribute to Grooveshark.com.
|
||||
|
||||
LICENSE: AGPL3
|
||||
|
||||
Getting help
|
||||
------------
|
||||
|
||||
There are several places to get help or get in touch with other members of the community: https://funkwhale.audio/community/
|
||||
|
||||
Contribute
|
||||
----------
|
||||
|
||||
Contribution guidelines as well as development installation instructions
|
||||
are outlined in `CONTRIBUTING <CONTRIBUTING.rst>`_.
|
||||
|
||||
Security issues and vulnerabilities
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
If you found a vulnerability in Funkwhale, please report it on our Gitlab instance at https://dev.funkwhale.audio/funkwhale/funkwhale/-/issues, ensuring
|
||||
you have checked the ``This issue is confidential and should only be visible to team members with at least Reporter access`` box.
|
||||
|
||||
|
||||
This will ensure only maintainers and developers have access to the vulnerability. Thank you for your help!
|
||||
|
||||
|
||||
Translate
|
||||
^^^^^^^^^
|
||||
|
||||
Translators willing to help can refer to `TRANSLATORS <TRANSLATORS.rst>`_ for instructions.
|
||||
|
||||
Code of Conduct
|
||||
---------------
|
||||
|
||||
`Our Code of Conduct <https://funkwhale.audio/code-of-conduct/>`_ applies to all the community spaces, including our GitLab instance. Please, take a moment to read it.
|
|
@ -1,63 +0,0 @@
|
|||
Translating Funkwhale
|
||||
=====================
|
||||
|
||||
A step-by-step, beginner friendly guide is available at https://contribute.funkwhale.audio/guides/translate/
|
||||
|
||||
Thank you for reading this! If you want to help translate Funkwhale,
|
||||
you found the proper place :)
|
||||
|
||||
Translation is done via our own Weblate instance at https://translate.funkwhale.audio/projects/funkwhale/front/.
|
||||
|
||||
You can signup/login using your Gitlab account (from https://dev.funkwhale.audio).
|
||||
|
||||
Translation workflow
|
||||
--------------------
|
||||
|
||||
Once you're logged in on the Weblate instance, you can suggest translations. Your suggestions will then be reviewed
|
||||
by the project maintainer or other translators to ensure consistency.
|
||||
|
||||
Guidelines
|
||||
----------
|
||||
|
||||
Respecting those guidelines is mandatory if you want your translation to be included:
|
||||
|
||||
- Use gender-neutral language and wording
|
||||
|
||||
Submitting a new language
|
||||
-------------------------
|
||||
|
||||
1. Pull the latest version of ``develop``
|
||||
2. Create a new branch, e.g ``git checkout -b translations-new-fr-ca``
|
||||
3. Add your new language code and name in ``front/src/locales.js``. Use the native language name, as it is what appears in the UI selector.
|
||||
4. Create the ``po`` file from template:
|
||||
|
||||
.. code-block:: shell
|
||||
|
||||
export LOCALE=fr_CA # replace with your actual locale code
|
||||
mkdir -p front/locales/$LOCALE/LC_MESSAGES
|
||||
msginit --no-wrap --no-translator --locale=$LOCALE --input=front/locales/app.pot --output-file=front/locales/$LOCALE/LC_MESSAGES/app.po
|
||||
|
||||
5. Then commit your changes, push, and submit a pull request on the ``develop`` branch
|
||||
|
||||
Requesting a new language
|
||||
-------------------------
|
||||
|
||||
If you cannot submit a new language yourself, you can request it by opening an issue here:
|
||||
https://dev.funkwhale.audio/funkwhale/funkwhale/issues
|
||||
|
||||
Extracting messages from source
|
||||
-------------------------------
|
||||
|
||||
We offer a script to update existing ``po`` and ``pot`` files with new translations
|
||||
from the source code. This action should be run regularly, and in particular before
|
||||
lots of translation work is expected (e.g a few weeks before a new release), or when
|
||||
the UI code changes a lot. Otherwise, translators end up translating some obsolete messages,
|
||||
or not translating new messages.
|
||||
|
||||
1. `Lock the translations on weblate <https://translate.funkwhale.audio/projects/funkwhale/front/#repository>`_ (``Lock`` button in the sidebar). This will prevent translators from working, and help prevent potential conflicts in the source code
|
||||
2. `Commit and push changes from weblate <https://translate.funkwhale.audio/projects/funkwhale/front/#repository>`_ (``Commit`` and ``Push`` buttons in the sidebar)
|
||||
3. Pull ``develop`` in your local git repository to ensure you have the latest version of the translations
|
||||
4. Create a dedicated branch with ``git checkout -b translations-integration``
|
||||
5. Extract the translations with ``cd front && ./scripts/i18n-extract.sh``. This will update all ``po`` files as necessary
|
||||
6. Review, commit and push the changes, then open a merge request on the ``develop`` branch
|
||||
7. When the MR is merged, `Unlock the translations on weblate <https://translate.funkwhale.audio/projects/funkwhale/front/#repository>`_ (``Unlock`` button in the sidebar).
|
|
@ -1,5 +0,0 @@
|
|||
[run]
|
||||
include = funkwhale_api/*
|
||||
omit = *migrations*, *tests*
|
||||
plugins =
|
||||
django_coverage_plugin
|
|
@ -1,69 +1,12 @@
|
|||
### OSX ###
|
||||
.DS_Store
|
||||
.AppleDouble
|
||||
.LSOverride
|
||||
# Exclude everything and allow only the necessary files
|
||||
*
|
||||
!/docker/
|
||||
!/config/
|
||||
!/funkwhale_api/
|
||||
!/manage.py
|
||||
!/poetry.lock
|
||||
!/pyproject.toml
|
||||
|
||||
### SublimeText ###
|
||||
# cache files for sublime text
|
||||
*.tmlanguage.cache
|
||||
*.tmPreferences.cache
|
||||
*.stTheme.cache
|
||||
|
||||
# workspace files are user-specific
|
||||
*.sublime-workspace
|
||||
|
||||
# project files should be checked into the repository, unless a significant
|
||||
# proportion of contributors will probably not be using SublimeText
|
||||
# *.sublime-project
|
||||
|
||||
# sftp configuration file
|
||||
sftp-config.json
|
||||
|
||||
# Basics
|
||||
# Python
|
||||
*.py[cod]
|
||||
__pycache__
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
pip-log.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
.tox
|
||||
nosetests.xml
|
||||
htmlcov
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Pycharm
|
||||
.idea
|
||||
|
||||
# Vim
|
||||
|
||||
*~
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# npm
|
||||
front/node_modules/
|
||||
|
||||
# Compass
|
||||
.sass-cache
|
||||
|
||||
# virtual environments
|
||||
.env
|
||||
|
||||
# User-uploaded media
|
||||
funkwhale_api/media/
|
||||
|
||||
# Hitch directory
|
||||
tests/.hitch
|
||||
|
||||
# MailHog binary
|
||||
mailhog
|
||||
|
||||
*.sqlite3
|
||||
music
|
||||
media
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
[MASTER]
|
||||
load-plugins=pylint_common, pylint_django, pylint_celery
|
||||
|
||||
[FORMAT]
|
||||
max-line-length=120
|
||||
|
||||
[MESSAGES CONTROL]
|
||||
disable=missing-docstring,invalid-name
|
||||
|
||||
[DESIGN]
|
||||
max-parents=13
|
152
api/Dockerfile
152
api/Dockerfile
|
@ -1,74 +1,114 @@
|
|||
FROM alpine:3.13 as builder
|
||||
FROM alpine:3.17 as pre-build
|
||||
|
||||
RUN \
|
||||
echo 'installing dependencies' && \
|
||||
apk add --no-cache \
|
||||
git \
|
||||
musl-dev \
|
||||
gcc \
|
||||
postgresql-dev \
|
||||
python3-dev \
|
||||
py3-psycopg2 \
|
||||
py3-cryptography \
|
||||
libldap \
|
||||
libffi-dev \
|
||||
make \
|
||||
zlib-dev \
|
||||
jpeg-dev \
|
||||
openldap-dev \
|
||||
openssl-dev \
|
||||
cargo \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
&& \
|
||||
ln -s /usr/bin/python3 /usr/bin/python
|
||||
# We need this additional step to avoid having poetry's deps interacting with our
|
||||
# dependencies. This is only required until alpine 3.16 is released, since this
|
||||
# allows us to install poetry as package.
|
||||
|
||||
RUN apk add --no-cache python3 py3-cryptography py3-pip poetry
|
||||
COPY pyproject.toml poetry.lock /
|
||||
RUN poetry export --without-hashes > requirements.txt
|
||||
RUN poetry export --with dev --without-hashes > dev-requirements.txt
|
||||
|
||||
|
||||
FROM alpine:3.17 as builder
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
cargo \
|
||||
curl \
|
||||
gcc \
|
||||
git \
|
||||
jpeg-dev \
|
||||
libffi-dev \
|
||||
libldap \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
make \
|
||||
musl-dev \
|
||||
openldap-dev \
|
||||
openssl-dev \
|
||||
postgresql-dev \
|
||||
zlib-dev \
|
||||
py3-cryptography=38.0.3-r1 \
|
||||
py3-lxml=4.9.2-r0 \
|
||||
py3-pillow=9.3.0-r0 \
|
||||
py3-psycopg2=2.9.5-r0 \
|
||||
py3-watchfiles=0.18.1-r0 \
|
||||
python3-dev
|
||||
|
||||
# create virtual env for next stage
|
||||
RUN python -m venv --system-site-packages /venv
|
||||
RUN python3 -m venv --system-site-packages /venv
|
||||
# emulate activation by prefixing PATH
|
||||
ENV PATH="/venv/bin:$PATH" VIRTUAL_ENV=/venv
|
||||
ENV PATH="/venv/bin:/root/.local/bin:$PATH" VIRTUAL_ENV=/venv
|
||||
|
||||
RUN mkdir /requirements
|
||||
COPY ./requirements/base.txt /requirements/base.txt
|
||||
# hack around https://github.com/pypa/pip/issues/6158#issuecomment-456619072
|
||||
ENV PIP_DOWNLOAD_CACHE=/noop/
|
||||
RUN \
|
||||
echo 'installing pip requirements' && \
|
||||
pip3 install --upgrade pip && \
|
||||
pip3 install setuptools wheel && \
|
||||
pip3 install -r /requirements/base.txt cryptography==3.3.2 && \
|
||||
rm -rf $PIP_DOWNLOAD_CACHE
|
||||
COPY --from=pre-build /requirements.txt /requirements.txt
|
||||
COPY --from=pre-build /dev-requirements.txt /dev-requirements.txt
|
||||
|
||||
RUN set -eux; \
|
||||
pip3 install --upgrade pip; \
|
||||
pip3 install setuptools wheel; \
|
||||
# Currently we are unable to reliably build rust-based packages on armv7. This
|
||||
# is why we need to use the packages shipped by Alpine Linux.
|
||||
# Since poetry does not allow in-place dependency pinning, we need
|
||||
# to install the deps using pip.
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography==38.0.3 \
|
||||
lxml==4.9.2 \
|
||||
pillow==9.3.0 \
|
||||
psycopg2==2.9.5 \
|
||||
watchfiles==0.18.1
|
||||
|
||||
ARG install_dev_deps=0
|
||||
COPY ./requirements/*.txt /requirements/
|
||||
RUN \
|
||||
if [ "$install_dev_deps" = "1" ] ; then echo "Installing dev dependencies" && pip3 install --no-cache-dir -r /requirements/local.txt -r /requirements/test.txt ; else echo "Skipping dev deps installation" ; fi
|
||||
RUN set -eux; \
|
||||
if [ "$install_dev_deps" = "1" ] ; then \
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography==38.0.3 \
|
||||
lxml==4.9.2 \
|
||||
pillow==9.3.0 \
|
||||
psycopg2==2.9.5 \
|
||||
watchfiles==0.18.1; \
|
||||
fi
|
||||
|
||||
FROM alpine:3.17 as image
|
||||
|
||||
FROM alpine:3.13 as build-image
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
bash \
|
||||
ffmpeg \
|
||||
gettext \
|
||||
jpeg-dev \
|
||||
libldap \
|
||||
libmagic \
|
||||
libpq \
|
||||
libxml2 \
|
||||
libxslt \
|
||||
py3-cryptography=38.0.3-r1 \
|
||||
py3-lxml=4.9.2-r0 \
|
||||
py3-pillow=9.3.0-r0 \
|
||||
py3-psycopg2=2.9.5-r0 \
|
||||
py3-watchfiles=0.18.1-r0 \
|
||||
python3
|
||||
|
||||
COPY --from=builder /venv /venv
|
||||
# emulate activation by prefixing PATH
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
RUN apk add --no-cache \
|
||||
libmagic \
|
||||
bash \
|
||||
gettext \
|
||||
python3 \
|
||||
jpeg-dev \
|
||||
ffmpeg \
|
||||
libpq \
|
||||
libxml2 \
|
||||
libxslt \
|
||||
py3-cryptography \
|
||||
&& \
|
||||
ln -s /usr/bin/python3 /usr/bin/python
|
||||
|
||||
COPY . /app
|
||||
WORKDIR /app
|
||||
|
||||
RUN find . -type d -exec chmod 755 {} \+
|
||||
RUN set -eux; \
|
||||
pip3 install --no-deps --editable .
|
||||
|
||||
ENTRYPOINT ["./compose/django/entrypoint.sh"]
|
||||
CMD ["./compose/django/server.sh"]
|
||||
ENV IS_DOCKER_SETUP=true
|
||||
|
||||
CMD ["./docker/server.sh"]
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
# Funkwhale API
|
||||
|
||||
This is the Funkwhale API. Check out our [API explorer](https://docs.funkwhale.audio/swagger/) for interactive documentation.
|
||||
|
||||
## OAuth Authentication
|
||||
|
||||
Funkwhale uses the OAuth [authorization grant flow](https://tools.ietf.org/html/rfc6749#section-4.1) for external apps. This flow is a secure way to authenticate apps that requires a user's explicit consent to perform actions. You can use our demo server at <https://demo.funkwhale.audio> for testing purposes.
|
||||
|
||||
To authenticate with the Funkwhale API:
|
||||
|
||||
1. Create an application by sending a `POST` request to `api/v1/oauth/apps`. Include your scopes and redirect URI (use `urn:ietf:wg:oauth:2.0:oob`
|
||||
to get an authorization code you can copy)
|
||||
2. Send an [authorization request](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.2) to the `/authorize` endpoint to receive an authorization code
|
||||
3. [Request an access token](https://www.rfc-editor.org/rfc/rfc6749#section-4.1.3) from `/api/v1/oauth/token`
|
||||
4. Use your access token to authenticate your calls with the following format: `Authorization: Bearer <token>`
|
||||
5. Refresh your access token by sending a refresh request to `/api/v1/oauth/token`
|
||||
|
||||
For more detailed instructions, see [our API authentication documentation](https://docs.funkwhale.audio/developers/authentication.html).
|
||||
|
||||
## Application token authentication
|
||||
|
||||
If you have an account on your target pod, you can create an application at `/settings/applications/new`. Once you authorize the application you can retrieve an access token. Use your access token to authenticate your calls with the following format: `Authorization: Bearer <token>`
|
||||
|
||||
## Rate limiting
|
||||
|
||||
Funkwhale supports rate-limiting as of version 0.2.0. Pod admins can choose to rate limit specific endpoints to prevent abuse and improve the stability of the service. If the server drops a request due to rate-limiting, it returns a `429` status code.
|
||||
|
||||
Each API call returns HTTP headers to pass the following information:
|
||||
|
||||
- What was the scope of the request (`X-RateLimit-Scope`)
|
||||
- What is the rate-limit associated with the request scope (`X-RateLimit-Limit`)
|
||||
- How many more requests in the scope can be made within the rate-limit timeframe (`X-RateLimit-Remaining`)
|
||||
- How much time does the client need to wait to send another request (`Retry-After`)
|
||||
|
||||
For more information, check our [rate limit documentation](https://docs.funkwhale.audio/admin/configuration.html#api-configuration)
|
||||
|
||||
## Resources
|
||||
|
||||
For more information about API usage, refer to [our API documentation](https://docs.funkwhale.audio/api.html).
|
|
@ -1,3 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
exec "$@"
|
|
@ -1,26 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
# This entrypoint is used to play nicely with the current cookiecutter configuration.
|
||||
# Since docker-compose relies heavily on environment variables itself for configuration, we'd have to define multiple
|
||||
# environment variables just to support cookiecutter out of the box. That makes no sense, so this little entrypoint
|
||||
# does all this for us.
|
||||
export CACHE_URL=${CACHE_URL:="redis://redis:6379/0"}
|
||||
|
||||
if [ -z "$DATABASE_URL" ]; then
|
||||
# the official postgres image uses 'postgres' as default user if not set explictly.
|
||||
if [ -z "$POSTGRES_ENV_POSTGRES_USER" ]; then
|
||||
export POSTGRES_ENV_POSTGRES_USER=postgres
|
||||
fi
|
||||
export DATABASE_URL=postgres://$POSTGRES_ENV_POSTGRES_USER:$POSTGRES_ENV_POSTGRES_PASSWORD@postgres:5432/$POSTGRES_ENV_POSTGRES_USER
|
||||
fi
|
||||
|
||||
if [ -z "$CELERY_BROKER_URL" ]; then
|
||||
export CELERY_BROKER_URL=$CACHE_URL
|
||||
fi
|
||||
|
||||
# we copy the frontend files, if any so we can serve them from the outside
|
||||
if [ -d "frontend" ] && [ -d "/frontend" ]; then
|
||||
cp -r frontend/* /frontend/
|
||||
export FUNKWHALE_SPA_HTML_ROOT=/frontend/index.html
|
||||
fi
|
||||
exec "$@"
|
|
@ -1,3 +0,0 @@
|
|||
#!/bin/bash -eux
|
||||
python /app/manage.py collectstatic --noinput
|
||||
gunicorn config.asgi:application -w ${FUNKWHALE_WEB_WORKERS-1} -k uvicorn.workers.UvicornWorker -b 0.0.0.0:5000 ${GUNICORN_ARGS-}
|
|
@ -0,0 +1,3 @@
|
|||
# loads what is required to generate the swagger docs
|
||||
# https://matrix.to/#/!nNBDNverFlbfNpReEO:matrix.org/$16579878472182UmZUv:tchncs.de?via=tchncs.de&via=matrix.org&via=juniorjpdj.pl
|
||||
import config.schema # noqa: F401
|
|
@ -4,8 +4,8 @@ from rest_framework.urlpatterns import format_suffix_patterns
|
|||
|
||||
from funkwhale_api.activity import views as activity_views
|
||||
from funkwhale_api.audio import views as audio_views
|
||||
from funkwhale_api.common import views as common_views
|
||||
from funkwhale_api.common import routers as common_routers
|
||||
from funkwhale_api.common import views as common_views
|
||||
from funkwhale_api.music import views
|
||||
from funkwhale_api.playlists import views as playlists_views
|
||||
from funkwhale_api.subsonic.views import SubsonicViewSet
|
||||
|
@ -73,7 +73,10 @@ v1_patterns += [
|
|||
r"^history/",
|
||||
include(("funkwhale_api.history.urls", "history"), namespace="history"),
|
||||
),
|
||||
url(r"^", include(("funkwhale_api.users.api_urls", "users"), namespace="users"),),
|
||||
url(
|
||||
r"^",
|
||||
include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
|
||||
),
|
||||
# XXX: remove if Funkwhale 1.1
|
||||
url(
|
||||
r"^users/",
|
||||
|
|
|
@ -7,7 +7,6 @@ import sys
|
|||
import persisting_theory
|
||||
from django.core.cache import cache
|
||||
from django.db.models import Q
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
logger = logging.getLogger("plugins")
|
||||
|
@ -29,7 +28,7 @@ _filters = {}
|
|||
_hooks = {}
|
||||
|
||||
|
||||
class PluginCache(object):
|
||||
class PluginCache:
|
||||
def __init__(self, prefix):
|
||||
self.prefix = prefix
|
||||
|
||||
|
@ -82,7 +81,7 @@ def load_settings(name, settings):
|
|||
"text": django_settings.ENV,
|
||||
}
|
||||
values = {}
|
||||
prefix = "FUNKWHALE_PLUGIN_{}".format(name.upper())
|
||||
prefix = f"FUNKWHALE_PLUGIN_{name.upper()}"
|
||||
for s in settings:
|
||||
key = "_".join([prefix, s["name"].upper()])
|
||||
value = mapping[s["type"]](key, default=s.get("default", None))
|
||||
|
@ -180,7 +179,9 @@ def set_conf(name, conf, user=None, registry=_plugins):
|
|||
if not registry[name]["conf"] and not registry[name]["source"]:
|
||||
return
|
||||
conf_serializer = get_serializer_from_conf_template(
|
||||
registry[name]["conf"], user=user, source=registry[name]["source"],
|
||||
registry[name]["conf"],
|
||||
user=user,
|
||||
source=registry[name]["source"],
|
||||
)(data=conf)
|
||||
conf_serializer.is_valid(raise_exception=True)
|
||||
if "library" in conf_serializer.validated_data:
|
||||
|
@ -261,7 +262,7 @@ def get_serializer_from_conf_template(conf, source=False, user=None):
|
|||
self.fields["library"] = LibraryField(actor=user.actor)
|
||||
|
||||
for vname, v in validators.items():
|
||||
setattr(Serializer, "validate_{}".format(vname), v)
|
||||
setattr(Serializer, f"validate_{vname}", v)
|
||||
return Serializer
|
||||
|
||||
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
from channels.auth import AuthMiddlewareStack
|
||||
from channels.routing import ProtocolTypeRouter, URLRouter
|
||||
|
||||
from django.conf.urls import url
|
||||
from django.core.asgi import get_asgi_application
|
||||
|
||||
from funkwhale_api.instance import consumers
|
||||
|
||||
application = ProtocolTypeRouter(
|
||||
|
@ -11,6 +12,7 @@ application = ProtocolTypeRouter(
|
|||
URLRouter(
|
||||
[url("^api/v1/activity$", consumers.InstanceActivityConsumer.as_asgi())]
|
||||
)
|
||||
)
|
||||
),
|
||||
"http": get_asgi_application(),
|
||||
}
|
||||
)
|
||||
|
|
|
@ -0,0 +1,62 @@
|
|||
import os
|
||||
|
||||
from drf_spectacular.contrib.django_oauth_toolkit import OpenApiAuthenticationExtension
|
||||
from drf_spectacular.plumbing import build_bearer_security_scheme_object
|
||||
|
||||
|
||||
class CustomOAuthExt(OpenApiAuthenticationExtension):
|
||||
target_class = "funkwhale_api.common.authentication.OAuth2Authentication"
|
||||
name = "oauth2"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
from drf_spectacular.settings import spectacular_settings
|
||||
from oauth2_provider.scopes import get_scopes_backend
|
||||
|
||||
flows = {}
|
||||
for flow_type in spectacular_settings.OAUTH2_FLOWS:
|
||||
flows[flow_type] = {}
|
||||
if flow_type in ("implicit", "authorizationCode"):
|
||||
flows[flow_type][
|
||||
"authorizationUrl"
|
||||
] = spectacular_settings.OAUTH2_AUTHORIZATION_URL
|
||||
if flow_type in ("password", "clientCredentials", "authorizationCode"):
|
||||
flows[flow_type]["tokenUrl"] = spectacular_settings.OAUTH2_TOKEN_URL
|
||||
if spectacular_settings.OAUTH2_REFRESH_URL:
|
||||
flows[flow_type]["refreshUrl"] = spectacular_settings.OAUTH2_REFRESH_URL
|
||||
scope_backend = get_scopes_backend()
|
||||
flows[flow_type]["scopes"] = scope_backend.get_all_scopes()
|
||||
|
||||
return {"type": "oauth2", "flows": flows}
|
||||
|
||||
|
||||
class CustomApplicationTokenExt(OpenApiAuthenticationExtension):
|
||||
target_class = "funkwhale_api.common.authentication.ApplicationTokenAuthentication"
|
||||
name = "ApplicationToken"
|
||||
|
||||
def get_security_definition(self, auto_schema):
|
||||
return build_bearer_security_scheme_object(
|
||||
header_name="Authorization",
|
||||
token_prefix="Bearer",
|
||||
)
|
||||
|
||||
|
||||
def custom_preprocessing_hook(endpoints):
|
||||
filtered = []
|
||||
|
||||
# your modifications to the list of operations that are exposed in the schema
|
||||
api_type = os.environ.get("API_TYPE", "v1")
|
||||
|
||||
for (path, path_regex, method, callback) in endpoints:
|
||||
if path.startswith("/api/v1/providers"):
|
||||
continue
|
||||
|
||||
if path.startswith("/api/v1/users/users"):
|
||||
continue
|
||||
|
||||
if path.startswith("/api/v1/oauth/authorize"):
|
||||
continue
|
||||
|
||||
if path.startswith(f"/api/{api_type}"):
|
||||
filtered.append((path, path_regex, method, callback))
|
||||
|
||||
return filtered
|
|
@ -1 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
|
@ -1,10 +1,7 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from collections import OrderedDict
|
||||
import logging.config
|
||||
import sys
|
||||
|
||||
import warnings
|
||||
from collections import OrderedDict
|
||||
from urllib.parse import urlsplit
|
||||
|
||||
import environ
|
||||
|
@ -17,12 +14,14 @@ APPS_DIR = ROOT_DIR.path("funkwhale_api")
|
|||
env = environ.Env()
|
||||
ENV = env
|
||||
LOGLEVEL = env("LOGLEVEL", default="info").upper()
|
||||
IS_DOCKER_SETUP = env.bool("IS_DOCKER_SETUP", False)
|
||||
|
||||
|
||||
if env("FUNKWHALE_SENTRY_DSN", default=None) is not None:
|
||||
import sentry_sdk
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
from sentry_sdk.integrations.celery import CeleryIntegration
|
||||
from sentry_sdk.integrations.django import DjangoIntegration
|
||||
|
||||
from funkwhale_api import __version__ as version
|
||||
|
||||
sentry_sdk.init(
|
||||
|
@ -37,7 +36,17 @@ if env("FUNKWHALE_SENTRY_DSN", default=None) is not None:
|
|||
sentry_sdk.set_tag("instance", env("FUNKWHALE_HOSTNAME"))
|
||||
|
||||
"""
|
||||
Default logging level for the Funkwhale processes""" # pylint: disable=W0105
|
||||
Default logging level for the Funkwhale processes
|
||||
|
||||
Available levels:
|
||||
|
||||
- ``debug``
|
||||
- ``info``
|
||||
- ``warning``
|
||||
- ``error``
|
||||
- ``critical``
|
||||
|
||||
""" # pylint: disable=W0105
|
||||
|
||||
LOGGING_CONFIG = None
|
||||
logging.config.dictConfig(
|
||||
|
@ -74,7 +83,7 @@ Path to a .env file to load
|
|||
"""
|
||||
if env_file:
|
||||
logger.info("Loading specified env file at %s", env_file)
|
||||
# we have an explicitely specified env file
|
||||
# we have an explicitly specified env file
|
||||
# so we try to load and it fail loudly if it does not exist
|
||||
env.read_env(env_file)
|
||||
else:
|
||||
|
@ -101,7 +110,7 @@ FUNKWHALE_PLUGINS_PATH = env(
|
|||
)
|
||||
"""
|
||||
Path to a directory containing Funkwhale plugins.
|
||||
These will be imported at runtime.
|
||||
These are imported at runtime.
|
||||
"""
|
||||
sys.path.append(FUNKWHALE_PLUGINS_PATH)
|
||||
CORE_PLUGINS = [
|
||||
|
@ -144,7 +153,7 @@ else:
|
|||
try:
|
||||
FUNKWHALE_HOSTNAME = env("FUNKWHALE_HOSTNAME")
|
||||
"""
|
||||
Hostname of your Funkwhale pod, e.g. ``mypod.audio``
|
||||
Hostname of your Funkwhale pod, e.g. ``mypod.audio``.
|
||||
"""
|
||||
|
||||
FUNKWHALE_PROTOCOL = env("FUNKWHALE_PROTOCOL", default="https")
|
||||
|
@ -160,10 +169,8 @@ else:
|
|||
|
||||
FUNKWHALE_PROTOCOL = FUNKWHALE_PROTOCOL.lower()
|
||||
FUNKWHALE_HOSTNAME = FUNKWHALE_HOSTNAME.lower()
|
||||
FUNKWHALE_URL = "{}://{}".format(FUNKWHALE_PROTOCOL, FUNKWHALE_HOSTNAME)
|
||||
FUNKWHALE_SPA_HTML_ROOT = env(
|
||||
"FUNKWHALE_SPA_HTML_ROOT", default=FUNKWHALE_URL + "/front/"
|
||||
)
|
||||
FUNKWHALE_URL = f"{FUNKWHALE_PROTOCOL}://{FUNKWHALE_HOSTNAME}"
|
||||
FUNKWHALE_SPA_HTML_ROOT = env("FUNKWHALE_SPA_HTML_ROOT", default=FUNKWHALE_URL)
|
||||
"""
|
||||
URL or path to the Web Application files.
|
||||
Funkwhale needs access to it so that it can inject <meta> tags relevant
|
||||
|
@ -214,6 +221,7 @@ List of allowed hostnames for which the Funkwhale server will answer.
|
|||
# ------------------------------------------------------------------------------
|
||||
DJANGO_APPS = (
|
||||
"channels",
|
||||
"daphne",
|
||||
# Default Django apps:
|
||||
"django.contrib.auth",
|
||||
"django.contrib.contenttypes",
|
||||
|
@ -236,8 +244,8 @@ THIRD_PARTY_APPS = (
|
|||
"oauth2_provider",
|
||||
"rest_framework",
|
||||
"rest_framework.authtoken",
|
||||
"rest_auth",
|
||||
"rest_auth.registration",
|
||||
"dj_rest_auth",
|
||||
"dj_rest_auth.registration",
|
||||
"dynamic_preferences",
|
||||
"django_filters",
|
||||
"django_cleanup",
|
||||
|
@ -325,32 +333,31 @@ FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),)
|
|||
# EMAIL
|
||||
# ------------------------------------------------------------------------------
|
||||
DEFAULT_FROM_EMAIL = env(
|
||||
"DEFAULT_FROM_EMAIL", default="Funkwhale <noreply@{}>".format(FUNKWHALE_HOSTNAME)
|
||||
"DEFAULT_FROM_EMAIL", default=f"Funkwhale <noreply@{FUNKWHALE_HOSTNAME}>"
|
||||
)
|
||||
"""
|
||||
Name and e-mail address used to send system e-mails.
|
||||
The name and email address used to send system emails.
|
||||
Defaults to ``Funkwhale <noreply@yourdomain>``.
|
||||
|
||||
Default: ``Funkwhale <noreply@yourdomain>``
|
||||
Available formats:
|
||||
|
||||
.. note::
|
||||
|
||||
Both the forms ``Funkwhale <noreply@yourdomain>`` and
|
||||
``noreply@yourdomain`` work.
|
||||
- ``Name <email address>``
|
||||
- ``<Email address>``
|
||||
|
||||
"""
|
||||
EMAIL_SUBJECT_PREFIX = env("EMAIL_SUBJECT_PREFIX", default="[Funkwhale] ")
|
||||
"""
|
||||
Subject prefix for system e-mails.
|
||||
Subject prefix for system emails.
|
||||
"""
|
||||
SERVER_EMAIL = env("SERVER_EMAIL", default=DEFAULT_FROM_EMAIL)
|
||||
|
||||
|
||||
EMAIL_CONFIG = env.email_url("EMAIL_CONFIG", default="consolemail://")
|
||||
"""
|
||||
SMTP configuration for sending e-mails. Possible values:
|
||||
SMTP configuration for sending emails. Possible values:
|
||||
|
||||
- ``EMAIL_CONFIG=consolemail://``: output e-mails to console (the default)
|
||||
- ``EMAIL_CONFIG=dummymail://``: disable e-mail sending completely
|
||||
- ``EMAIL_CONFIG=consolemail://``: output emails to console (the default)
|
||||
- ``EMAIL_CONFIG=dummymail://``: disable email sending completely
|
||||
|
||||
On a production instance, you'll usually want to use an external SMTP server:
|
||||
|
||||
|
@ -358,29 +365,79 @@ On a production instance, you'll usually want to use an external SMTP server:
|
|||
- ``EMAIL_CONFIG=smtp+ssl://user:password@youremail.host:465``
|
||||
- ``EMAIL_CONFIG=smtp+tls://user:password@youremail.host:587``
|
||||
|
||||
.. note::
|
||||
|
||||
If ``user`` or ``password`` contain special characters (eg.
|
||||
``noreply@youremail.host`` as ``user``), be sure to urlencode them, using
|
||||
for example the command:
|
||||
``python3 -c 'import urllib.parse; print(urllib.parse.quote_plus
|
||||
("noreply@youremail.host"))'``
|
||||
(returns ``noreply%40youremail.host``)
|
||||
|
||||
"""
|
||||
vars().update(EMAIL_CONFIG)
|
||||
|
||||
# DATABASE CONFIGURATION
|
||||
# ------------------------------------------------------------------------------
|
||||
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
|
||||
DATABASE_URL = env.db("DATABASE_URL")
|
||||
"""
|
||||
URL to connect to the PostgreSQL database. Examples:
|
||||
|
||||
# The `_database_url_docker` variable will only by used as default for DATABASE_URL
|
||||
# in the context of a docker deployment.
|
||||
_database_url_docker = None
|
||||
if IS_DOCKER_SETUP and env.str("DATABASE_URL", None) is None:
|
||||
warnings.warn(
|
||||
DeprecationWarning(
|
||||
"the automatically generated 'DATABASE_URL' configuration in the docker "
|
||||
"setup is deprecated, please configure either the 'DATABASE_URL' "
|
||||
"environment variable or the 'DATABASE_HOST', 'DATABASE_USER' and "
|
||||
"'DATABASE_PASSWORD' environment variables instead"
|
||||
)
|
||||
)
|
||||
_DOCKER_DATABASE_HOST = "postgres"
|
||||
_DOCKER_DATABASE_PORT = 5432
|
||||
_DOCKER_DATABASE_USER = env.str("POSTGRES_ENV_POSTGRES_USER", "postgres")
|
||||
_DOCKER_DATABASE_PASSWORD = env.str("POSTGRES_ENV_POSTGRES_PASSWORD", "")
|
||||
_DOCKER_DATABASE_NAME = _DOCKER_DATABASE_USER
|
||||
|
||||
_database_url_docker = (
|
||||
f"postgres:"
|
||||
f"//{_DOCKER_DATABASE_USER}:{_DOCKER_DATABASE_PASSWORD}"
|
||||
f"@{_DOCKER_DATABASE_HOST}:{_DOCKER_DATABASE_PORT}"
|
||||
f"/{_DOCKER_DATABASE_NAME}"
|
||||
)
|
||||
|
||||
DATABASE_HOST = env.str("DATABASE_HOST", "localhost")
|
||||
"""
|
||||
The hostname of the PostgreSQL server. Defaults to ``localhost``.
|
||||
"""
|
||||
DATABASE_PORT = env.int("DATABASE_PORT", 5432)
|
||||
"""
|
||||
The port of the PostgreSQL server. Defaults to ``5432``.
|
||||
"""
|
||||
DATABASE_USER = env.str("DATABASE_USER", "funkwhale")
|
||||
"""
|
||||
The name of the PostgreSQL user. Defaults to ``funkwhale``.
|
||||
"""
|
||||
DATABASE_PASSWORD = env.str("DATABASE_PASSWORD", "funkwhale")
|
||||
"""
|
||||
The password of the PostgreSQL user. Defaults to ``funkwhale``.
|
||||
"""
|
||||
DATABASE_NAME = env.str("DATABASE_NAME", "funkwhale")
|
||||
"""
|
||||
The name of the PostgreSQL database. Defaults to ``funkwhale``.
|
||||
"""
|
||||
DATABASE_URL = env.db(
|
||||
"DATABASE_URL",
|
||||
_database_url_docker # This is only set in the context of a docker deployment.
|
||||
or (
|
||||
f"postgres:"
|
||||
f"//{DATABASE_USER}:{DATABASE_PASSWORD}"
|
||||
f"@{DATABASE_HOST}:{DATABASE_PORT}"
|
||||
f"/{DATABASE_NAME}"
|
||||
),
|
||||
)
|
||||
"""
|
||||
The URL used to connect to the PostgreSQL database. Defaults to an auto generated url
|
||||
build using the `DATABASE_HOST`, `DATABASE_PORT`, `DATABASE_USER`, `DATABASE_PASSWORD`
|
||||
and `DATABASE_NAME` variables.
|
||||
|
||||
Examples:
|
||||
- ``postgresql://funkwhale@:5432/funkwhale``
|
||||
- ``postgresql://<user>:<password>@<host>:<port>/<database>``
|
||||
- ``postgresql://funkwhale:passw0rd@localhost:5432/funkwhale_database``
|
||||
"""
|
||||
|
||||
DATABASES = {
|
||||
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
|
||||
"default": DATABASE_URL
|
||||
|
@ -390,7 +447,7 @@ DB_CONN_MAX_AGE = DATABASES["default"]["CONN_MAX_AGE"] = env(
|
|||
"DB_CONN_MAX_AGE", default=60 * 5
|
||||
)
|
||||
"""
|
||||
Max time, in seconds, before database connections are closed.
|
||||
The maximum time in seconds before database connections close.
|
||||
"""
|
||||
MIGRATION_MODULES = {
|
||||
# see https://github.com/jazzband/django-oauth-toolkit/issues/634
|
||||
|
@ -400,6 +457,9 @@ MIGRATION_MODULES = {
|
|||
"sites": "funkwhale_api.contrib.sites.migrations",
|
||||
}
|
||||
|
||||
DEFAULT_AUTO_FIELD = "django.db.models.AutoField"
|
||||
# see https://docs.djangoproject.com/en/4.0/releases/3.2/
|
||||
|
||||
# GENERAL CONFIGURATION
|
||||
# ------------------------------------------------------------------------------
|
||||
# Local time zone for this installation. Choices can be found here:
|
||||
|
@ -471,7 +531,7 @@ CRISPY_TEMPLATE_PACK = "bootstrap3"
|
|||
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
|
||||
STATIC_ROOT = env("STATIC_ROOT", default=str(ROOT_DIR("staticfiles")))
|
||||
"""
|
||||
Path were static files should be collected.
|
||||
The path where static files are collected.
|
||||
"""
|
||||
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
|
||||
STATIC_URL = env("STATIC_URL", default=FUNKWHALE_URL + "/staticfiles/")
|
||||
|
@ -479,10 +539,11 @@ DEFAULT_FILE_STORAGE = "funkwhale_api.common.storage.ASCIIFileSystemStorage"
|
|||
|
||||
PROXY_MEDIA = env.bool("PROXY_MEDIA", default=True)
|
||||
"""
|
||||
Wether to proxy audio files through your reverse proxy.
|
||||
It's recommended to keep this on, as a way to enforce access control, however,
|
||||
if you're using S3 storage with :attr:`AWS_QUERYSTRING_AUTH`,
|
||||
it's safe to disable it.
|
||||
Whether to proxy audio files through your reverse proxy.
|
||||
We recommend you leave this enabled to enforce access control.
|
||||
|
||||
If you're using S3 storage with :attr:`AWS_QUERYSTRING_AUTH`
|
||||
enabled, it's safe to disable this setting.
|
||||
"""
|
||||
AWS_DEFAULT_ACL = env("AWS_DEFAULT_ACL", default=None)
|
||||
"""
|
||||
|
@ -491,17 +552,18 @@ bucket.
|
|||
|
||||
ACLs and bucket policies are distinct concepts, and some storage
|
||||
providers (ie Linode, Scaleway) will always apply the most restrictive between
|
||||
a bucket's ACL and policy, meaning a default private ACL will supercede
|
||||
a bucket's ACL and policy, meaning a default private ACL will supersede
|
||||
a relaxed bucket policy.
|
||||
|
||||
If present, the value should be a valid canned ACL.
|
||||
See: https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl
|
||||
See `<https://docs.aws.amazon.com/AmazonS3/latest/userguide/acl-overview.html#canned-acl>`_
|
||||
"""
|
||||
AWS_QUERYSTRING_AUTH = env.bool("AWS_QUERYSTRING_AUTH", default=not PROXY_MEDIA)
|
||||
"""
|
||||
Whether to include signatures in S3 urls, as a way to enforce access-control.
|
||||
Whether to include signatures in S3 URLs. Signatures
|
||||
are used to enforce access control.
|
||||
|
||||
Defaults to the inverse of :attr:`PROXY_MEDIA`.
|
||||
Defaults to the opposite of :attr:`PROXY_MEDIA`.
|
||||
"""
|
||||
|
||||
AWS_S3_MAX_MEMORY_SIZE = env.int(
|
||||
|
@ -510,8 +572,8 @@ AWS_S3_MAX_MEMORY_SIZE = env.int(
|
|||
|
||||
AWS_QUERYSTRING_EXPIRE = env.int("AWS_QUERYSTRING_EXPIRE", default=3600)
|
||||
"""
|
||||
Expiration delay, in seconds, of signatures generated when
|
||||
:attr:`AWS_QUERYSTRING_AUTH` is enabled.
|
||||
The time in seconds before AWS signatures expire.
|
||||
Only takes effect you enable :attr:`AWS_QUERYSTRING_AUTH`
|
||||
"""
|
||||
|
||||
AWS_ACCESS_KEY_ID = env("AWS_ACCESS_KEY_ID", default=None)
|
||||
|
@ -527,7 +589,7 @@ if AWS_ACCESS_KEY_ID:
|
|||
"""
|
||||
AWS_STORAGE_BUCKET_NAME = env("AWS_STORAGE_BUCKET_NAME")
|
||||
"""
|
||||
Bucket name of your S3 storage.
|
||||
Your S3 bucket name.
|
||||
"""
|
||||
AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN", default=None)
|
||||
"""
|
||||
|
@ -536,14 +598,17 @@ if AWS_ACCESS_KEY_ID:
|
|||
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
|
||||
"""
|
||||
If you use a S3-compatible storage such as minio,
|
||||
set the following variable to the full URL to the storage server. Example:
|
||||
set the following variable to the full URL to the storage server.
|
||||
|
||||
Examples:
|
||||
|
||||
- ``https://minio.mydomain.com``
|
||||
- ``https://s3.wasabisys.com``
|
||||
"""
|
||||
AWS_S3_REGION_NAME = env("AWS_S3_REGION_NAME", default=None)
|
||||
"""If you are using Amazon S3 to serve media directly,
|
||||
you will need to specify your region name in order to access files.
|
||||
"""
|
||||
If you're using Amazon S3 to serve media without a proxy,
|
||||
you need to specify your region name to access files.
|
||||
|
||||
Example:
|
||||
|
||||
|
@ -553,9 +618,8 @@ if AWS_ACCESS_KEY_ID:
|
|||
AWS_S3_SIGNATURE_VERSION = "s3v4"
|
||||
AWS_LOCATION = env("AWS_LOCATION", default="")
|
||||
"""
|
||||
An optional bucket subdirectory were you want to store the files.
|
||||
This is especially useful if you plan to use share the bucket with other
|
||||
services.
|
||||
A directory in your S3 bucket where you store files.
|
||||
Use this if you plan to share the bucket between services.
|
||||
"""
|
||||
DEFAULT_FILE_STORAGE = "funkwhale_api.common.storage.ASCIIS3Boto3Storage"
|
||||
|
||||
|
@ -576,14 +640,13 @@ STATICFILES_FINDERS = (
|
|||
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
|
||||
MEDIA_ROOT = env("MEDIA_ROOT", default=str(APPS_DIR("media")))
|
||||
"""
|
||||
Path where media files (such as album covers or audio tracks) are stored
|
||||
on your system. Ensure this directory actually exists.
|
||||
The path where you store media files (such as album covers or audio tracks)
|
||||
on your system. Make sure this directory actually exists.
|
||||
"""
|
||||
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
|
||||
MEDIA_URL = env("MEDIA_URL", default=FUNKWHALE_URL + "/media/")
|
||||
"""
|
||||
URL where media files are served. The default value should work fine on most
|
||||
configurations, but could can tweak this if you are hosting media
|
||||
The URL from which your pod serves media files. Change this if you're hosting media
|
||||
files on a separate domain, or if you host Funkwhale on a non-standard port.
|
||||
"""
|
||||
FILE_UPLOAD_PERMISSIONS = 0o644
|
||||
|
@ -592,14 +655,14 @@ ATTACHMENTS_UNATTACHED_PRUNE_DELAY = env.int(
|
|||
"ATTACHMENTS_UNATTACHED_PRUNE_DELAY", default=3600 * 24
|
||||
)
|
||||
"""
|
||||
Delay in seconds before uploaded but unattached attachements are pruned
|
||||
The delay in seconds before Funkwhale prunes uploaded but detached attachments
|
||||
from the system.
|
||||
"""
|
||||
|
||||
# URL Configuration
|
||||
# ------------------------------------------------------------------------------
|
||||
ROOT_URLCONF = "config.urls"
|
||||
SPA_URLCONF = "config.spa_urls"
|
||||
SPA_URLCONF = "config.urls.spa"
|
||||
ASGI_APPLICATION = "config.routing.application"
|
||||
|
||||
# This ensures that Django will be able to detect a secure connection
|
||||
|
@ -623,14 +686,12 @@ ACCOUNT_EMAIL_VERIFICATION_ENFORCE = env.bool(
|
|||
"ACCOUNT_EMAIL_VERIFICATION_ENFORCE", default=False
|
||||
)
|
||||
"""
|
||||
Determine wether users need to verify their e-mail address before using the service. Enabling this can be useful
|
||||
to reduce spam or bots accounts, however, you'll need to configure a mail server so that your users can receive the
|
||||
verification e-mails, using :attr:`EMAIL_CONFIG`.
|
||||
Set whether users need to verify their email address before using your pod. Enabling this setting
|
||||
is useful for reducing spam and bot accounts. To use this setting you need to configure a mail server
|
||||
to send verification emails. See :attr:`EMAIL_CONFIG`.
|
||||
|
||||
Note that regardless of the setting value, superusers created through the command line will never require verification.
|
||||
|
||||
Note that regardless of the setting value, superusers created through the
|
||||
command line will never require verification.
|
||||
.. note::
|
||||
Superusers created through the command line never need to verify their email address.
|
||||
"""
|
||||
ACCOUNT_EMAIL_VERIFICATION = (
|
||||
"mandatory" if ACCOUNT_EMAIL_VERIFICATION_ENFORCE else "optional"
|
||||
|
@ -656,6 +717,7 @@ OAUTH2_PROVIDER = {
|
|||
"ACCESS_TOKEN_EXPIRE_SECONDS", default=60 * 60 * 10
|
||||
),
|
||||
"OAUTH2_SERVER_CLASS": "funkwhale_api.users.oauth.server.OAuth2Server",
|
||||
"PKCE_REQUIRED": False,
|
||||
}
|
||||
OAUTH2_PROVIDER_APPLICATION_MODEL = "users.Application"
|
||||
OAUTH2_PROVIDER_ACCESS_TOKEN_MODEL = "users.AccessToken"
|
||||
|
@ -669,18 +731,17 @@ SCOPED_TOKENS_MAX_AGE = 60 * 60 * 24 * 3
|
|||
# ------------------------------------------------------------------------------
|
||||
AUTH_LDAP_ENABLED = env.bool("LDAP_ENABLED", default=False)
|
||||
"""
|
||||
Wether to enable LDAP authentication.
|
||||
Whether to enable LDAP authentication.
|
||||
|
||||
See :doc:`/installation/ldap` for more information.
|
||||
See :doc:`/administrator_documentation/configuration_docs/ldap` for more information.
|
||||
"""
|
||||
|
||||
if AUTH_LDAP_ENABLED:
|
||||
|
||||
# Import the LDAP modules here.
|
||||
# This way, we don't need the dependency unless someone
|
||||
# actually enables the LDAP support
|
||||
import ldap
|
||||
from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion, GroupOfNamesType
|
||||
from django_auth_ldap.config import GroupOfNamesType, LDAPSearch, LDAPSearchUnion
|
||||
|
||||
# Add LDAP to the authentication backends
|
||||
AUTHENTICATION_BACKENDS += ("django_auth_ldap.backend.LDAPBackend",)
|
||||
|
@ -743,31 +804,43 @@ if AUTH_LDAP_ENABLED:
|
|||
# SLUGLIFIER
|
||||
AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify"
|
||||
|
||||
CACHE_DEFAULT = "redis://127.0.0.1:6379/0"
|
||||
CACHE_URL = env.cache_url("CACHE_URL", default=CACHE_DEFAULT)
|
||||
CACHE_URL_DEFAULT = "redis://127.0.0.1:6379/0"
|
||||
if IS_DOCKER_SETUP:
|
||||
CACHE_URL_DEFAULT = "redis://redis:6379/0"
|
||||
|
||||
CACHE_URL = env.str("CACHE_URL", default=CACHE_URL_DEFAULT)
|
||||
"""
|
||||
URL to your redis server. Examples:
|
||||
The URL of your redis server. For example:
|
||||
|
||||
- ``redis://<host>:<port>/<database>``
|
||||
- ``redis://127.0.0.1:6379/0``
|
||||
- ``redis://:password@localhost:6379/0``
|
||||
for password auth (the extra semicolon is important)
|
||||
|
||||
If you're using password auth (the extra slash is important)
|
||||
- ``redis:///run/redis/redis.sock?db=0`` over unix sockets
|
||||
|
||||
.. note::
|
||||
|
||||
If you want to use Redis over unix sockets, you'll also need to update
|
||||
:attr:`CELERY_BROKER_URL`
|
||||
If you want to use Redis over unix sockets, you also need to update
|
||||
:attr:`CELERY_BROKER_URL`, because the scheme differ from the one used by
|
||||
:attr:`CACHE_URL`.
|
||||
|
||||
"""
|
||||
CACHES = {
|
||||
"default": CACHE_URL,
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": CACHE_URL,
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "funkwhale_api.common.cache.RedisClient",
|
||||
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
|
||||
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
|
||||
},
|
||||
},
|
||||
"local": {
|
||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||
"LOCATION": "local-cache",
|
||||
},
|
||||
}
|
||||
CACHES["default"]["BACKEND"] = "django_redis.cache.RedisCache"
|
||||
|
||||
CHANNEL_LAYERS = {
|
||||
"default": {
|
||||
|
@ -776,17 +849,12 @@ CHANNEL_LAYERS = {
|
|||
}
|
||||
}
|
||||
|
||||
CACHES["default"]["OPTIONS"] = {
|
||||
"CLIENT_CLASS": "funkwhale_api.common.cache.RedisClient",
|
||||
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
|
||||
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
|
||||
}
|
||||
CACHEOPS_DURATION = env("CACHEOPS_DURATION", default=0)
|
||||
CACHEOPS_ENABLED = bool(CACHEOPS_DURATION)
|
||||
|
||||
if CACHEOPS_ENABLED:
|
||||
INSTALLED_APPS += ("cacheops",)
|
||||
CACHEOPS_REDIS = env("CACHE_URL", default=CACHE_DEFAULT)
|
||||
CACHEOPS_REDIS = CACHE_URL
|
||||
CACHEOPS_PREFIX = lambda _: "cacheops" # noqa
|
||||
CACHEOPS_DEFAULTS = {"timeout": CACHEOPS_DURATION}
|
||||
CACHEOPS = {
|
||||
|
@ -797,18 +865,17 @@ if CACHEOPS_ENABLED:
|
|||
|
||||
# CELERY
|
||||
INSTALLED_APPS += ("funkwhale_api.taskapp.celery.CeleryConfig",)
|
||||
CELERY_BROKER_URL = env(
|
||||
"CELERY_BROKER_URL", default=env("CACHE_URL", default=CACHE_DEFAULT)
|
||||
)
|
||||
CELERY_BROKER_URL = env.str("CELERY_BROKER_URL", default=CACHE_URL)
|
||||
"""
|
||||
URL to celery's task broker. Defaults to :attr:`CACHE_URL`,
|
||||
so you shouldn't have to tweak this, unless you want
|
||||
to use a different one, or use Redis sockets to connect.
|
||||
The celery task broker URL. Defaults to :attr:`CACHE_URL`.
|
||||
You don't need to tweak this unless you want
|
||||
to use a different server or use Redis sockets to connect.
|
||||
|
||||
Exemple:
|
||||
Example:
|
||||
|
||||
- ``redis://127.0.0.1:6379/0``
|
||||
- ``redis+socket:///run/redis/redis.sock?virtual_host=0``
|
||||
|
||||
"""
|
||||
# END CELERY
|
||||
# Location of root django.contrib.admin URL, use {% url 'admin:index' %}
|
||||
|
@ -852,6 +919,21 @@ CELERY_BEAT_SCHEDULE = {
|
|||
),
|
||||
"options": {"expires": 60 * 60},
|
||||
},
|
||||
"music.library.schedule_remote_scan": {
|
||||
"task": "music.library.schedule_scan",
|
||||
"schedule": crontab(day_of_week="1", minute="0", hour="2"),
|
||||
"options": {"expires": 60 * 60 * 24},
|
||||
},
|
||||
"federation.check_all_remote_instance_availability": {
|
||||
"task": "federation.check_all_remote_instance_availability",
|
||||
"schedule": crontab(
|
||||
**env.dict(
|
||||
"SCHEDULE_FEDERATION_CHECK_INTANCES_AVAILABILITY",
|
||||
default={"minute": "0", "hour": "*"},
|
||||
)
|
||||
),
|
||||
"options": {"expires": 60 * 60},
|
||||
},
|
||||
}
|
||||
|
||||
if env.bool("ADD_ALBUM_TAGS_FROM_TRACKS", default=True):
|
||||
|
@ -891,8 +973,8 @@ AUTH_PASSWORD_VALIDATORS = [
|
|||
]
|
||||
DISABLE_PASSWORD_VALIDATORS = env.bool("DISABLE_PASSWORD_VALIDATORS", default=False)
|
||||
"""
|
||||
Wether to disable password validators (length, common words,
|
||||
similarity with username…) used during regitration.
|
||||
Whether to disable password validation rules during registration.
|
||||
Validators include password length, common words, similarity with username.
|
||||
"""
|
||||
if DISABLE_PASSWORD_VALIDATORS:
|
||||
AUTH_PASSWORD_VALIDATORS = []
|
||||
|
@ -931,9 +1013,9 @@ REST_FRAMEWORK = {
|
|||
}
|
||||
THROTTLING_ENABLED = env.bool("THROTTLING_ENABLED", default=True)
|
||||
"""
|
||||
Wether to enable throttling (also known as rate-limiting).
|
||||
Leaving this enabled is recommended
|
||||
especially on public pods, to improve the quality of service.
|
||||
Whether to enable throttling (also known as rate-limiting).
|
||||
We recommend you leave this enabled to improve the quality
|
||||
of the service, especially on public pods .
|
||||
"""
|
||||
|
||||
if THROTTLING_ENABLED:
|
||||
|
@ -1082,9 +1164,10 @@ THROTTLING_RATES = {
|
|||
}
|
||||
THROTTLING_RATES = THROTTLING_RATES
|
||||
"""
|
||||
Throttling rates for specific endpoints and features of the app.
|
||||
You can tweak this if you are encountering to severe rate limiting issues or,
|
||||
on the contrary, if you want to reduce the consumption on some endpoints.
|
||||
Throttling rates for specific endpoints and app features.
|
||||
Tweak this if you're hitting rate limit issues or if you want
|
||||
to reduce the consumption of specific endpoints. Takes
|
||||
the format ``<endpoint name>=<number>/<interval>``.
|
||||
|
||||
Example:
|
||||
|
||||
|
@ -1106,35 +1189,39 @@ ATOMIC_REQUESTS = False
|
|||
USE_X_FORWARDED_HOST = True
|
||||
USE_X_FORWARDED_PORT = True
|
||||
|
||||
# Wether we should use Apache, Nginx (or other) headers
|
||||
# Whether we should use Apache, Nginx (or other) headers
|
||||
# when serving audio files. Defaults to Nginx.
|
||||
REVERSE_PROXY_TYPE = env("REVERSE_PROXY_TYPE", default="nginx")
|
||||
"""
|
||||
Depending on the reverse proxy used in front of your funkwhale instance,
|
||||
the API will use different kind of headers to serve audio files
|
||||
Set your reverse proxy type. This changes the headers the
|
||||
API uses to serve audio files. Allowed values:
|
||||
|
||||
Allowed values: ``nginx``, ``apache2``
|
||||
- ``nginx``
|
||||
- ``apache2``
|
||||
"""
|
||||
assert REVERSE_PROXY_TYPE in ["apache2", "nginx"], "Unsupported REVERSE_PROXY_TYPE"
|
||||
|
||||
PROTECT_FILES_PATH = env("PROTECT_FILES_PATH", default="/_protected")
|
||||
"""
|
||||
Which path will be used to process the internal redirection
|
||||
to the reverse proxy **DO NOT** put a slash at the end.
|
||||
The path used to process internal redirection
|
||||
to the reverse proxy.
|
||||
|
||||
You shouldn't have to tweak this.
|
||||
.. important::
|
||||
|
||||
Don't insert a slash at the end of this path.
|
||||
"""
|
||||
|
||||
MUSICBRAINZ_CACHE_DURATION = env.int("MUSICBRAINZ_CACHE_DURATION", default=300)
|
||||
"""
|
||||
How long to cache MusicBrainz results, in seconds.
|
||||
Length of time in seconds to cache MusicBrainz results.
|
||||
"""
|
||||
MUSICBRAINZ_HOSTNAME = env("MUSICBRAINZ_HOSTNAME", default="musicbrainz.org")
|
||||
"""
|
||||
Use this setting to change the MusicBrainz hostname, for instance to
|
||||
use a mirror. The hostname can also contain a port number.
|
||||
The hostname of your MusicBrainz instance. Change
|
||||
this setting if you run your own server or use a mirror.
|
||||
You can include a port number in the hostname.
|
||||
|
||||
Example:
|
||||
Examples:
|
||||
|
||||
- ``mymusicbrainz.mirror``
|
||||
- ``localhost:5000``
|
||||
|
@ -1143,7 +1230,7 @@ Example:
|
|||
# Custom Admin URL, use {% url 'admin:index' %}
|
||||
ADMIN_URL = env("DJANGO_ADMIN_URL", default="^api/admin/")
|
||||
"""
|
||||
Path to the Django admin area.
|
||||
Path to the Django admin dashboard.
|
||||
|
||||
Examples:
|
||||
|
||||
|
@ -1151,7 +1238,7 @@ Examples:
|
|||
- ``^api/mycustompath/``
|
||||
|
||||
"""
|
||||
CSRF_USE_SESSIONS = True
|
||||
CSRF_USE_SESSIONS = False
|
||||
SESSION_ENGINE = "django.contrib.sessions.backends.cache"
|
||||
|
||||
ACCOUNT_USERNAME_BLACKLIST = [
|
||||
|
@ -1179,14 +1266,14 @@ ACCOUNT_USERNAME_BLACKLIST = [
|
|||
"actor",
|
||||
] + env.list("ACCOUNT_USERNAME_BLACKLIST", default=[])
|
||||
"""
|
||||
List of usernames that will be unavailable during registration,
|
||||
given as a list of strings.
|
||||
List of usernames that can't be used for registration. Given as a list of strings.
|
||||
"""
|
||||
EXTERNAL_REQUESTS_VERIFY_SSL = env.bool("EXTERNAL_REQUESTS_VERIFY_SSL", default=True)
|
||||
"""
|
||||
Wether to enforce HTTPS certificates verification when doing outgoing HTTP
|
||||
requests (typically with federation).
|
||||
Disabling this is not recommended.
|
||||
Whether to enforce TLS certificate verification
|
||||
when performing outgoing HTTP requests.
|
||||
|
||||
We recommend you leave this setting enabled.
|
||||
"""
|
||||
EXTERNAL_REQUESTS_TIMEOUT = env.int("EXTERNAL_REQUESTS_TIMEOUT", default=10)
|
||||
"""
|
||||
|
@ -1195,46 +1282,46 @@ Default timeout for external requests.
|
|||
|
||||
MUSIC_DIRECTORY_PATH = env("MUSIC_DIRECTORY_PATH", default=None)
|
||||
"""
|
||||
The path on your server where Funkwhale can import files using
|
||||
:ref:`in-place import <in-place-import>`. It must be readable by the webserver
|
||||
and Funkwhale api and worker processes.
|
||||
The path on your server where Funkwhale places
|
||||
files from in-place imports. This path needs to be
|
||||
readable by the webserver and ``api`` and ``worker``
|
||||
processes.
|
||||
|
||||
On docker installations, we recommend you use the default of ``/music``
|
||||
for this value. For non-docker installation, you can use any absolute path.
|
||||
``/srv/funkwhale/data/music`` is a safe choice if you don't know what to use.
|
||||
.. important::
|
||||
|
||||
.. note:: This path should not include any trailing slash.
|
||||
Don’t insert a slash at the end of this path.
|
||||
|
||||
.. warning::
|
||||
On Docker installations, we recommend you use the default ``/music`` path.
|
||||
On Debian installations you can use any absolute path. Defaults to
|
||||
``/srv/funkwhale/data/music``.
|
||||
|
||||
You need to adapt your :ref:`reverse proxy configuration
|
||||
<reverse-proxy-setup>` to serve the directory pointed by
|
||||
``MUSIC_DIRECTORY_PATH`` on ``/_protected/music`` URL.
|
||||
.. note::
|
||||
|
||||
You need to add this path to your reverse proxy configuration.
|
||||
Add the directory to your ``/_protected/music`` server block.
|
||||
|
||||
"""
|
||||
MUSIC_DIRECTORY_SERVE_PATH = env(
|
||||
"MUSIC_DIRECTORY_SERVE_PATH", default=MUSIC_DIRECTORY_PATH
|
||||
)
|
||||
"""
|
||||
Default: :attr:`MUSIC_DIRECTORY_PATH`
|
||||
|
||||
When using Docker, the value of :attr:`MUSIC_DIRECTORY_PATH` in your containers
|
||||
may differ from the real path on your host.
|
||||
Assuming you have the following directive
|
||||
in your :file:`docker-compose.yml` file::
|
||||
On Docker setups the value of :attr:`MUSIC_DIRECTORY_PATH`
|
||||
may be different from the actual path on your server.
|
||||
You can specify this path in your :file:`docker-compose.yml` file::
|
||||
|
||||
volumes:
|
||||
- /srv/funkwhale/data/music:/music:ro
|
||||
- /srv/funkwhale/data/music:/music:ro
|
||||
|
||||
Then, the value of :attr:`MUSIC_DIRECTORY_SERVE_PATH` should be
|
||||
``/srv/funkwhale/data/music``. This must be readable by the webserver.
|
||||
In this case, you need to set :attr:`MUSIC_DIRECTORY_SERVE_PATH`
|
||||
to ``/srv/funkwhale/data/music``. The webserver needs to be
|
||||
able to read this directory.
|
||||
|
||||
On non-docker setup, you don't need to configure this setting.
|
||||
.. important::
|
||||
|
||||
.. note:: This path should not include any trailing slash.
|
||||
Don’t insert a slash at the end of this path.
|
||||
|
||||
"""
|
||||
# When this is set to default=True, we need to reenable migration music/0042
|
||||
# When this is set to default=True, we need to re-enable migration music/0042
|
||||
# to ensure data is populated correctly on existing pods
|
||||
MUSIC_USE_DENORMALIZATION = env.bool("MUSIC_USE_DENORMALIZATION", default=True)
|
||||
|
||||
|
@ -1242,7 +1329,7 @@ USERS_INVITATION_EXPIRATION_DAYS = env.int(
|
|||
"USERS_INVITATION_EXPIRATION_DAYS", default=14
|
||||
)
|
||||
"""
|
||||
Expiration delay, in days, for user invitations.
|
||||
The number of days before a user invite expires.
|
||||
"""
|
||||
|
||||
VERSATILEIMAGEFIELD_RENDITION_KEY_SETS = {
|
||||
|
@ -1272,26 +1359,27 @@ SUBSONIC_DEFAULT_TRANSCODING_FORMAT = (
|
|||
env("SUBSONIC_DEFAULT_TRANSCODING_FORMAT", default="mp3") or None
|
||||
)
|
||||
"""
|
||||
Default format for transcoding when using Subsonic API.
|
||||
The default format files are transcoded into when using the Subsonic
|
||||
API.
|
||||
"""
|
||||
# extra tags will be ignored
|
||||
TAGS_MAX_BY_OBJ = env.int("TAGS_MAX_BY_OBJ", default=30)
|
||||
"""
|
||||
Maximum number of tags that can be associated with an object.
|
||||
Extra tags will be ignored.
|
||||
Extra tags are ignored.
|
||||
"""
|
||||
FEDERATION_OBJECT_FETCH_DELAY = env.int(
|
||||
"FEDERATION_OBJECT_FETCH_DELAY", default=60 * 24 * 3
|
||||
)
|
||||
"""
|
||||
Delay, in minutes, before a remote object will be automatically
|
||||
The delay in minutes before a remote object is automatically
|
||||
refetched when accessed in the UI.
|
||||
"""
|
||||
MODERATION_EMAIL_NOTIFICATIONS_ENABLED = env.bool(
|
||||
"MODERATION_EMAIL_NOTIFICATIONS_ENABLED", default=True
|
||||
)
|
||||
"""
|
||||
Whether to enable e-mail notifications to moderators and pods admins.
|
||||
Whether to enable email notifications to moderators and pod admins.
|
||||
"""
|
||||
FEDERATION_AUTHENTIFY_FETCHES = True
|
||||
FEDERATION_SYNCHRONOUS_FETCH = env.bool("FEDERATION_SYNCHRONOUS_FETCH", default=True)
|
||||
|
@ -1299,27 +1387,29 @@ FEDERATION_DUPLICATE_FETCH_DELAY = env.int(
|
|||
"FEDERATION_DUPLICATE_FETCH_DELAY", default=60 * 50
|
||||
)
|
||||
"""
|
||||
Delay, in seconds, between two manual fetch of the same remote object.
|
||||
The delay in seconds between two manual fetches of the same remote object.
|
||||
"""
|
||||
INSTANCE_SUPPORT_MESSAGE_DELAY = env.int("INSTANCE_SUPPORT_MESSAGE_DELAY", default=15)
|
||||
"""
|
||||
Delay after signup, in days, before the "support your pod" message is shown.
|
||||
The number of days before your pod shows the "support your pod" message.
|
||||
The timer starts after the user signs up.
|
||||
"""
|
||||
FUNKWHALE_SUPPORT_MESSAGE_DELAY = env.int("FUNKWHALE_SUPPORT_MESSAGE_DELAY", default=15)
|
||||
"""
|
||||
Delay after signup, in days, before the "support Funkwhale" message is shown.
|
||||
The number of days before your pod shows the "support Funkwhale" message.
|
||||
The timer starts after the user signs up.
|
||||
"""
|
||||
|
||||
MIN_DELAY_BETWEEN_DOWNLOADS_COUNT = env.int(
|
||||
"MIN_DELAY_BETWEEN_DOWNLOADS_COUNT", default=60 * 60 * 6
|
||||
)
|
||||
"""
|
||||
Minimum required period, in seconds, for two downloads of the same track
|
||||
by the same IP or user to be recorded in statistics.
|
||||
The required number of seconds between downloads of a track
|
||||
by the same IP or user to be counted separately in listen statistics.
|
||||
"""
|
||||
MARKDOWN_EXTENSIONS = env.list("MARKDOWN_EXTENSIONS", default=["nl2br", "extra"])
|
||||
"""
|
||||
List of markdown extensions to enable.
|
||||
A list of markdown extensions to enable.
|
||||
|
||||
See `<https://python-markdown.github.io/extensions/>`_.
|
||||
"""
|
||||
|
@ -1329,27 +1419,28 @@ Additional TLDs to support with our markdown linkifier.
|
|||
"""
|
||||
EXTERNAL_MEDIA_PROXY_ENABLED = env.bool("EXTERNAL_MEDIA_PROXY_ENABLED", default=True)
|
||||
"""
|
||||
Wether to proxy attachment files hosted on third party pods and and servers.
|
||||
Keeping this to true is recommended, to reduce leaking browsing information
|
||||
of your users, and reduce the bandwidth used on remote pods.
|
||||
Whether to proxy attachment files hosted on third party pods and and servers.
|
||||
We recommend you leave this set to ``true``. This reduces the risk of leaking
|
||||
user browsing information and reduces the bandwidth used on remote pods.
|
||||
"""
|
||||
PODCASTS_THIRD_PARTY_VISIBILITY = env("PODCASTS_THIRD_PARTY_VISIBILITY", default="me")
|
||||
"""
|
||||
By default, only people who subscribe to a podcast RSS will have access
|
||||
to their episodes.
|
||||
By default, only people who subscribe to a podcast RSS have access
|
||||
to its episodes. Change to ``instance`` or ``everyone`` to change the
|
||||
default visibility.
|
||||
|
||||
Switch to "instance" or "everyone" to change that.
|
||||
.. note::
|
||||
|
||||
Changing it only affect new podcasts.
|
||||
Changing this value only affect new podcasts.
|
||||
"""
|
||||
PODCASTS_RSS_FEED_REFRESH_DELAY = env.int(
|
||||
"PODCASTS_RSS_FEED_REFRESH_DELAY", default=60 * 60 * 24
|
||||
)
|
||||
"""
|
||||
Delay, in seconds, between two fetch of RSS feeds.
|
||||
The delay in seconds between two fetch of RSS feeds.
|
||||
|
||||
Reducing this mean you'll receive new episodes faster,
|
||||
but will require more resources.
|
||||
A lower rate means new episodes are fetched sooner,
|
||||
but requires more resources.
|
||||
"""
|
||||
# maximum items loaded through XML feed
|
||||
PODCASTS_RSS_FEED_MAX_ITEMS = env.int("PODCASTS_RSS_FEED_MAX_ITEMS", default=250)
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Local settings
|
||||
|
||||
|
@ -8,8 +7,9 @@ Local settings
|
|||
- Add django-extensions as app
|
||||
"""
|
||||
|
||||
from .common import * # noqa
|
||||
from funkwhale_api import __version__ as funkwhale_version
|
||||
|
||||
from .common import * # noqa
|
||||
|
||||
# DEBUG
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -78,6 +78,8 @@ DEBUG_TOOLBAR_PANELS = [
|
|||
# ------------------------------------------------------------------------------
|
||||
# INSTALLED_APPS += ('django_extensions', )
|
||||
|
||||
INSTALLED_APPS += ("drf_spectacular",)
|
||||
|
||||
# Debug toolbar is slow, we disable it for tests
|
||||
DEBUG_TOOLBAR_ENABLED = env.bool("DEBUG_TOOLBAR_ENABLED", default=DEBUG)
|
||||
if DEBUG_TOOLBAR_ENABLED:
|
||||
|
@ -96,6 +98,47 @@ CELERY_TASK_ALWAYS_EAGER = False
|
|||
|
||||
CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
|
||||
|
||||
REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
|
||||
SPECTACULAR_SETTINGS = {
|
||||
"TITLE": "Funkwhale API",
|
||||
"DESCRIPTION": open("Readme.md").read(),
|
||||
"VERSION": funkwhale_version,
|
||||
"SCHEMA_PATH_PREFIX": "/api/(v[0-9])?",
|
||||
"OAUTH_FLOWS": ["authorizationCode"],
|
||||
"AUTHENTICATION_WHITELIST": [
|
||||
"funkwhale_api.common.authentication.OAuth2Authentication",
|
||||
"funkwhale_api.common.authentication.ApplicationTokenAuthentication",
|
||||
],
|
||||
"SERVERS": [
|
||||
{"url": "https://demo.funkwhale.audio", "description": "Demo Server"},
|
||||
{
|
||||
"url": "https://funkwhale.audio",
|
||||
"description": "Read server with real content",
|
||||
},
|
||||
{
|
||||
"url": "{protocol}://{domain}",
|
||||
"description": "Custom server",
|
||||
"variables": {
|
||||
"domain": {
|
||||
"default": "yourdomain",
|
||||
"description": "Your Funkwhale Domain",
|
||||
},
|
||||
"protocol": {"enum": ["http", "https"], "default": "https"},
|
||||
},
|
||||
},
|
||||
],
|
||||
"OAUTH2_FLOWS": ["authorizationCode"],
|
||||
"OAUTH2_AUTHORIZATION_URL": "/authorize",
|
||||
"OAUTH2_TOKEN_URL": "/api/v1/oauth/token/",
|
||||
"PREPROCESSING_HOOKS": ["config.schema.custom_preprocessing_hook"],
|
||||
"ENUM_NAME_OVERRIDES": {
|
||||
"FederationChoiceEnum": "funkwhale_api.federation.models.TYPE_CHOICES",
|
||||
"ReportTypeEnum": "funkwhale_api.moderation.models.REPORT_TYPES",
|
||||
"PrivacyLevelEnum": "funkwhale_api.common.fields.PRIVACY_LEVEL_CHOICES",
|
||||
"LibraryPrivacyLevelEnum": "funkwhale_api.music.models.LIBRARY_PRIVACY_LEVEL_CHOICES",
|
||||
},
|
||||
"COMPONENT_SPLIT_REQUEST": True,
|
||||
}
|
||||
|
||||
if env.bool("WEAK_PASSWORDS", default=False):
|
||||
# Faster during tests
|
||||
|
|
|
@ -1,4 +1,3 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
Production Configurations
|
||||
|
||||
|
@ -9,7 +8,6 @@ Production Configurations
|
|||
|
||||
|
||||
"""
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from .common import * # noqa
|
||||
|
||||
|
|
|
@ -1,20 +1,25 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.conf import settings
|
||||
from django.conf.urls import url
|
||||
from django.urls import include, path
|
||||
from django.conf.urls.static import static
|
||||
from funkwhale_api.common import admin
|
||||
from django.urls import include, path
|
||||
from django.views import defaults as default_views
|
||||
|
||||
from config import plugins
|
||||
from funkwhale_api.common import admin
|
||||
|
||||
plugins_patterns = plugins.trigger_filter(plugins.URLS, [], enabled=True)
|
||||
|
||||
api_patterns = [
|
||||
url("v1/", include("config.urls.api")),
|
||||
url("v2/", include("config.urls.api_v2")),
|
||||
url("subsonic/", include("config.urls.subsonic")),
|
||||
]
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
# Django Admin, use {% url 'admin:index' %}
|
||||
url(settings.ADMIN_URL, admin.site.urls),
|
||||
url(r"^api/", include(("config.api_urls", "api"), namespace="api")),
|
||||
url(r"^api/", include((api_patterns, "api"), namespace="api")),
|
||||
url(
|
||||
r"^",
|
||||
include(
|
|
@ -0,0 +1,88 @@
|
|||
from django.conf.urls import include, url
|
||||
|
||||
from funkwhale_api.activity import views as activity_views
|
||||
from funkwhale_api.audio import views as audio_views
|
||||
from funkwhale_api.common import routers as common_routers
|
||||
from funkwhale_api.common import views as common_views
|
||||
from funkwhale_api.music import views
|
||||
from funkwhale_api.playlists import views as playlists_views
|
||||
from funkwhale_api.tags import views as tags_views
|
||||
|
||||
router = common_routers.OptionalSlashRouter()
|
||||
router.register(r"activity", activity_views.ActivityViewSet, "activity")
|
||||
router.register(r"tags", tags_views.TagViewSet, "tags")
|
||||
router.register(r"plugins", common_views.PluginViewSet, "plugins")
|
||||
router.register(r"tracks", views.TrackViewSet, "tracks")
|
||||
router.register(r"uploads", views.UploadViewSet, "uploads")
|
||||
router.register(r"libraries", views.LibraryViewSet, "libraries")
|
||||
router.register(r"listen", views.ListenViewSet, "listen")
|
||||
router.register(r"stream", views.StreamViewSet, "stream")
|
||||
router.register(r"artists", views.ArtistViewSet, "artists")
|
||||
router.register(r"channels", audio_views.ChannelViewSet, "channels")
|
||||
router.register(r"subscriptions", audio_views.SubscriptionsViewSet, "subscriptions")
|
||||
router.register(r"albums", views.AlbumViewSet, "albums")
|
||||
router.register(r"licenses", views.LicenseViewSet, "licenses")
|
||||
router.register(r"playlists", playlists_views.PlaylistViewSet, "playlists")
|
||||
router.register(r"mutations", common_views.MutationViewSet, "mutations")
|
||||
router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
|
||||
v1_patterns = router.urls
|
||||
|
||||
v1_patterns += [
|
||||
url(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
|
||||
url(
|
||||
r"^instance/",
|
||||
include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
|
||||
),
|
||||
url(
|
||||
r"^manage/",
|
||||
include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
|
||||
),
|
||||
url(
|
||||
r"^moderation/",
|
||||
include(
|
||||
("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
|
||||
),
|
||||
),
|
||||
url(
|
||||
r"^federation/",
|
||||
include(
|
||||
("funkwhale_api.federation.api_urls", "federation"), namespace="federation"
|
||||
),
|
||||
),
|
||||
url(
|
||||
r"^providers/",
|
||||
include(("funkwhale_api.providers.urls", "providers"), namespace="providers"),
|
||||
),
|
||||
url(
|
||||
r"^favorites/",
|
||||
include(("funkwhale_api.favorites.urls", "favorites"), namespace="favorites"),
|
||||
),
|
||||
url(r"^search$", views.Search.as_view(), name="search"),
|
||||
url(
|
||||
r"^radios/",
|
||||
include(("funkwhale_api.radios.urls", "radios"), namespace="radios"),
|
||||
),
|
||||
url(
|
||||
r"^history/",
|
||||
include(("funkwhale_api.history.urls", "history"), namespace="history"),
|
||||
),
|
||||
url(
|
||||
r"^",
|
||||
include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
|
||||
),
|
||||
# XXX: remove if Funkwhale 1.1
|
||||
url(
|
||||
r"^users/",
|
||||
include(("funkwhale_api.users.api_urls", "users"), namespace="users-nested"),
|
||||
),
|
||||
url(
|
||||
r"^oauth/",
|
||||
include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
|
||||
),
|
||||
url(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
|
||||
url(
|
||||
r"^text-preview/?$", common_views.TextPreviewView.as_view(), name="text-preview"
|
||||
),
|
||||
]
|
||||
|
||||
urlpatterns = [url("", include((v1_patterns, "v1"), namespace="v1"))]
|
|
@ -0,0 +1,15 @@
|
|||
from django.conf.urls import include, url
|
||||
|
||||
from funkwhale_api.common import routers as common_routers
|
||||
|
||||
router = common_routers.OptionalSlashRouter()
|
||||
v2_patterns = router.urls
|
||||
|
||||
v2_patterns += [
|
||||
url(
|
||||
r"^instance/",
|
||||
include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
|
||||
),
|
||||
]
|
||||
|
||||
urlpatterns = [url("", include((v2_patterns, "v2"), namespace="v2"))]
|
|
@ -4,7 +4,6 @@ from funkwhale_api.audio import spa_views as audio_spa_views
|
|||
from funkwhale_api.federation import spa_views as federation_spa_views
|
||||
from funkwhale_api.music import spa_views
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
urls.re_path(
|
||||
r"^library/tracks/(?P<pk>\d+)/?$", spa_views.library_track, name="library_track"
|
|
@ -0,0 +1,18 @@
|
|||
from django.conf.urls import include, url
|
||||
from rest_framework import routers
|
||||
from rest_framework.urlpatterns import format_suffix_patterns
|
||||
|
||||
from funkwhale_api.subsonic.views import SubsonicViewSet
|
||||
|
||||
subsonic_router = routers.SimpleRouter(trailing_slash=False)
|
||||
subsonic_router.register(r"rest", SubsonicViewSet, basename="subsonic")
|
||||
|
||||
subsonic_patterns = format_suffix_patterns(subsonic_router.urls, allowed=["view"])
|
||||
urlpatterns = [url("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))]
|
||||
|
||||
# urlpatterns = [
|
||||
# url(
|
||||
# r"^subsonic/rest/",
|
||||
# include((subsonic_router.urls, "subsonic"), namespace="subsonic"),
|
||||
# )
|
||||
# ]
|
|
@ -0,0 +1,13 @@
|
|||
#!/bin/sh
|
||||
|
||||
set -eux
|
||||
|
||||
funkwhale-manage collectstatic --noinput
|
||||
funkwhale-manage migrate
|
||||
|
||||
# shellcheck disable=SC2086
|
||||
exec gunicorn config.asgi:application \
|
||||
--workers "${FUNKWHALE_WEB_WORKERS-1}" \
|
||||
--worker-class uvicorn.workers.UvicornWorker \
|
||||
--bind 0.0.0.0:5000 \
|
||||
${GUNICORN_ARGS-}
|
|
@ -1,5 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
__version__ = "1.2.10"
|
||||
__version__ = "1.3.0"
|
||||
__version_info__ = tuple(
|
||||
[
|
||||
int(num) if num.isdigit() else num
|
||||
|
|
|
@ -7,7 +7,7 @@ class ActivityConfig(AppConfig):
|
|||
name = "funkwhale_api.activity"
|
||||
|
||||
def ready(self):
|
||||
super(ActivityConfig, self).ready()
|
||||
super().ready()
|
||||
|
||||
app_names = [app.name for app in apps.app_configs.values()]
|
||||
record.registry.autodiscover(app_names)
|
||||
|
|
|
@ -17,7 +17,7 @@ def combined_recent(limit, **kwargs):
|
|||
_qs_list = list(querysets.values())
|
||||
union_qs = _qs_list[0].union(*_qs_list[1:])
|
||||
records = []
|
||||
for row in union_qs.order_by("-{}".format(datetime_field))[:limit]:
|
||||
for row in union_qs.order_by(f"-{datetime_field}")[:limit]:
|
||||
records.append(
|
||||
{"type": row["__type"], "when": row[datetime_field], "pk": row["pk"]}
|
||||
)
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import viewsets
|
||||
from rest_framework.response import Response
|
||||
|
||||
|
@ -13,6 +14,7 @@ class ActivityViewSet(viewsets.GenericViewSet):
|
|||
permission_classes = [ConditionalAuthentication]
|
||||
queryset = TrackFavorite.objects.none()
|
||||
|
||||
@extend_schema(operation_id="get_activity")
|
||||
def list(self, request, *args, **kwargs):
|
||||
activity = utils.get_activity(user=request.user)
|
||||
serializer = self.serializer_class(activity, many=True)
|
||||
|
|
|
@ -2,7 +2,7 @@ import uuid
|
|||
|
||||
import factory
|
||||
|
||||
from funkwhale_api.factories import registry, NoUpdateOnCreate
|
||||
from funkwhale_api.factories import NoUpdateOnCreate, registry
|
||||
from funkwhale_api.federation import actors
|
||||
from funkwhale_api.federation import factories as federation_factories
|
||||
from funkwhale_api.music import factories as music_factories
|
||||
|
@ -15,7 +15,7 @@ def set_actor(o):
|
|||
|
||||
|
||||
def get_rss_channel_name():
|
||||
return "rssfeed-{}".format(uuid.uuid4())
|
||||
return f"rssfeed-{uuid.uuid4()}"
|
||||
|
||||
|
||||
@registry.register
|
||||
|
|
|
@ -1,6 +1,5 @@
|
|||
from django.db.models import Q
|
||||
|
||||
import django_filters
|
||||
from django.db.models import Q
|
||||
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.common import filters as common_filters
|
||||
|
@ -54,7 +53,7 @@ class ChannelFilter(moderation_filters.HiddenContentFilterSet):
|
|||
|
||||
query = Q(actor__in=emitted_follows.values_list("target", flat=True))
|
||||
|
||||
if value is True:
|
||||
if value:
|
||||
return queryset.filter(query)
|
||||
else:
|
||||
return queryset.exclude(query)
|
||||
|
@ -64,9 +63,9 @@ class ChannelFilter(moderation_filters.HiddenContentFilterSet):
|
|||
attributed_to=actors.get_service_actor(),
|
||||
actor__preferred_username__startswith="rssfeed-",
|
||||
)
|
||||
if value is True:
|
||||
if value:
|
||||
queryset = queryset.filter(query)
|
||||
if value is False:
|
||||
else:
|
||||
queryset = queryset.exclude(query)
|
||||
|
||||
return queryset
|
||||
|
|
|
@ -0,0 +1,20 @@
|
|||
# Generated by Django 3.2.13 on 2022-06-27 19:15
|
||||
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
import funkwhale_api.audio.models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('audio', '0003_channel_rss_url'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='channel',
|
||||
name='metadata',
|
||||
field=models.JSONField(blank=True, default=funkwhale_api.audio.models.empty_dict, encoder=django.core.serializers.json.DjangoJSONEncoder, max_length=50000),
|
||||
),
|
||||
]
|
|
@ -1,14 +1,13 @@
|
|||
import uuid
|
||||
|
||||
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
from django.contrib.postgres.fields import JSONField
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from django.db.models import JSONField
|
||||
from django.db.models.signals import post_delete
|
||||
from django.dispatch import receiver
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
|
||||
from funkwhale_api.federation import keys
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
|
@ -81,7 +80,7 @@ class Channel(models.Model):
|
|||
return self.actor.fid
|
||||
|
||||
@property
|
||||
def is_local(self):
|
||||
def is_local(self) -> bool:
|
||||
return self.actor.is_local
|
||||
|
||||
@property
|
||||
|
@ -94,7 +93,7 @@ class Channel(models.Model):
|
|||
suffix = self.actor.preferred_username
|
||||
else:
|
||||
suffix = self.actor.full_username
|
||||
return federation_utils.full_url("/channels/{}".format(suffix))
|
||||
return federation_utils.full_url(f"/channels/{suffix}")
|
||||
|
||||
def get_rss_url(self):
|
||||
if not self.artist.is_local or self.is_external_rss:
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import xml.etree.ElementTree as ET
|
||||
|
||||
from rest_framework import negotiation
|
||||
from rest_framework import renderers
|
||||
from rest_framework import negotiation, renderers
|
||||
|
||||
from funkwhale_api.subsonic.renderers import dict_to_xml_tree
|
||||
|
||||
|
|
|
@ -3,39 +3,35 @@ import logging
|
|||
import time
|
||||
import uuid
|
||||
|
||||
import feedparser
|
||||
import pytz
|
||||
import requests
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
from django.db.models import Q
|
||||
from django.utils import timezone
|
||||
|
||||
import feedparser
|
||||
import requests
|
||||
import pytz
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from django.templatetags.static import static
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.common import locales, preferences
|
||||
from funkwhale_api.common import serializers as common_serializers
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.common import locales
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.common import session
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.federation import actors
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.federation import serializers as federation_serializers
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.moderation import mrf
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import serializers as music_serializers
|
||||
from funkwhale_api.music.serializers import COVER_WRITE_FIELD, CoverField
|
||||
from funkwhale_api.tags import models as tags_models
|
||||
from funkwhale_api.tags import serializers as tags_serializers
|
||||
from funkwhale_api.users import serializers as users_serializers
|
||||
|
||||
from . import categories
|
||||
from . import models
|
||||
|
||||
from . import categories, models
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -66,16 +62,16 @@ class ChannelMetadataSerializer(serializers.Serializer):
|
|||
|
||||
if child not in categories.ITUNES_CATEGORIES[parent]:
|
||||
raise serializers.ValidationError(
|
||||
'"{}" is not a valid subcategory for "{}"'.format(child, parent)
|
||||
f'"{child}" is not a valid subcategory for "{parent}"'
|
||||
)
|
||||
|
||||
return child
|
||||
|
||||
|
||||
class ChannelCreateSerializer(serializers.Serializer):
|
||||
name = serializers.CharField(max_length=music_models.MAX_LENGTHS["ARTIST_NAME"])
|
||||
name = serializers.CharField(max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"])
|
||||
username = serializers.CharField(
|
||||
max_length=music_models.MAX_LENGTHS["ARTIST_NAME"],
|
||||
max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"],
|
||||
validators=[users_serializers.ASCIIUsernameValidator()],
|
||||
)
|
||||
description = common_serializers.ContentSerializer(allow_null=True)
|
||||
|
@ -84,7 +80,7 @@ class ChannelCreateSerializer(serializers.Serializer):
|
|||
choices=music_models.ARTIST_CONTENT_CATEGORY_CHOICES
|
||||
)
|
||||
metadata = serializers.DictField(required=False)
|
||||
cover = music_serializers.COVER_WRITE_FIELD
|
||||
cover = COVER_WRITE_FIELD
|
||||
|
||||
def validate(self, validated_data):
|
||||
existing_channels = self.context["actor"].owned_channels.count()
|
||||
|
@ -135,7 +131,8 @@ class ChannelCreateSerializer(serializers.Serializer):
|
|||
metadata=validated_data["metadata"],
|
||||
)
|
||||
channel.actor = models.generate_actor(
|
||||
validated_data["username"], name=validated_data["name"],
|
||||
validated_data["username"],
|
||||
name=validated_data["name"],
|
||||
)
|
||||
|
||||
channel.library = music_models.Library.objects.create(
|
||||
|
@ -155,14 +152,14 @@ NOOP = object()
|
|||
|
||||
|
||||
class ChannelUpdateSerializer(serializers.Serializer):
|
||||
name = serializers.CharField(max_length=music_models.MAX_LENGTHS["ARTIST_NAME"])
|
||||
name = serializers.CharField(max_length=federation_models.MAX_LENGTHS["ACTOR_NAME"])
|
||||
description = common_serializers.ContentSerializer(allow_null=True)
|
||||
tags = tags_serializers.TagsListField()
|
||||
content_category = serializers.ChoiceField(
|
||||
choices=music_models.ARTIST_CONTENT_CATEGORY_CHOICES
|
||||
)
|
||||
metadata = serializers.DictField(required=False)
|
||||
cover = music_serializers.COVER_WRITE_FIELD
|
||||
cover = COVER_WRITE_FIELD
|
||||
|
||||
def validate(self, validated_data):
|
||||
validated_data = super().validate(validated_data)
|
||||
|
@ -232,8 +229,26 @@ class ChannelUpdateSerializer(serializers.Serializer):
|
|||
return ChannelSerializer(obj, context=self.context).data
|
||||
|
||||
|
||||
class SimpleChannelArtistSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField()
|
||||
fid = serializers.URLField()
|
||||
mbid = serializers.CharField()
|
||||
name = serializers.CharField()
|
||||
creation_date = serializers.DateTimeField()
|
||||
modification_date = serializers.DateTimeField()
|
||||
is_local = serializers.BooleanField()
|
||||
content_category = serializers.CharField()
|
||||
description = common_serializers.ContentSerializer(allow_null=True, required=False)
|
||||
cover = CoverField(allow_null=True, required=False)
|
||||
channel = serializers.UUIDField(allow_null=True, required=False)
|
||||
tracks_count = serializers.IntegerField(source="_tracks_count", required=False)
|
||||
tags = serializers.ListField(
|
||||
child=serializers.CharField(), source="_prefetched_tagged_items", required=False
|
||||
)
|
||||
|
||||
|
||||
class ChannelSerializer(serializers.ModelSerializer):
|
||||
artist = serializers.SerializerMethodField()
|
||||
artist = SimpleChannelArtistSerializer()
|
||||
actor = serializers.SerializerMethodField()
|
||||
downloads_count = serializers.SerializerMethodField()
|
||||
attributed_to = federation_serializers.APIActorSerializer()
|
||||
|
@ -254,30 +269,42 @@ class ChannelSerializer(serializers.ModelSerializer):
|
|||
"downloads_count",
|
||||
]
|
||||
|
||||
def get_artist(self, obj):
|
||||
return music_serializers.serialize_artist_simple(obj.artist)
|
||||
|
||||
def to_representation(self, obj):
|
||||
data = super().to_representation(obj)
|
||||
if self.context.get("subscriptions_count"):
|
||||
data["subscriptions_count"] = self.get_subscriptions_count(obj)
|
||||
return data
|
||||
|
||||
def get_subscriptions_count(self, obj):
|
||||
def get_subscriptions_count(self, obj) -> int:
|
||||
return obj.actor.received_follows.exclude(approved=False).count()
|
||||
|
||||
def get_downloads_count(self, obj):
|
||||
def get_downloads_count(self, obj) -> int:
|
||||
return getattr(obj, "_downloads_count", None) or 0
|
||||
|
||||
@extend_schema_field(federation_serializers.APIActorSerializer)
|
||||
def get_actor(self, obj):
|
||||
if obj.attributed_to == actors.get_service_actor():
|
||||
return None
|
||||
return federation_serializers.APIActorSerializer(obj.actor).data
|
||||
|
||||
@extend_schema_field(OpenApiTypes.URI)
|
||||
def get_url(self, obj):
|
||||
return obj.actor.url
|
||||
|
||||
|
||||
class InlineSubscriptionSerializer(serializers.Serializer):
|
||||
uuid = serializers.UUIDField()
|
||||
channel = serializers.UUIDField(source="target__channel__uuid")
|
||||
|
||||
|
||||
class AllSubscriptionsSerializer(serializers.Serializer):
|
||||
results = InlineSubscriptionSerializer(source="*", many=True)
|
||||
count = serializers.SerializerMethodField()
|
||||
|
||||
def get_count(self, o) -> int:
|
||||
return len(o)
|
||||
|
||||
|
||||
class SubscriptionSerializer(serializers.Serializer):
|
||||
approved = serializers.BooleanField(read_only=True)
|
||||
fid = serializers.URLField(read_only=True)
|
||||
|
@ -310,7 +337,7 @@ def retrieve_feed(url):
|
|||
except requests.exceptions.HTTPError as e:
|
||||
if e.response:
|
||||
raise FeedFetchException(
|
||||
"Error while fetching feed: HTTP {}".format(e.response.status_code)
|
||||
f"Error while fetching feed: HTTP {e.response.status_code}"
|
||||
)
|
||||
raise FeedFetchException("Error while fetching feed: unknown error")
|
||||
except requests.exceptions.Timeout:
|
||||
|
@ -318,9 +345,9 @@ def retrieve_feed(url):
|
|||
except requests.exceptions.ConnectionError:
|
||||
raise FeedFetchException("Error while fetching feed: connection error")
|
||||
except requests.RequestException as e:
|
||||
raise FeedFetchException("Error while fetching feed: {}".format(e))
|
||||
raise FeedFetchException(f"Error while fetching feed: {e}")
|
||||
except Exception as e:
|
||||
raise FeedFetchException("Error while fetching feed: {}".format(e))
|
||||
raise FeedFetchException(f"Error while fetching feed: {e}")
|
||||
|
||||
return response
|
||||
|
||||
|
@ -339,7 +366,7 @@ def get_channel_from_rss_url(url, raise_exception=False):
|
|||
parsed_feed = feedparser.parse(response.text)
|
||||
serializer = RssFeedSerializer(data=parsed_feed["feed"])
|
||||
if not serializer.is_valid(raise_exception=raise_exception):
|
||||
raise FeedFetchException("Invalid xml content: {}".format(serializer.errors))
|
||||
raise FeedFetchException(f"Invalid xml content: {serializer.errors}")
|
||||
|
||||
# second mrf check with validated data
|
||||
urls_to_check = set()
|
||||
|
@ -369,9 +396,7 @@ def get_channel_from_rss_url(url, raise_exception=False):
|
|||
)
|
||||
)
|
||||
if parsed_feed.feed.get("rights"):
|
||||
track_defaults["copyright"] = parsed_feed.feed.rights[
|
||||
: music_models.MAX_LENGTHS["COPYRIGHT"]
|
||||
]
|
||||
track_defaults["copyright"] = parsed_feed.feed.rights
|
||||
for entry in entries[: settings.PODCASTS_RSS_FEED_MAX_ITEMS]:
|
||||
logger.debug("Importing feed item %s", entry.id)
|
||||
s = RssFeedItemSerializer(data=entry)
|
||||
|
@ -509,7 +534,7 @@ class RssFeedSerializer(serializers.Serializer):
|
|||
else:
|
||||
artist_kwargs = {"pk": None}
|
||||
actor_kwargs = {"pk": None}
|
||||
preferred_username = "rssfeed-{}".format(uuid.uuid4())
|
||||
preferred_username = f"rssfeed-{uuid.uuid4()}"
|
||||
actor_defaults = {
|
||||
"preferred_username": preferred_username,
|
||||
"type": "Application",
|
||||
|
@ -531,9 +556,7 @@ class RssFeedSerializer(serializers.Serializer):
|
|||
**artist_kwargs,
|
||||
defaults={
|
||||
"attributed_to": service_actor,
|
||||
"name": validated_data["title"][
|
||||
: music_models.MAX_LENGTHS["ARTIST_NAME"]
|
||||
],
|
||||
"name": validated_data["title"],
|
||||
"content_category": "podcast",
|
||||
},
|
||||
)
|
||||
|
@ -571,7 +594,8 @@ class RssFeedSerializer(serializers.Serializer):
|
|||
|
||||
# create/update the channel
|
||||
channel, created = models.Channel.objects.update_or_create(
|
||||
pk=existing.pk if existing else None, defaults=channel_defaults,
|
||||
pk=existing.pk if existing else None,
|
||||
defaults=channel_defaults,
|
||||
)
|
||||
return channel
|
||||
|
||||
|
@ -588,7 +612,7 @@ class ItunesDurationField(serializers.CharField):
|
|||
try:
|
||||
int_parts.append(int(part))
|
||||
except (ValueError, TypeError):
|
||||
raise serializers.ValidationError("Invalid duration {}".format(v))
|
||||
raise serializers.ValidationError(f"Invalid duration {v}")
|
||||
|
||||
if len(int_parts) == 2:
|
||||
hours = 0
|
||||
|
@ -596,7 +620,7 @@ class ItunesDurationField(serializers.CharField):
|
|||
elif len(int_parts) == 3:
|
||||
hours, minutes, seconds = int_parts
|
||||
else:
|
||||
raise serializers.ValidationError("Invalid duration {}".format(v))
|
||||
raise serializers.ValidationError(f"Invalid duration {v}")
|
||||
|
||||
return (hours * 3600) + (minutes * 60) + seconds
|
||||
|
||||
|
@ -735,16 +759,12 @@ class RssFeedItemSerializer(serializers.Serializer):
|
|||
{
|
||||
"disc_number": validated_data.get("itunes_season", 1) or 1,
|
||||
"position": validated_data.get("itunes_episode", 1) or 1,
|
||||
"title": validated_data["title"][
|
||||
: music_models.MAX_LENGTHS["TRACK_TITLE"]
|
||||
],
|
||||
"title": validated_data["title"],
|
||||
"artist": channel.artist,
|
||||
}
|
||||
)
|
||||
if "rights" in validated_data:
|
||||
track_defaults["copyright"] = validated_data["rights"][
|
||||
: music_models.MAX_LENGTHS["COPYRIGHT"]
|
||||
]
|
||||
track_defaults["copyright"] = validated_data["rights"]
|
||||
|
||||
if "published_parsed" in validated_data:
|
||||
track_defaults["creation_date"] = datetime.datetime.fromtimestamp(
|
||||
|
@ -773,14 +793,15 @@ class RssFeedItemSerializer(serializers.Serializer):
|
|||
|
||||
# create/update the track
|
||||
track, created = music_models.Track.objects.update_or_create(
|
||||
**track_kwargs, defaults=track_defaults,
|
||||
**track_kwargs,
|
||||
defaults=track_defaults,
|
||||
)
|
||||
# optimisation for reducing SQL queries, because we cannot use select_related with
|
||||
# update or create, so we restore the cache by hand
|
||||
if existing_track:
|
||||
for field in ["attachment_cover", "description"]:
|
||||
cached_id_value = getattr(existing_track, "{}_id".format(field))
|
||||
new_id_value = getattr(track, "{}_id".format(field))
|
||||
cached_id_value = getattr(existing_track, f"{field}_id")
|
||||
new_id_value = getattr(track, f"{field}_id")
|
||||
if new_id_value and cached_id_value == new_id_value:
|
||||
setattr(track, field, getattr(existing_track, field))
|
||||
|
||||
|
|
|
@ -3,12 +3,9 @@ import urllib.parse
|
|||
from django.conf import settings
|
||||
from django.db.models import Q
|
||||
from django.urls import reverse
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.common import middleware
|
||||
from funkwhale_api.common import utils
|
||||
from funkwhale_api.common import middleware, preferences, utils
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.music import spa_views
|
||||
|
||||
|
@ -64,7 +61,7 @@ def channel_detail(query, redirect_to_ap):
|
|||
"rel": "alternate",
|
||||
"type": "application/rss+xml",
|
||||
"href": obj.get_rss_url(),
|
||||
"title": "{} - RSS Podcast Feed".format(obj.artist.name),
|
||||
"title": f"{obj.artist.name} - RSS Podcast Feed",
|
||||
},
|
||||
)
|
||||
|
||||
|
@ -76,7 +73,7 @@ def channel_detail(query, redirect_to_ap):
|
|||
"type": "application/json+oembed",
|
||||
"href": (
|
||||
utils.join_url(settings.FUNKWHALE_URL, reverse("api:v1:oembed"))
|
||||
+ "?format=json&url={}".format(urllib.parse.quote_plus(obj_url))
|
||||
+ f"?format=json&url={urllib.parse.quote_plus(obj_url)}"
|
||||
),
|
||||
}
|
||||
)
|
||||
|
|
|
@ -7,8 +7,7 @@ from django.utils import timezone
|
|||
|
||||
from funkwhale_api.taskapp import celery
|
||||
|
||||
from . import models
|
||||
from . import serializers
|
||||
from . import models, serializers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -1,18 +1,13 @@
|
|||
from rest_framework import decorators
|
||||
from rest_framework import exceptions
|
||||
from rest_framework import mixins
|
||||
from rest_framework import permissions as rest_permissions
|
||||
from rest_framework import response
|
||||
from rest_framework import viewsets
|
||||
|
||||
from django import http
|
||||
from django.db import transaction
|
||||
from django.db.models import Count, Prefetch, Q, Sum
|
||||
from django.utils import timezone
|
||||
from drf_spectacular.utils import extend_schema, extend_schema_view
|
||||
from rest_framework import decorators, exceptions, mixins
|
||||
from rest_framework import permissions as rest_permissions
|
||||
from rest_framework import response, viewsets
|
||||
|
||||
from funkwhale_api.common import locales
|
||||
from funkwhale_api.common import permissions
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.common import locales, permissions, preferences
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.common.mixins import MultipleLookupDetailMixin
|
||||
from funkwhale_api.federation import actors
|
||||
|
@ -27,19 +22,28 @@ from funkwhale_api.users.oauth import permissions as oauth_permissions
|
|||
from . import categories, filters, models, renderers, serializers
|
||||
|
||||
ARTIST_PREFETCH_QS = (
|
||||
music_models.Artist.objects.select_related("description", "attachment_cover",)
|
||||
music_models.Artist.objects.select_related(
|
||||
"description",
|
||||
"attachment_cover",
|
||||
)
|
||||
.prefetch_related(music_views.TAG_PREFETCH)
|
||||
.annotate(_tracks_count=Count("tracks"))
|
||||
)
|
||||
|
||||
|
||||
class ChannelsMixin(object):
|
||||
class ChannelsMixin:
|
||||
def dispatch(self, request, *args, **kwargs):
|
||||
if not preferences.get("audio__channels_enabled"):
|
||||
return http.HttpResponse(status=405)
|
||||
return super().dispatch(request, *args, **kwargs)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
metedata_choices=extend_schema(operation_id="get_channel_metadata_choices"),
|
||||
subscribe=extend_schema(operation_id="subscribe_channel"),
|
||||
unsubscribe=extend_schema(operation_id="unsubscribe_channel"),
|
||||
rss_subscribe=extend_schema(operation_id="subscribe_channel_rss"),
|
||||
)
|
||||
class ChannelViewSet(
|
||||
ChannelsMixin,
|
||||
MultipleLookupDetailMixin,
|
||||
|
@ -91,7 +95,9 @@ class ChannelViewSet(
|
|||
return serializers.ChannelSerializer
|
||||
elif self.action in ["update", "partial_update"]:
|
||||
return serializers.ChannelUpdateSerializer
|
||||
return serializers.ChannelCreateSerializer
|
||||
elif self.action == "create":
|
||||
return serializers.ChannelCreateSerializer
|
||||
return serializers.ChannelSerializer
|
||||
|
||||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
|
@ -131,6 +137,7 @@ class ChannelViewSet(
|
|||
detail=True,
|
||||
methods=["post"],
|
||||
permission_classes=[rest_permissions.IsAuthenticated],
|
||||
serializer_class=serializers.SubscriptionSerializer,
|
||||
)
|
||||
def subscribe(self, request, *args, **kwargs):
|
||||
object = self.get_object()
|
||||
|
@ -153,6 +160,7 @@ class ChannelViewSet(
|
|||
data = serializers.SubscriptionSerializer(subscription).data
|
||||
return response.Response(data, status=201)
|
||||
|
||||
@extend_schema(responses={204: None})
|
||||
@decorators.action(
|
||||
detail=True,
|
||||
methods=["post", "delete"],
|
||||
|
@ -192,7 +200,9 @@ class ChannelViewSet(
|
|||
"track",
|
||||
queryset=music_models.Track.objects.select_related(
|
||||
"attachment_cover", "description"
|
||||
).prefetch_related(music_views.TAG_PREFETCH,),
|
||||
).prefetch_related(
|
||||
music_views.TAG_PREFETCH,
|
||||
),
|
||||
),
|
||||
)
|
||||
.select_related("track__attachment_cover", "track__description")
|
||||
|
@ -232,7 +242,9 @@ class ChannelViewSet(
|
|||
if not serializer.is_valid():
|
||||
return response.Response(serializer.errors, status=400)
|
||||
channel = (
|
||||
models.Channel.objects.filter(rss_url=serializer.validated_data["url"],)
|
||||
models.Channel.objects.filter(
|
||||
rss_url=serializer.validated_data["url"],
|
||||
)
|
||||
.order_by("id")
|
||||
.first()
|
||||
)
|
||||
|
@ -243,7 +255,10 @@ class ChannelViewSet(
|
|||
serializer.validated_data["url"]
|
||||
)
|
||||
except serializers.FeedFetchException as e:
|
||||
return response.Response({"detail": str(e)}, status=400,)
|
||||
return response.Response(
|
||||
{"detail": str(e)},
|
||||
status=400,
|
||||
)
|
||||
|
||||
subscription = federation_models.Follow(actor=request.user.actor)
|
||||
subscription.fid = subscription.get_federation_id()
|
||||
|
@ -312,6 +327,10 @@ class SubscriptionsViewSet(
|
|||
qs = super().get_queryset()
|
||||
return qs.filter(actor=self.request.user.actor)
|
||||
|
||||
@extend_schema(
|
||||
responses=serializers.AllSubscriptionsSerializer(),
|
||||
operation_id="get_all_subscriptions",
|
||||
)
|
||||
@decorators.action(methods=["get"], detail=False)
|
||||
def all(self, request, *args, **kwargs):
|
||||
"""
|
||||
|
@ -319,12 +338,7 @@ class SubscriptionsViewSet(
|
|||
to have a performant endpoint and avoid lots of queries just to display
|
||||
subscription status in the UI
|
||||
"""
|
||||
subscriptions = list(
|
||||
self.get_queryset().values_list("uuid", "target__channel__uuid")
|
||||
)
|
||||
subscriptions = self.get_queryset().values("uuid", "target__channel__uuid")
|
||||
|
||||
payload = {
|
||||
"results": [{"uuid": str(u[0]), "channel": u[1]} for u in subscriptions],
|
||||
"count": len(subscriptions),
|
||||
}
|
||||
payload = serializers.AllSubscriptionsSerializer(subscriptions).data
|
||||
return response.Response(payload, status=200)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import click
|
||||
import functools
|
||||
|
||||
import click
|
||||
|
||||
|
||||
@click.group()
|
||||
def cli():
|
||||
|
|
|
@ -6,7 +6,8 @@ from . import base
|
|||
|
||||
|
||||
def handler_add_tags_from_tracks(
|
||||
artists=False, albums=False,
|
||||
artists=False,
|
||||
albums=False,
|
||||
):
|
||||
result = None
|
||||
if artists:
|
||||
|
@ -19,7 +20,7 @@ def handler_add_tags_from_tracks(
|
|||
if result is None:
|
||||
click.echo(" No relevant tags found")
|
||||
else:
|
||||
click.echo(" Relevant tags added to {} objects".format(len(result)))
|
||||
click.echo(f" Relevant tags added to {len(result)} objects")
|
||||
|
||||
|
||||
@base.cli.group()
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import click
|
||||
import sys
|
||||
|
||||
from . import base
|
||||
import click
|
||||
from rest_framework.exceptions import ValidationError
|
||||
|
||||
from . import library # noqa
|
||||
from . import media # noqa
|
||||
from . import plugins # noqa
|
||||
from . import users # noqa
|
||||
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from . import base
|
||||
|
||||
|
||||
def invoke():
|
||||
|
@ -16,7 +16,7 @@ def invoke():
|
|||
except ValidationError as e:
|
||||
click.secho("Invalid data:", fg="red")
|
||||
for field, errors in e.detail.items():
|
||||
click.secho(" {}:".format(field), fg="red")
|
||||
click.secho(f" {field}:", fg="red")
|
||||
for error in errors:
|
||||
click.secho(" - {}".format(error), fg="red")
|
||||
click.secho(f" - {error}", fg="red")
|
||||
sys.exit(1)
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
import click
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.core.files.storage import default_storage
|
||||
|
||||
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
|
||||
from versatileimagefield import settings as vif_settings
|
||||
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
|
||||
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.common.models import Attachment
|
||||
|
@ -41,19 +39,15 @@ def generate_thumbnails(delete):
|
|||
(Attachment, "file", "attachment_square"),
|
||||
]
|
||||
for model, attribute, key_set in MODELS:
|
||||
click.echo(
|
||||
"Generating thumbnails for {}.{}…".format(model._meta.label, attribute)
|
||||
)
|
||||
qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
|
||||
click.echo(f"Generating thumbnails for {model._meta.label}.{attribute}…")
|
||||
qs = model.objects.exclude(**{f"{attribute}__isnull": True})
|
||||
qs = qs.exclude(**{attribute: ""})
|
||||
cache_key = "*{}{}*".format(
|
||||
settings.MEDIA_URL, vif_settings.VERSATILEIMAGEFIELD_SIZED_DIRNAME
|
||||
)
|
||||
entries = cache.keys(cache_key)
|
||||
if entries:
|
||||
click.echo(
|
||||
" Clearing {} cache entries: {}…".format(len(entries), cache_key)
|
||||
)
|
||||
click.echo(f" Clearing {len(entries)} cache entries: {cache_key}…")
|
||||
for keys in common_utils.batch(iter(entries)):
|
||||
cache.delete_many(keys)
|
||||
warmer = VersatileImageFieldWarmer(
|
||||
|
@ -64,6 +58,4 @@ def generate_thumbnails(delete):
|
|||
)
|
||||
click.echo(" Creating images")
|
||||
num_created, failed_to_create = warmer.warm()
|
||||
click.echo(
|
||||
" {} created, {} in error".format(num_created, len(failed_to_create))
|
||||
)
|
||||
click.echo(f" {num_created} created, {len(failed_to_create)} in error")
|
||||
|
|
|
@ -5,7 +5,6 @@ import sys
|
|||
import click
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
from . import base
|
||||
|
||||
|
||||
|
|
|
@ -1,17 +1,13 @@
|
|||
import click
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.users import models
|
||||
from funkwhale_api.users import serializers
|
||||
from funkwhale_api.users import tasks
|
||||
from funkwhale_api.users import models, serializers, tasks
|
||||
|
||||
from . import base
|
||||
from . import utils
|
||||
from . import base, utils
|
||||
|
||||
|
||||
class FakeRequest(object):
|
||||
class FakeRequest:
|
||||
def __init__(self, session={}):
|
||||
self.session = session
|
||||
|
||||
|
@ -48,7 +44,7 @@ def handler_create_user(
|
|||
for permission in permissions:
|
||||
if permission in models.PERMISSIONS:
|
||||
utils.logger.debug("Setting %s permission to True", permission)
|
||||
setattr(user, "permission_{}".format(permission), True)
|
||||
setattr(user, f"permission_{permission}", True)
|
||||
else:
|
||||
utils.logger.warn("Unknown permission %s", permission)
|
||||
utils.logger.debug("Creating actor…")
|
||||
|
@ -60,7 +56,7 @@ def handler_create_user(
|
|||
@transaction.atomic
|
||||
def handler_delete_user(usernames, soft=True):
|
||||
for username in usernames:
|
||||
click.echo("Deleting {}…".format(username))
|
||||
click.echo(f"Deleting {username}…")
|
||||
actor = None
|
||||
user = None
|
||||
try:
|
||||
|
@ -157,13 +153,16 @@ def users():
|
|||
type=click.INT,
|
||||
)
|
||||
@click.option(
|
||||
"--superuser/--no-superuser", default=False,
|
||||
"--superuser/--no-superuser",
|
||||
default=False,
|
||||
)
|
||||
@click.option(
|
||||
"--staff/--no-staff", default=False,
|
||||
"--staff/--no-staff",
|
||||
default=False,
|
||||
)
|
||||
@click.option(
|
||||
"--permission", multiple=True,
|
||||
"--permission",
|
||||
multiple=True,
|
||||
)
|
||||
def create(username, password, email, superuser, staff, permission, upload_quota):
|
||||
"""Create a new user"""
|
||||
|
@ -179,9 +178,9 @@ def create(username, password, email, superuser, staff, permission, upload_quota
|
|||
permissions=permission,
|
||||
upload_quota=upload_quota,
|
||||
)
|
||||
click.echo("User {} created!".format(user.username))
|
||||
click.echo(f"User {user.username} created!")
|
||||
if generated_password:
|
||||
click.echo(" Generated password: {}".format(generated_password))
|
||||
click.echo(f" Generated password: {generated_password}")
|
||||
|
||||
|
||||
@base.delete_command(group=users, id_var="username")
|
||||
|
@ -210,7 +209,9 @@ def delete(username, hard):
|
|||
@click.option("--permission-settings/--no-permission-settings", default=None)
|
||||
@click.option("--password", default=None, envvar="FUNKWHALE_CLI_USER_UPDATE_PASSWORD")
|
||||
@click.option(
|
||||
"-q", "--upload-quota", type=click.INT,
|
||||
"-q",
|
||||
"--upload-quota",
|
||||
type=click.INT,
|
||||
)
|
||||
def update(username, **kwargs):
|
||||
"""Update attributes for given users"""
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
from django.contrib.admin import register as initial_register, site, ModelAdmin # noqa
|
||||
from django.contrib.admin import site # noqa: F401
|
||||
from django.contrib.admin import ModelAdmin
|
||||
from django.contrib.admin import register as initial_register
|
||||
from django.db.models.fields.related import RelatedField
|
||||
|
||||
from . import models
|
||||
from . import tasks
|
||||
from . import models, tasks
|
||||
|
||||
|
||||
def register(model):
|
||||
|
|
|
@ -3,8 +3,7 @@ from django.conf import settings
|
|||
|
||||
from config import plugins
|
||||
|
||||
from . import mutations
|
||||
from . import utils
|
||||
from . import mutations, utils
|
||||
|
||||
|
||||
class CommonConfig(AppConfig):
|
||||
|
|
|
@ -1,9 +1,6 @@
|
|||
from django.conf import settings
|
||||
from django.utils.translation import ugettext as _
|
||||
|
||||
from django.core.cache import cache
|
||||
|
||||
from allauth.account.utils import send_email_confirmation
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext as _
|
||||
from oauth2_provider.contrib.rest_framework.authentication import (
|
||||
OAuth2Authentication as BaseOAuth2Authentication,
|
||||
)
|
||||
|
@ -12,14 +9,6 @@ from rest_framework import exceptions
|
|||
from funkwhale_api.users import models as users_models
|
||||
|
||||
|
||||
def should_verify_email(user):
|
||||
if user.is_superuser:
|
||||
return False
|
||||
has_unverified_email = not user.has_verified_primary_email
|
||||
mandatory_verification = settings.ACCOUNT_EMAIL_VERIFICATION != "optional"
|
||||
return has_unverified_email and mandatory_verification
|
||||
|
||||
|
||||
class UnverifiedEmail(Exception):
|
||||
def __init__(self, user):
|
||||
self.user = user
|
||||
|
@ -27,7 +16,7 @@ class UnverifiedEmail(Exception):
|
|||
|
||||
def resend_confirmation_email(request, user):
|
||||
THROTTLE_DELAY = 500
|
||||
cache_key = "auth:resent-email-confirmation:{}".format(user.pk)
|
||||
cache_key = f"auth:resent-email-confirmation:{user.pk}"
|
||||
if cache.get(cache_key):
|
||||
return False
|
||||
|
||||
|
@ -45,7 +34,7 @@ class OAuth2Authentication(BaseOAuth2Authentication):
|
|||
resend_confirmation_email(request, e.user)
|
||||
|
||||
|
||||
class ApplicationTokenAuthentication(object):
|
||||
class ApplicationTokenAuthentication:
|
||||
def authenticate(self, request):
|
||||
try:
|
||||
header = request.headers["Authorization"]
|
||||
|
@ -68,7 +57,7 @@ class ApplicationTokenAuthentication(object):
|
|||
msg = _("User account is disabled.")
|
||||
raise exceptions.AuthenticationFailed(msg)
|
||||
|
||||
if should_verify_email(user):
|
||||
if user.should_verify_email():
|
||||
raise UnverifiedEmail(user)
|
||||
|
||||
request.scopes = application.scope.split()
|
||||
|
|
|
@ -1,17 +1,10 @@
|
|||
from django.db import transaction
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema
|
||||
from rest_framework import decorators, exceptions, response, status
|
||||
|
||||
from rest_framework import decorators
|
||||
from rest_framework import exceptions
|
||||
from rest_framework import response
|
||||
from rest_framework import status
|
||||
|
||||
from . import filters
|
||||
from . import models
|
||||
from . import filters, models
|
||||
from . import mutations as common_mutations
|
||||
from . import serializers
|
||||
from . import signals
|
||||
from . import tasks
|
||||
from . import utils
|
||||
from . import serializers, signals, tasks, utils
|
||||
|
||||
|
||||
def action_route(serializer_class):
|
||||
|
@ -87,6 +80,16 @@ def mutations_route(types):
|
|||
)
|
||||
return response.Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return decorators.action(
|
||||
methods=["get", "post"], detail=True, required_scope="edits"
|
||||
)(mutations)
|
||||
return extend_schema(
|
||||
methods=["post"], responses=serializers.APIMutationSerializer()
|
||||
)(
|
||||
extend_schema(
|
||||
methods=["get"],
|
||||
responses=serializers.APIMutationSerializer(many=True),
|
||||
parameters=[OpenApiParameter("id", location="query", exclude=True)],
|
||||
)(
|
||||
decorators.action(
|
||||
methods=["get", "post"], detail=True, required_scope="edits"
|
||||
)(mutations)
|
||||
)
|
||||
)
|
||||
|
|
|
@ -1,7 +1,6 @@
|
|||
import factory
|
||||
|
||||
from funkwhale_api.factories import registry, NoUpdateOnCreate
|
||||
|
||||
from funkwhale_api.factories import NoUpdateOnCreate, registry
|
||||
from funkwhale_api.federation import factories as federation_factories
|
||||
|
||||
|
||||
|
|
|
@ -2,7 +2,6 @@ import django_filters
|
|||
from django import forms
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db import models
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from . import search
|
||||
|
@ -25,9 +24,9 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
|
|||
if user.is_anonymous:
|
||||
return models.Q(**{lookup_field: "everyone"})
|
||||
|
||||
return models.Q(
|
||||
**{"{}__in".format(lookup_field): ["instance", "everyone"]}
|
||||
) | models.Q(**{lookup_field: "me", user_field: user})
|
||||
return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
|
||||
**{lookup_field: "me", user_field: user}
|
||||
)
|
||||
|
||||
|
||||
class SearchFilter(django_filters.CharFilter):
|
||||
|
@ -98,7 +97,7 @@ def get_generic_filter_query(value, relation_name, choices):
|
|||
obj = related_queryset.get(obj_query)
|
||||
except related_queryset.model.DoesNotExist:
|
||||
raise forms.ValidationError("Invalid object")
|
||||
filter_query &= models.Q(**{"{}_id".format(relation_name): obj.id})
|
||||
filter_query &= models.Q(**{f"{relation_name}_id": obj.id})
|
||||
|
||||
return filter_query
|
||||
|
||||
|
@ -164,7 +163,7 @@ class GenericRelation(serializers.JSONField):
|
|||
id_value = v[id_attr]
|
||||
id_value = id_field.to_internal_value(id_value)
|
||||
except (TypeError, KeyError, serializers.ValidationError):
|
||||
raise serializers.ValidationError("Invalid {}".format(id_attr))
|
||||
raise serializers.ValidationError(f"Invalid {id_attr}")
|
||||
|
||||
query_getter = conf.get(
|
||||
"get_query", lambda attr, value: models.Q(**{attr: value})
|
||||
|
|
|
@ -1,16 +1,13 @@
|
|||
from django import forms
|
||||
from django.db.models import Q
|
||||
|
||||
from django_filters import widgets
|
||||
from django_filters import rest_framework as filters
|
||||
from django_filters import widgets
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
|
||||
from . import fields
|
||||
from . import models
|
||||
from . import search
|
||||
from . import utils
|
||||
from . import fields, models, search, utils
|
||||
|
||||
|
||||
class NoneObject(object):
|
||||
class NoneObject:
|
||||
def __eq__(self, other):
|
||||
return other.__class__ == NoneObject
|
||||
|
||||
|
@ -49,9 +46,10 @@ class CoerceChoiceField(forms.ChoiceField):
|
|||
try:
|
||||
return [b for a, b in self.choices if v == a][0]
|
||||
except IndexError:
|
||||
raise forms.ValidationError("Invalid value {}".format(value))
|
||||
raise forms.ValidationError(f"Invalid value {value}")
|
||||
|
||||
|
||||
@extend_schema_field(bool)
|
||||
class NullBooleanFilter(filters.ChoiceFilter):
|
||||
field_class = CoerceChoiceField
|
||||
|
||||
|
@ -65,9 +63,7 @@ class NullBooleanFilter(filters.ChoiceFilter):
|
|||
return qs
|
||||
if value == NONE:
|
||||
value = None
|
||||
qs = self.get_method(qs)(
|
||||
**{"%s__%s" % (self.field_name, self.lookup_expr): value}
|
||||
)
|
||||
qs = self.get_method(qs)(**{f"{self.field_name}__{self.lookup_expr}": value})
|
||||
return qs.distinct() if self.distinct else qs
|
||||
|
||||
|
||||
|
@ -219,7 +215,7 @@ class ActorScopeFilter(filters.CharFilter):
|
|||
if not self.library_field:
|
||||
predicate = "pk__in"
|
||||
else:
|
||||
predicate = "{}__in".format(self.library_field)
|
||||
predicate = f"{self.library_field}__in"
|
||||
return Q(**{predicate: followed_libraries})
|
||||
|
||||
elif scope.startswith("actor:"):
|
||||
|
@ -227,7 +223,8 @@ class ActorScopeFilter(filters.CharFilter):
|
|||
username, domain = full_username.split("@")
|
||||
try:
|
||||
actor = federation_models.Actor.objects.get(
|
||||
preferred_username__iexact=username, domain_id=domain,
|
||||
preferred_username__iexact=username,
|
||||
domain_id=domain,
|
||||
)
|
||||
except federation_models.Actor.DoesNotExist:
|
||||
raise EmptyQuerySet()
|
||||
|
@ -235,7 +232,7 @@ class ActorScopeFilter(filters.CharFilter):
|
|||
return Q(**{self.actor_field: actor})
|
||||
elif scope.startswith("domain:"):
|
||||
domain = scope.split("domain:", 1)[1]
|
||||
return Q(**{"{}__domain_id".format(self.actor_field): domain})
|
||||
return Q(**{f"{self.actor_field}__domain_id": domain})
|
||||
else:
|
||||
raise EmptyQuerySet()
|
||||
|
||||
|
|
|
@ -0,0 +1,93 @@
|
|||
import os
|
||||
|
||||
import debugpy
|
||||
import uvicorn
|
||||
from django.core.management import call_command
|
||||
from django.core.management.commands.migrate import Command as BaseCommand
|
||||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.music.models import Library
|
||||
from funkwhale_api.users.models import User
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Manage gitpod environment"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
parser.add_argument("command", nargs="?", type=str)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
command = options["command"]
|
||||
|
||||
if not command:
|
||||
return self.show_help()
|
||||
|
||||
if command == "init":
|
||||
return self.init()
|
||||
|
||||
if command == "dev":
|
||||
return self.dev()
|
||||
|
||||
def show_help(self):
|
||||
self.stdout.write("")
|
||||
self.stdout.write("Available commands:")
|
||||
self.stdout.write("init - Initialize gitpod workspace")
|
||||
self.stdout.write("dev - Run Funkwhale in development mode with debug server")
|
||||
self.stdout.write("")
|
||||
|
||||
def init(self):
|
||||
try:
|
||||
user = User.objects.get(username="gitpod")
|
||||
except Exception:
|
||||
call_command(
|
||||
"createsuperuser",
|
||||
username="gitpod",
|
||||
email="gitpod@example.com",
|
||||
no_input=False,
|
||||
)
|
||||
user = User.objects.get(username="gitpod")
|
||||
|
||||
user.set_password("gitpod")
|
||||
if not user.actor:
|
||||
user.create_actor()
|
||||
|
||||
user.save()
|
||||
|
||||
# Allow anonymous access
|
||||
preferences.set("common__api_authentication_required", False)
|
||||
|
||||
# Download music catalog
|
||||
os.system(
|
||||
"git clone https://dev.funkwhale.audio/funkwhale/catalog.git /tmp/catalog"
|
||||
)
|
||||
os.system("mv -f /tmp/catalog/music /workspace/funkwhale/data")
|
||||
os.system("rm -rf /tmp/catalog/music")
|
||||
|
||||
# Import music catalog into library
|
||||
call_command(
|
||||
"create_library",
|
||||
"gitpod",
|
||||
name="funkwhale/catalog",
|
||||
privacy_level="everyone",
|
||||
)
|
||||
call_command(
|
||||
"import_files",
|
||||
Library.objects.get(actor=user.actor).uuid,
|
||||
"/workspace/funkwhale/data/music/",
|
||||
recursive=True,
|
||||
in_place=True,
|
||||
no_input=False,
|
||||
)
|
||||
|
||||
def dev(self):
|
||||
debugpy.listen(5678)
|
||||
uvicorn.run(
|
||||
"config.asgi:application",
|
||||
host="0.0.0.0",
|
||||
port=5000,
|
||||
reload=True,
|
||||
reload_dirs=[
|
||||
"/workspace/funkwhale/api/config/",
|
||||
"/workspace/funkwhale/api/funkwhale_api/",
|
||||
],
|
||||
)
|
|
@ -5,14 +5,12 @@ from django.conf import settings
|
|||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
|
||||
|
||||
from funkwhale_api.federation import keys
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.tags import models as tags_models
|
||||
from funkwhale_api.users import models as users_models
|
||||
|
||||
|
||||
BATCH_SIZE = 500
|
||||
|
||||
|
||||
|
@ -238,6 +236,7 @@ class Command(BaseCommand):
|
|||
|
||||
def handle(self, *args, **options):
|
||||
from django.apps import apps
|
||||
|
||||
from funkwhale_api import factories
|
||||
|
||||
app_names = [app.name for app in apps.app_configs.values()]
|
||||
|
|
|
@ -26,7 +26,7 @@ class Command(BaseCommand):
|
|||
script = available_scripts[name]
|
||||
except KeyError:
|
||||
raise CommandError(
|
||||
"{} is not a valid script. Run python manage.py script for a "
|
||||
"{} is not a valid script. Run funkwhale-manage for a "
|
||||
"list of available scripts".format(name)
|
||||
)
|
||||
|
||||
|
@ -43,14 +43,14 @@ class Command(BaseCommand):
|
|||
def show_help(self):
|
||||
self.stdout.write("")
|
||||
self.stdout.write("Available scripts:")
|
||||
self.stdout.write("Launch with: python manage.py <script_name>")
|
||||
self.stdout.write("Launch with: funkwhale-manage <script_name>")
|
||||
available_scripts = self.get_scripts()
|
||||
for name, script in sorted(available_scripts.items()):
|
||||
self.stdout.write("")
|
||||
self.stdout.write(self.style.SUCCESS(name))
|
||||
self.stdout.write("")
|
||||
for line in script["help"].splitlines():
|
||||
self.stdout.write(" {}".format(line))
|
||||
self.stdout.write(f" {line}")
|
||||
self.stdout.write("")
|
||||
|
||||
def get_scripts(self):
|
||||
|
|
|
@ -1,27 +1,23 @@
|
|||
import html
|
||||
import logging
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import time
|
||||
import tracemalloc
|
||||
import urllib.parse
|
||||
import xml.sax.saxutils
|
||||
|
||||
from django import http
|
||||
from django import http, urls
|
||||
from django.conf import settings
|
||||
from django.contrib import auth
|
||||
from django.core.cache import caches
|
||||
from django.middleware import csrf
|
||||
from django.contrib import auth
|
||||
from django import urls
|
||||
from rest_framework import views
|
||||
import tracemalloc
|
||||
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
|
||||
from . import preferences
|
||||
from . import session
|
||||
from . import throttling
|
||||
from . import utils
|
||||
from . import preferences, session, throttling, utils
|
||||
|
||||
EXCLUDED_PATHS = ["/api", "/federation", "/.well-known"]
|
||||
|
||||
|
@ -82,7 +78,7 @@ def serve_spa(request):
|
|||
# We add the style add the end of the body to ensure it has the highest
|
||||
# priority (since it will come after other stylesheets)
|
||||
body, tail = tail.split("</body>", 1)
|
||||
css = "<style>{}</style>".format(css)
|
||||
css = f"<style>{css}</style>"
|
||||
tail = body + "\n" + css + "\n</body>" + tail
|
||||
|
||||
# set a csrf token so that visitor can login / query API if needed
|
||||
|
@ -97,13 +93,13 @@ TITLE_REGEX = re.compile(r"<title>.*</title>")
|
|||
|
||||
|
||||
def replace_manifest_url(head, new_url):
|
||||
replacement = '<link rel=manifest href="{}">'.format(new_url)
|
||||
replacement = f'<link rel=manifest href="{new_url}">'
|
||||
head = MANIFEST_LINK_REGEX.sub(replacement, head)
|
||||
return head
|
||||
|
||||
|
||||
def replace_title(head, new_title):
|
||||
replacement = "<title>{}</title>".format(html.escape(new_title))
|
||||
replacement = f"<title>{html.escape(new_title)}</title>"
|
||||
head = TITLE_REGEX.sub(replacement, head)
|
||||
return head
|
||||
|
||||
|
@ -121,12 +117,14 @@ def get_spa_file(spa_url, name):
|
|||
# we try to open a local file
|
||||
with open(path, "rb") as f:
|
||||
return f.read().decode("utf-8")
|
||||
cache_key = "spa-file:{}:{}".format(spa_url, name)
|
||||
cache_key = f"spa-file:{spa_url}:{name}"
|
||||
cached = caches["local"].get(cache_key)
|
||||
if cached:
|
||||
return cached
|
||||
|
||||
response = session.get_session().get(utils.join_url(spa_url, name),)
|
||||
response = session.get_session().get(
|
||||
utils.join_url(spa_url, name),
|
||||
)
|
||||
response.raise_for_status()
|
||||
response.encoding = "utf-8"
|
||||
content = response.text
|
||||
|
@ -173,11 +171,7 @@ def render_tags(tags):
|
|||
yield "<{tag} {attrs} />".format(
|
||||
tag=tag.pop("tag"),
|
||||
attrs=" ".join(
|
||||
[
|
||||
'{}="{}"'.format(a, html.escape(str(v)))
|
||||
for a, v in sorted(tag.items())
|
||||
if v
|
||||
]
|
||||
[f'{a}="{html.escape(str(v))}"' for a, v in sorted(tag.items()) if v]
|
||||
),
|
||||
)
|
||||
|
||||
|
|
|
@ -36,8 +36,8 @@ class Migration(migrations.Migration):
|
|||
models.UUIDField(db_index=True, default=uuid.uuid4, unique=True),
|
||||
),
|
||||
("type", models.CharField(db_index=True, max_length=100)),
|
||||
("is_approved", models.NullBooleanField(default=None)),
|
||||
("is_applied", models.NullBooleanField(default=None)),
|
||||
("is_approved", models.BooleanField(default=None, null=True)),
|
||||
("is_applied", models.BooleanField(default=None, null=True)),
|
||||
(
|
||||
"creation_date",
|
||||
models.DateTimeField(
|
||||
|
|
|
@ -0,0 +1,39 @@
|
|||
# Generated by Django 3.2.13 on 2022-06-27 19:15
|
||||
|
||||
import django.core.serializers.json
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('common', '0008_auto_20200701_1317'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name='mutation',
|
||||
name='is_applied',
|
||||
field=models.BooleanField(default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mutation',
|
||||
name='is_approved',
|
||||
field=models.BooleanField(default=None, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mutation',
|
||||
name='payload',
|
||||
field=models.JSONField(encoder=django.core.serializers.json.DjangoJSONEncoder),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='mutation',
|
||||
name='previous_state',
|
||||
field=models.JSONField(default=None, encoder=django.core.serializers.json.DjangoJSONEncoder, null=True),
|
||||
),
|
||||
migrations.AlterField(
|
||||
model_name='pluginconfiguration',
|
||||
name='conf',
|
||||
field=models.JSONField(blank=True, null=True),
|
||||
),
|
||||
]
|
|
@ -1,10 +1,9 @@
|
|||
from rest_framework import serializers
|
||||
|
||||
from django.db.models import Q
|
||||
from django.shortcuts import get_object_or_404
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class MultipleLookupDetailMixin(object):
|
||||
class MultipleLookupDetailMixin:
|
||||
lookup_value_regex = "[^/]+"
|
||||
lookup_field = "composite"
|
||||
|
||||
|
|
|
@ -1,28 +1,24 @@
|
|||
import uuid
|
||||
import magic
|
||||
import mimetypes
|
||||
import uuid
|
||||
|
||||
from django.contrib.postgres.fields import JSONField
|
||||
import magic
|
||||
from django.conf import settings
|
||||
from django.contrib.contenttypes.fields import GenericForeignKey
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.conf import settings
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.db import connections, models, transaction
|
||||
from django.db.models import Lookup
|
||||
from django.db.models import JSONField, Lookup
|
||||
from django.db.models.fields import Field
|
||||
from django.db.models.sql.compiler import SQLCompiler
|
||||
from django.dispatch import receiver
|
||||
from django.utils import timezone
|
||||
from django.urls import reverse
|
||||
|
||||
from django.utils import timezone
|
||||
from versatileimagefield.fields import VersatileImageField
|
||||
from versatileimagefield.image_warmer import VersatileImageFieldWarmer
|
||||
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
|
||||
from . import utils
|
||||
from . import validators
|
||||
|
||||
from . import utils, validators
|
||||
|
||||
CONTENT_TEXT_MAX_LENGTH = 5000
|
||||
CONTENT_TEXT_SUPPORTED_TYPES = [
|
||||
|
@ -40,7 +36,7 @@ class NotEqual(Lookup):
|
|||
lhs, lhs_params = self.process_lhs(compiler, connection)
|
||||
rhs, rhs_params = self.process_rhs(compiler, connection)
|
||||
params = lhs_params + rhs_params
|
||||
return "%s <> %s" % (lhs, rhs), params
|
||||
return f"{lhs} <> {rhs}", params
|
||||
|
||||
|
||||
class NullsLastSQLCompiler(SQLCompiler):
|
||||
|
@ -81,8 +77,8 @@ class NullsLastQuerySet(models.QuerySet):
|
|||
class LocalFromFidQuerySet:
|
||||
def local(self, include=True):
|
||||
host = settings.FEDERATION_HOSTNAME
|
||||
query = models.Q(fid__startswith="http://{}/".format(host)) | models.Q(
|
||||
fid__startswith="https://{}/".format(host)
|
||||
query = models.Q(fid__startswith=f"http://{host}/") | models.Q(
|
||||
fid__startswith=f"https://{host}/"
|
||||
)
|
||||
if include:
|
||||
return self.filter(query)
|
||||
|
@ -116,10 +112,10 @@ class Mutation(models.Model):
|
|||
|
||||
type = models.CharField(max_length=100, db_index=True)
|
||||
# None = no choice, True = approved, False = refused
|
||||
is_approved = models.NullBooleanField(default=None)
|
||||
is_approved = models.BooleanField(default=None, null=True)
|
||||
|
||||
# None = not applied, True = applied, False = failed
|
||||
is_applied = models.NullBooleanField(default=None)
|
||||
is_applied = models.BooleanField(default=None, null=True)
|
||||
creation_date = models.DateTimeField(default=timezone.now, db_index=True)
|
||||
applied_date = models.DateTimeField(null=True, blank=True, db_index=True)
|
||||
summary = models.TextField(max_length=2000, null=True, blank=True)
|
||||
|
@ -187,7 +183,7 @@ class AttachmentQuerySet(models.QuerySet):
|
|||
field_query = ~models.Q(**{field: None})
|
||||
query = query | field_query if query else field_query
|
||||
|
||||
if include is False:
|
||||
if not include:
|
||||
query = ~query
|
||||
|
||||
return self.filter(query)
|
||||
|
@ -222,7 +218,8 @@ class Attachment(models.Model):
|
|||
validators=[
|
||||
validators.ImageDimensionsValidator(min_width=50, min_height=50),
|
||||
validators.FileValidator(
|
||||
allowed_extensions=["png", "jpg", "jpeg"], max_size=1024 * 1024 * 5,
|
||||
allowed_extensions=["png", "jpg", "jpeg"],
|
||||
max_size=1024 * 1024 * 5,
|
||||
),
|
||||
],
|
||||
)
|
||||
|
@ -365,7 +362,7 @@ CONTENT_FKS = {
|
|||
def remove_attached_content(sender, instance, **kwargs):
|
||||
fk_fields = CONTENT_FKS.get(instance._meta.label, [])
|
||||
for field in fk_fields:
|
||||
if getattr(instance, "{}_id".format(field)):
|
||||
if getattr(instance, f"{field}_id"):
|
||||
try:
|
||||
getattr(instance, field).delete()
|
||||
except Content.DoesNotExist:
|
||||
|
|
|
@ -1,8 +1,6 @@
|
|||
import persisting_theory
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
from django.db import models, transaction
|
||||
from rest_framework import serializers
|
||||
|
||||
|
||||
class ConfNotFound(KeyError):
|
||||
|
@ -45,7 +43,7 @@ class Registry(persisting_theory.Registry):
|
|||
|
||||
def has_perm(self, perm, type, obj, actor):
|
||||
if perm not in ["approve", "suggest"]:
|
||||
raise ValueError("Invalid permission {}".format(perm))
|
||||
raise ValueError(f"Invalid permission {perm}")
|
||||
conf = self.get_conf(type, obj)
|
||||
checker = conf["perm_checkers"].get(perm)
|
||||
if not checker:
|
||||
|
@ -56,7 +54,7 @@ class Registry(persisting_theory.Registry):
|
|||
try:
|
||||
type_conf = self[type]
|
||||
except KeyError:
|
||||
raise ConfNotFound("{} is not a registered mutation".format(type))
|
||||
raise ConfNotFound(f"{type} is not a registered mutation")
|
||||
|
||||
try:
|
||||
conf = type_conf[obj.__class__]
|
||||
|
@ -65,7 +63,7 @@ class Registry(persisting_theory.Registry):
|
|||
conf = type_conf[None]
|
||||
except KeyError:
|
||||
raise ConfNotFound(
|
||||
"No mutation configuration found for {}".format(obj.__class__)
|
||||
f"No mutation configuration found for {obj.__class__}"
|
||||
)
|
||||
return conf
|
||||
|
||||
|
|
|
@ -2,7 +2,6 @@ import operator
|
|||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.http import Http404
|
||||
|
||||
from rest_framework.permissions import BasePermission
|
||||
|
||||
from funkwhale_api.common import preferences
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
import json
|
||||
|
||||
from django import forms
|
||||
from django.contrib.postgres.forms import JSONField
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.forms import JSONField
|
||||
from dynamic_preferences import serializers, types
|
||||
from dynamic_preferences.registries import global_preferences_registry
|
||||
|
||||
|
||||
class DefaultFromSettingMixin(object):
|
||||
class DefaultFromSettingMixin:
|
||||
def get_default(self):
|
||||
return getattr(settings, self.setting)
|
||||
|
||||
|
@ -38,7 +38,7 @@ class StringListSerializer(serializers.BaseSerializer):
|
|||
|
||||
if type(value) not in [list, tuple]:
|
||||
raise cls.exception(
|
||||
"Cannot serialize, value {} is not a list or a tuple".format(value)
|
||||
f"Cannot serialize, value {value} is not a list or a tuple"
|
||||
)
|
||||
|
||||
if cls.sort:
|
||||
|
@ -57,7 +57,7 @@ class StringListPreference(types.BasePreferenceType):
|
|||
field_class = forms.MultipleChoiceField
|
||||
|
||||
def get_api_additional_data(self):
|
||||
d = super(StringListPreference, self).get_api_additional_data()
|
||||
d = super().get_api_additional_data()
|
||||
d["choices"] = self.get("choices")
|
||||
return d
|
||||
|
||||
|
@ -72,14 +72,14 @@ class JSONSerializer(serializers.BaseSerializer):
|
|||
data_serializer = cls.data_serializer_class(data=value)
|
||||
if not data_serializer.is_valid():
|
||||
raise cls.exception(
|
||||
"{} is not a valid value: {}".format(value, data_serializer.errors)
|
||||
f"{value} is not a valid value: {data_serializer.errors}"
|
||||
)
|
||||
value = data_serializer.validated_data
|
||||
try:
|
||||
return json.dumps(value, sort_keys=True)
|
||||
except TypeError:
|
||||
raise cls.exception(
|
||||
"Cannot serialize, value {} is not JSON serializable".format(value)
|
||||
f"Cannot serialize, value {value} is not JSON serializable"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
|
|
|
@ -0,0 +1,5 @@
|
|||
from rest_framework.renderers import JSONRenderer
|
||||
|
||||
|
||||
class ActivityStreamRenderer(JSONRenderer):
|
||||
media_type = "application/activity+json"
|
|
@ -1,9 +1,10 @@
|
|||
from . import create_actors
|
||||
from . import django_permissions_to_user_permissions
|
||||
from . import migrate_to_user_libraries
|
||||
from . import delete_pre_017_federated_uploads
|
||||
from . import test
|
||||
|
||||
from . import (
|
||||
create_actors,
|
||||
delete_pre_017_federated_uploads,
|
||||
django_permissions_to_user_permissions,
|
||||
migrate_to_user_libraries,
|
||||
test,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"create_actors",
|
||||
|
|
|
@ -9,15 +9,13 @@ from funkwhale_api.users.models import User, create_actor
|
|||
def main(command, **kwargs):
|
||||
qs = User.objects.filter(actor__isnull=True).order_by("username")
|
||||
total = len(qs)
|
||||
command.stdout.write("{} users found without actors".format(total))
|
||||
command.stdout.write(f"{total} users found without actors")
|
||||
for i, user in enumerate(qs):
|
||||
command.stdout.write(
|
||||
"{}/{} creating actor for {}".format(i + 1, total, user.username)
|
||||
)
|
||||
command.stdout.write(f"{i + 1}/{total} creating actor for {user.username}")
|
||||
try:
|
||||
user.actor = create_actor(user)
|
||||
except IntegrityError as e:
|
||||
# somehow, an actor with the the url exists in the database
|
||||
command.stderr.write("Error while creating actor: {}".format(str(e)))
|
||||
command.stderr.write(f"Error while creating actor: {str(e)}")
|
||||
continue
|
||||
user.save(update_fields=["actor"])
|
||||
|
|
|
@ -6,7 +6,6 @@ from versatileimagefield.image_warmer import VersatileImageFieldWarmer
|
|||
|
||||
from funkwhale_api.common.models import Attachment
|
||||
|
||||
|
||||
MODELS = [
|
||||
(Attachment, "file", "attachment_square"),
|
||||
]
|
||||
|
@ -14,7 +13,7 @@ MODELS = [
|
|||
|
||||
def main(command, **kwargs):
|
||||
for model, attribute, key_set in MODELS:
|
||||
qs = model.objects.exclude(**{"{}__isnull".format(attribute): True})
|
||||
qs = model.objects.exclude(**{f"{attribute}__isnull": True})
|
||||
qs = qs.exclude(**{attribute: ""})
|
||||
warmer = VersatileImageFieldWarmer(
|
||||
instance_or_queryset=qs,
|
||||
|
@ -22,10 +21,8 @@ def main(command, **kwargs):
|
|||
image_attr=attribute,
|
||||
verbose=True,
|
||||
)
|
||||
command.stdout.write(
|
||||
"Creating images for {} / {}".format(model.__name__, attribute)
|
||||
)
|
||||
command.stdout.write(f"Creating images for {model.__name__} / {attribute}")
|
||||
num_created, failed_to_create = warmer.warm()
|
||||
command.stdout.write(
|
||||
" {} created, {} in error".format(num_created, len(failed_to_create))
|
||||
f" {num_created} created, {len(failed_to_create)} in error"
|
||||
)
|
||||
|
|
|
@ -10,5 +10,5 @@ def main(command, **kwargs):
|
|||
source__startswith="http", source__contains="/federation/music/file/"
|
||||
).exclude(source__contains="youtube")
|
||||
total = queryset.count()
|
||||
command.stdout.write("{} uploads found".format(total))
|
||||
command.stdout.write(f"{total} uploads found")
|
||||
queryset.delete()
|
||||
|
|
|
@ -23,6 +23,6 @@ def main(command, **kwargs):
|
|||
total = users.count()
|
||||
|
||||
command.stdout.write(
|
||||
"Updating {} users with {} permission...".format(total, user_permission)
|
||||
f"Updating {total} users with {user_permission} permission..."
|
||||
)
|
||||
users.update(**{"permission_{}".format(user_permission): True})
|
||||
users.update(**{f"permission_{user_permission}": True})
|
||||
|
|
|
@ -12,12 +12,12 @@ This command will also generate federation ids for existing resources.
|
|||
"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.db.models import functions, CharField, F, Value
|
||||
from django.db.models import CharField, F, Value, functions
|
||||
|
||||
from funkwhale_api.common import preferences
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.music import models
|
||||
from funkwhale_api.users.models import User
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.common import preferences
|
||||
|
||||
|
||||
def create_libraries(open_api, stdout):
|
||||
|
@ -36,9 +36,7 @@ def create_libraries(open_api, stdout):
|
|||
)
|
||||
libraries_by_user[library.actor.user.pk] = library.pk
|
||||
if created:
|
||||
stdout.write(
|
||||
" * Created library {} for user {}".format(library.pk, a.user.pk)
|
||||
)
|
||||
stdout.write(f" * Created library {library.pk} for user {a.user.pk}")
|
||||
else:
|
||||
stdout.write(
|
||||
" * Found existing library {} for user {}".format(
|
||||
|
@ -60,13 +58,9 @@ def update_uploads(libraries_by_user, stdout):
|
|||
)
|
||||
total = candidates.update(library=library_id, import_status="finished")
|
||||
if total:
|
||||
stdout.write(
|
||||
" * Assigned {} uploads to user {}'s library".format(total, user_id)
|
||||
)
|
||||
stdout.write(f" * Assigned {total} uploads to user {user_id}'s library")
|
||||
else:
|
||||
stdout.write(
|
||||
" * No uploads to assign to user {}'s library".format(user_id)
|
||||
)
|
||||
stdout.write(f" * No uploads to assign to user {user_id}'s library")
|
||||
|
||||
|
||||
def update_orphan_uploads(open_api, stdout):
|
||||
|
@ -105,14 +99,12 @@ def update_orphan_uploads(open_api, stdout):
|
|||
def set_fid(queryset, path, stdout):
|
||||
model = queryset.model._meta.label
|
||||
qs = queryset.filter(fid=None)
|
||||
base_url = "{}{}".format(settings.FUNKWHALE_URL, path)
|
||||
stdout.write(
|
||||
"* Assigning federation ids to {} entries (path: {})".format(model, base_url)
|
||||
)
|
||||
base_url = f"{settings.FUNKWHALE_URL}{path}"
|
||||
stdout.write(f"* Assigning federation ids to {model} entries (path: {base_url})")
|
||||
new_fid = functions.Concat(Value(base_url), F("uuid"), output_field=CharField())
|
||||
total = qs.update(fid=new_fid)
|
||||
|
||||
stdout.write(" * {} entries updated".format(total))
|
||||
stdout.write(f" * {total} entries updated")
|
||||
|
||||
|
||||
def update_shared_inbox_url(stdout):
|
||||
|
@ -123,16 +115,16 @@ def update_shared_inbox_url(stdout):
|
|||
|
||||
|
||||
def generate_actor_urls(part, stdout):
|
||||
field = "{}_url".format(part)
|
||||
stdout.write("* Update {} for local actors...".format(field))
|
||||
field = f"{part}_url"
|
||||
stdout.write(f"* Update {field} for local actors...")
|
||||
|
||||
queryset = federation_models.Actor.objects.local().filter(**{field: None})
|
||||
base_url = "{}/federation/actors/".format(settings.FUNKWHALE_URL)
|
||||
base_url = f"{settings.FUNKWHALE_URL}/federation/actors/"
|
||||
|
||||
new_field = functions.Concat(
|
||||
Value(base_url),
|
||||
F("preferred_username"),
|
||||
Value("/{}".format(part)),
|
||||
Value(f"/{part}"),
|
||||
output_field=CharField(),
|
||||
)
|
||||
|
||||
|
|
|
@ -5,14 +5,13 @@ from django.db.models import Q
|
|||
|
||||
from . import utils
|
||||
|
||||
|
||||
QUERY_REGEX = re.compile(r'(((?P<key>\w+):)?(?P<value>"[^"]+"|[\S]+))')
|
||||
|
||||
|
||||
def parse_query(query):
|
||||
"""
|
||||
Given a search query such as "hello is:issue status:opened",
|
||||
returns a list of dictionnaries discribing each query token
|
||||
returns a list of dictionaries describing each query token
|
||||
"""
|
||||
matches = [m.groupdict() for m in QUERY_REGEX.finditer(query.lower())]
|
||||
for m in matches:
|
||||
|
@ -26,20 +25,20 @@ def normalize_query(
|
|||
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
|
||||
normspace=re.compile(r"\s{2,}").sub,
|
||||
):
|
||||
""" Splits the query string in invidual keywords, getting rid of unecessary spaces
|
||||
and grouping quoted words together.
|
||||
Example:
|
||||
"""Splits the query string in individual keywords, getting rid of unnecessary spaces
|
||||
and grouping quoted words together.
|
||||
Example:
|
||||
|
||||
>>> normalize_query(' some random words "with quotes " and spaces')
|
||||
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
|
||||
>>> normalize_query(' some random words "with quotes " and spaces')
|
||||
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
|
||||
|
||||
"""
|
||||
return [normspace(" ", (t[0] or t[1]).strip()) for t in findterms(query_string)]
|
||||
|
||||
|
||||
def get_query(query_string, search_fields):
|
||||
""" Returns a query, that is a combination of Q objects. That combination
|
||||
aims to search keywords within a model by testing the given search fields.
|
||||
"""Returns a query, that is a combination of Q objects. That combination
|
||||
aims to search keywords within a model by testing the given search fields.
|
||||
|
||||
"""
|
||||
query = None # Query to search for every search term
|
||||
|
@ -73,7 +72,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
|
|||
else:
|
||||
query_string = remove_chars(query_string, ['"', "&", "(", ")", "!", "'"])
|
||||
parts = query_string.replace(":", "").split(" ")
|
||||
parts = ["{}:*".format(p) for p in parts if p]
|
||||
parts = [f"{p}:*" for p in parts if p]
|
||||
if not parts:
|
||||
return Q(pk=None)
|
||||
|
||||
|
@ -98,7 +97,7 @@ def get_fts_query(query_string, fts_fields=["body_text"], model=None):
|
|||
)
|
||||
}
|
||||
).values_list("pk", flat=True)
|
||||
new_query = Q(**{"{}__in".format(fk_field_name): list(subquery)})
|
||||
new_query = Q(**{f"{fk_field_name}__in": list(subquery)})
|
||||
else:
|
||||
new_query = Q(
|
||||
**{
|
||||
|
@ -181,7 +180,7 @@ class SearchConfig:
|
|||
except KeyError:
|
||||
# no cleaning to apply
|
||||
value = token["value"]
|
||||
q = Q(**{"{}__icontains".format(to): value})
|
||||
q = Q(**{f"{to}__icontains": value})
|
||||
if not specific_field_query:
|
||||
specific_field_query = q
|
||||
else:
|
||||
|
|
|
@ -1,17 +1,17 @@
|
|||
import collections
|
||||
import io
|
||||
import PIL
|
||||
import os
|
||||
|
||||
from rest_framework import serializers
|
||||
|
||||
import PIL
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
||||
from . import models
|
||||
from . import utils
|
||||
from . import models, utils
|
||||
|
||||
|
||||
class RelatedField(serializers.RelatedField):
|
||||
|
@ -82,14 +82,14 @@ class RelatedField(serializers.RelatedField):
|
|||
)
|
||||
|
||||
|
||||
class Action(object):
|
||||
class Action:
|
||||
def __init__(self, name, allow_all=False, qs_filter=None):
|
||||
self.name = name
|
||||
self.allow_all = allow_all
|
||||
self.qs_filter = qs_filter
|
||||
|
||||
def __repr__(self):
|
||||
return "<Action {}>".format(self.name)
|
||||
return f"<Action {self.name}>"
|
||||
|
||||
|
||||
class ActionSerializer(serializers.Serializer):
|
||||
|
@ -113,7 +113,7 @@ class ActionSerializer(serializers.Serializer):
|
|||
)
|
||||
|
||||
for action in self.actions_by_name.keys():
|
||||
handler_name = "handle_{}".format(action)
|
||||
handler_name = f"handle_{action}"
|
||||
assert hasattr(self, handler_name), "{} miss a {} method".format(
|
||||
self.__class__.__name__, handler_name
|
||||
)
|
||||
|
@ -133,9 +133,9 @@ class ActionSerializer(serializers.Serializer):
|
|||
if value == "all":
|
||||
return self.queryset.all().order_by("id")
|
||||
if type(value) in [list, tuple]:
|
||||
return self.queryset.filter(
|
||||
**{"{}__in".format(self.pk_field): value}
|
||||
).order_by(self.pk_field)
|
||||
return self.queryset.filter(**{f"{self.pk_field}__in": value}).order_by(
|
||||
self.pk_field
|
||||
)
|
||||
|
||||
raise serializers.ValidationError(
|
||||
"{} is not a valid value for objects. You must provide either a "
|
||||
|
@ -270,6 +270,7 @@ class APIMutationSerializer(serializers.ModelSerializer):
|
|||
"previous_state",
|
||||
]
|
||||
|
||||
@extend_schema_field(OpenApiTypes.OBJECT)
|
||||
def get_target(self, obj):
|
||||
target = obj.target
|
||||
if not target:
|
||||
|
@ -280,7 +281,7 @@ class APIMutationSerializer(serializers.ModelSerializer):
|
|||
|
||||
def validate_type(self, value):
|
||||
if value not in self.context["registry"]:
|
||||
raise serializers.ValidationError("Invalid mutation type {}".format(value))
|
||||
raise serializers.ValidationError(f"Invalid mutation type {value}")
|
||||
return value
|
||||
|
||||
|
||||
|
@ -292,6 +293,7 @@ class AttachmentSerializer(serializers.Serializer):
|
|||
file = StripExifImageField(write_only=True)
|
||||
urls = serializers.SerializerMethodField()
|
||||
|
||||
@extend_schema_field(OpenApiTypes.OBJECT)
|
||||
def get_urls(self, o):
|
||||
urls = {}
|
||||
urls["source"] = o.url
|
||||
|
@ -310,14 +312,16 @@ class ContentSerializer(serializers.Serializer):
|
|||
text = serializers.CharField(
|
||||
max_length=models.CONTENT_TEXT_MAX_LENGTH, allow_null=True
|
||||
)
|
||||
content_type = serializers.ChoiceField(choices=models.CONTENT_TEXT_SUPPORTED_TYPES,)
|
||||
content_type = serializers.ChoiceField(
|
||||
choices=models.CONTENT_TEXT_SUPPORTED_TYPES,
|
||||
)
|
||||
html = serializers.SerializerMethodField()
|
||||
|
||||
def get_html(self, o):
|
||||
def get_html(self, o) -> str:
|
||||
return utils.render_html(o.text, o.content_type)
|
||||
|
||||
|
||||
class NullToEmptDict(object):
|
||||
class NullToEmptDict:
|
||||
def get_attribute(self, o):
|
||||
attr = super().get_attribute(o)
|
||||
if attr is None:
|
||||
|
@ -328,3 +332,36 @@ class NullToEmptDict(object):
|
|||
if not v:
|
||||
return v
|
||||
return super().to_representation(v)
|
||||
|
||||
|
||||
class ScopesSerializer(serializers.Serializer):
|
||||
id = serializers.CharField()
|
||||
rate = serializers.CharField()
|
||||
description = serializers.CharField()
|
||||
limit = serializers.IntegerField()
|
||||
duration = serializers.IntegerField()
|
||||
remaining = serializers.IntegerField()
|
||||
available = serializers.IntegerField()
|
||||
available_seconds = serializers.IntegerField()
|
||||
reset = serializers.IntegerField()
|
||||
reset_seconds = serializers.IntegerField()
|
||||
|
||||
|
||||
class IdentSerializer(serializers.Serializer):
|
||||
type = serializers.CharField()
|
||||
id = serializers.IntegerField()
|
||||
|
||||
|
||||
class RateLimitSerializer(serializers.Serializer):
|
||||
enabled = serializers.BooleanField()
|
||||
ident = IdentSerializer()
|
||||
scopes = serializers.ListField(child=ScopesSerializer())
|
||||
|
||||
|
||||
class ErrorDetailSerializer(serializers.Serializer):
|
||||
detail = serializers.CharField(source="*")
|
||||
|
||||
|
||||
class TextPreviewSerializer(serializers.Serializer):
|
||||
rendered = serializers.CharField(read_only=True, source="*")
|
||||
text = serializers.CharField(write_only=True)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import os
|
||||
import shutil
|
||||
import slugify
|
||||
|
||||
import slugify
|
||||
from django.core.files.storage import FileSystemStorage
|
||||
from storages.backends.s3boto3 import S3Boto3Storage
|
||||
|
||||
|
|
|
@ -11,10 +11,7 @@ from django.utils import timezone
|
|||
from funkwhale_api.common import channels
|
||||
from funkwhale_api.taskapp import celery
|
||||
|
||||
from . import models
|
||||
from . import serializers
|
||||
from . import session
|
||||
from . import signals
|
||||
from . import models, serializers, session, signals
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -1,10 +1,9 @@
|
|||
import collections
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from rest_framework import throttling as rest_throttling
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
def get_ident(user, request):
|
||||
if user and user.is_authenticated:
|
||||
|
|
|
@ -1,24 +1,21 @@
|
|||
import datetime
|
||||
import hashlib
|
||||
|
||||
from django.core.files.base import ContentFile
|
||||
from django.http import request
|
||||
from django.utils.deconstruct import deconstructible
|
||||
|
||||
import bleach.sanitizer
|
||||
import logging
|
||||
import markdown
|
||||
import os
|
||||
import shutil
|
||||
import uuid
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit
|
||||
|
||||
from django.conf import settings
|
||||
import bleach.sanitizer
|
||||
import markdown
|
||||
from django import urls
|
||||
from django.conf import settings
|
||||
from django.core.files.base import ContentFile
|
||||
from django.db import models, transaction
|
||||
from django.http import request
|
||||
from django.utils import timezone
|
||||
from django.utils.deconstruct import deconstructible
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -39,7 +36,7 @@ def rename_file(instance, field_name, new_name, allow_missing_file=False):
|
|||
field = getattr(instance, field_name)
|
||||
current_name, extension = os.path.splitext(field.name)
|
||||
|
||||
new_name_with_extension = "{}{}".format(new_name, extension)
|
||||
new_name_with_extension = f"{new_name}{extension}"
|
||||
try:
|
||||
shutil.move(field.path, new_name_with_extension)
|
||||
except FileNotFoundError:
|
||||
|
@ -74,7 +71,7 @@ def set_query_parameter(url, **kwargs):
|
|||
|
||||
|
||||
@deconstructible
|
||||
class ChunkedPath(object):
|
||||
class ChunkedPath:
|
||||
def sanitize_filename(self, filename):
|
||||
return filename.replace("/", "-")
|
||||
|
||||
|
@ -91,7 +88,7 @@ class ChunkedPath(object):
|
|||
parts = chunks[:3] + [filename]
|
||||
else:
|
||||
ext = os.path.splitext(filename)[1][1:].lower()
|
||||
new_filename = "".join(chunks[3:]) + ".{}".format(ext)
|
||||
new_filename = "".join(chunks[3:]) + f".{ext}"
|
||||
parts = chunks[:3] + [new_filename]
|
||||
return os.path.join(self.root, *parts)
|
||||
|
||||
|
@ -141,7 +138,7 @@ def chunk_queryset(source_qs, chunk_size):
|
|||
|
||||
def join_url(start, end):
|
||||
if end.startswith("http://") or end.startswith("https://"):
|
||||
# alread a full URL, joining makes no sense
|
||||
# already a full URL, joining makes no sense
|
||||
return end
|
||||
if start.endswith("/") and end.startswith("/"):
|
||||
return start + end[1:]
|
||||
|
@ -230,7 +227,7 @@ def replace_prefix(queryset, field, old, new):
|
|||
|
||||
on a whole table with a single query.
|
||||
"""
|
||||
qs = queryset.filter(**{"{}__startswith".format(field): old})
|
||||
qs = queryset.filter(**{f"{field}__startswith": old})
|
||||
# we extract the part after the old prefix, and Concat it with our new prefix
|
||||
update = models.functions.Concat(
|
||||
models.Value(new),
|
||||
|
@ -311,7 +308,7 @@ HTMl_CLEANER = bleach.sanitizer.Cleaner(strip=True, tags=SAFE_TAGS)
|
|||
HTML_PERMISSIVE_CLEANER = bleach.sanitizer.Cleaner(
|
||||
strip=True,
|
||||
tags=SAFE_TAGS + ["h1", "h2", "h3", "h4", "h5", "h6", "div", "section", "article"],
|
||||
attributes=["class", "rel", "alt", "title"],
|
||||
attributes=["class", "rel", "alt", "title", "href"],
|
||||
)
|
||||
|
||||
# support for additional tlds
|
||||
|
@ -356,7 +353,7 @@ def attach_content(obj, field, content_data):
|
|||
from . import models
|
||||
|
||||
content_data = content_data or {}
|
||||
existing = getattr(obj, "{}_id".format(field))
|
||||
existing = getattr(obj, f"{field}_id")
|
||||
|
||||
if existing:
|
||||
if same_content(getattr(obj, field), **content_data):
|
||||
|
@ -379,10 +376,9 @@ def attach_content(obj, field, content_data):
|
|||
|
||||
@transaction.atomic
|
||||
def attach_file(obj, field, file_data, fetch=False):
|
||||
from . import models
|
||||
from . import tasks
|
||||
from . import models, tasks
|
||||
|
||||
existing = getattr(obj, "{}_id".format(field))
|
||||
existing = getattr(obj, f"{field}_id")
|
||||
if existing:
|
||||
getattr(obj, field).delete()
|
||||
|
||||
|
@ -399,7 +395,7 @@ def attach_file(obj, field, file_data, fetch=False):
|
|||
name = [
|
||||
getattr(obj, field) for field in name_fields if getattr(obj, field, None)
|
||||
][0]
|
||||
filename = "{}-{}.{}".format(field, name, extension)
|
||||
filename = f"{field}-{name}.{extension}"
|
||||
if "url" in file_data:
|
||||
attachment.url = file_data["url"]
|
||||
else:
|
||||
|
@ -491,4 +487,4 @@ def get_file_hash(file, algo=None, chunk_size=None, full_read=False):
|
|||
# sometimes, it's useful to only hash the beginning of the file, e.g
|
||||
# to avoid a lot of I/O when crawling large libraries
|
||||
hash.update(file.read(chunk_size))
|
||||
return "{}:{}".format(algo, hash.hexdigest())
|
||||
return f"{algo}:{hash.hexdigest()}"
|
||||
|
|
|
@ -72,7 +72,7 @@ class ImageDimensionsValidator:
|
|||
|
||||
|
||||
@deconstructible
|
||||
class FileValidator(object):
|
||||
class FileValidator:
|
||||
"""
|
||||
Taken from https://gist.github.com/jrosebr1/2140738
|
||||
Validator for files, checking the size, extension and mimetype.
|
||||
|
@ -97,7 +97,7 @@ class FileValidator(object):
|
|||
"MIME type '%(mimetype)s' is not valid. Allowed types are: %(allowed_mimetypes)s."
|
||||
)
|
||||
min_size_message = _(
|
||||
"The current file %(size)s, which is too small. The minumum file size is %(allowed_size)s."
|
||||
"The current file %(size)s, which is too small. The minimum file size is %(allowed_size)s."
|
||||
)
|
||||
max_size_message = _(
|
||||
"The current file %(size)s, which is too large. The maximum file size is %(allowed_size)s."
|
||||
|
@ -163,5 +163,5 @@ class DomainValidator(validators.URLValidator):
|
|||
|
||||
If it fails, we know the domain is not valid.
|
||||
"""
|
||||
super().__call__("http://{}".format(value))
|
||||
super().__call__(f"http://{value}")
|
||||
return value
|
||||
|
|
|
@ -3,28 +3,26 @@ import time
|
|||
|
||||
from django.conf import settings
|
||||
from django.db import transaction
|
||||
|
||||
from drf_spectacular.utils import extend_schema
|
||||
from rest_framework import (
|
||||
exceptions,
|
||||
generics,
|
||||
mixins,
|
||||
permissions,
|
||||
response,
|
||||
views,
|
||||
viewsets,
|
||||
)
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework import exceptions
|
||||
from rest_framework import mixins
|
||||
from rest_framework import permissions
|
||||
from rest_framework import response
|
||||
from rest_framework import views
|
||||
from rest_framework import viewsets
|
||||
|
||||
from config import plugins
|
||||
|
||||
from funkwhale_api.common.serializers import (
|
||||
ErrorDetailSerializer,
|
||||
TextPreviewSerializer,
|
||||
)
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
||||
from . import filters
|
||||
from . import models
|
||||
from . import mutations
|
||||
from . import serializers
|
||||
from . import signals
|
||||
from . import tasks
|
||||
from . import throttling
|
||||
from . import utils
|
||||
|
||||
from . import filters, models, mutations, serializers, signals, tasks, throttling, utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
@ -78,6 +76,7 @@ class MutationViewSet(
|
|||
|
||||
return super().perform_destroy(instance)
|
||||
|
||||
@extend_schema(operation_id="approve_mutation")
|
||||
@action(detail=True, methods=["post"])
|
||||
@transaction.atomic
|
||||
def approve(self, request, *args, **kwargs):
|
||||
|
@ -107,6 +106,7 @@ class MutationViewSet(
|
|||
)
|
||||
return response.Response({}, status=200)
|
||||
|
||||
@extend_schema(operation_id="reject_mutation")
|
||||
@action(detail=True, methods=["post"])
|
||||
@transaction.atomic
|
||||
def reject(self, request, *args, **kwargs):
|
||||
|
@ -139,6 +139,7 @@ class MutationViewSet(
|
|||
class RateLimitView(views.APIView):
|
||||
permission_classes = []
|
||||
throttle_classes = []
|
||||
serializer_class = serializers.RateLimitSerializer
|
||||
|
||||
def get(self, request, *args, **kwargs):
|
||||
ident = throttling.get_ident(getattr(request, "user", None), request)
|
||||
|
@ -147,7 +148,7 @@ class RateLimitView(views.APIView):
|
|||
"ident": ident,
|
||||
"scopes": throttling.get_status(ident, time.time()),
|
||||
}
|
||||
return response.Response(data, status=200)
|
||||
return response.Response(serializers.RateLimitSerializer(data).data, status=200)
|
||||
|
||||
|
||||
class AttachmentViewSet(
|
||||
|
@ -197,20 +198,25 @@ class AttachmentViewSet(
|
|||
instance.delete()
|
||||
|
||||
|
||||
class TextPreviewView(views.APIView):
|
||||
class TextPreviewView(generics.GenericAPIView):
|
||||
permission_classes = []
|
||||
serializer_class = TextPreviewSerializer
|
||||
|
||||
@extend_schema(
|
||||
operation_id="preview_text",
|
||||
responses={200: TextPreviewSerializer, 400: ErrorDetailSerializer},
|
||||
)
|
||||
def post(self, request, *args, **kwargs):
|
||||
payload = request.data
|
||||
if "text" not in payload:
|
||||
return response.Response({"detail": "Invalid input"}, status=400)
|
||||
return response.Response(
|
||||
ErrorDetailSerializer("Invalid input").data, status=400
|
||||
)
|
||||
|
||||
permissive = payload.get("permissive", False)
|
||||
data = {
|
||||
"rendered": utils.render_html(
|
||||
payload["text"], "text/markdown", permissive=permissive
|
||||
)
|
||||
}
|
||||
data = TextPreviewSerializer(
|
||||
utils.render_html(payload["text"], "text/markdown", permissive=permissive)
|
||||
).data
|
||||
return response.Response(data, status=200)
|
||||
|
||||
|
||||
|
@ -272,6 +278,7 @@ class PluginViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
|
|||
user.plugins.filter(code=kwargs["pk"]).delete()
|
||||
return response.Response(status=204)
|
||||
|
||||
@extend_schema(operation_id="enable_plugin")
|
||||
@action(detail=True, methods=["post"])
|
||||
def enable(self, request, *args, **kwargs):
|
||||
user = request.user
|
||||
|
@ -280,6 +287,7 @@ class PluginViewSet(mixins.ListModelMixin, viewsets.GenericViewSet):
|
|||
plugins.enable_conf(kwargs["pk"], True, user)
|
||||
return response.Response({}, status=200)
|
||||
|
||||
@extend_schema(operation_id="disable_plugin")
|
||||
@action(detail=True, methods=["post"])
|
||||
def disable(self, request, *args, **kwargs):
|
||||
user = request.user
|
||||
|
|
|
@ -1 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue