Compare commits
399 Commits
Commits compared: c82d7bc73c … 9dbbe9e768 (399 in total).
@@ -7,6 +7,7 @@ nd
readby
serie
upto
afterall

# Names
nin

@@ -67,3 +67,6 @@ mailhog
*.sqlite3
api/music
api/media

# Docker state
.state
.env.dev

@@ -1,23 +0,0 @@
DJANGO_ALLOWED_HOSTS=.funkwhale.test,localhost,nginx,0.0.0.0,127.0.0.1,.gitpod.io
DJANGO_SETTINGS_MODULE=config.settings.local
DJANGO_SECRET_KEY=dev
C_FORCE_ROOT=true
FUNKWHALE_HOSTNAME=localhost
FUNKWHALE_PROTOCOL=http
PYTHONDONTWRITEBYTECODE=true
VUE_PORT=8080
MUSIC_DIRECTORY_PATH=/music
BROWSABLE_API_ENABLED=True
FORWARDED_PROTO=http
LDAP_ENABLED=False
FUNKWHALE_SPA_HTML_ROOT=http://nginx/
PYTHONTRACEMALLOC=0
MEDIA_ROOT=/data/media

# Uncomment this if you're using traefik/https
# FORCE_HTTPS_URLS=True

# Customize to your needs
POSTGRES_VERSION=11
DEBUG=true
TYPESENSE_API_KEY="apikey"
@@ -0,0 +1,58 @@
# api + celeryworker
DEBUG=True
DEFAULT_FROM_EMAIL=hello@funkwhale.test

FUNKWHALE_DOMAIN=funkwhale.test
FUNKWHALE_PROTOCOL=https

DJANGO_SECRET_KEY=dev
DJANGO_ALLOWED_HOSTS=.funkwhale.test,nginx
DJANGO_SETTINGS_MODULE=config.settings.local

DATABASE_URL=postgresql://postgres@postgres/postgres
CACHE_URL=redis://redis:6379/0
EMAIL_CONFIG=smtp://mailpit.funkwhale.test:1025

FORCE_HTTPS_URLS=True
EXTERNAL_REQUESTS_VERIFY_SSL=false

C_FORCE_ROOT=true
PYTHONDONTWRITEBYTECODE=true
PYTHONTRACEMALLOC=0

# api

FUNKWHALE_SPA_HTML_ROOT=http://nginx/
LDAP_ENABLED=False
BROWSABLE_API_ENABLED=True

# celeryworker

CELERYD_CONCURRENCY=0

# api + nginx

STATIC_ROOT=/staticfiles
MEDIA_ROOT=/data/media

# api + Typesense
TYPESENSE_API_KEY=apikey

# front

HOST=0.0.0.0
VUE_PORT=8080

# nginx

NGINX_MAX_BODY_SIZE=10G

FUNKWHALE_API_HOST=api
FUNKWHALE_API_PORT=5000

FUNKWHALE_FRONT_IP=front
FUNKWHALE_FRONT_PORT=${VUE_PORT}

# postgres

POSTGRES_HOST_AUTH_METHOD=trust
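The Django settings later in this diff read these variables through the `environ` helpers imported there; as a rough sketch of the kind of parsing involved (plain `os.environ` here, and the helper name is invented for illustration):

```python
import os

def env_bool(name: str, default: bool = False) -> bool:
    """Interpret common truthy spellings used in the env files above."""
    raw = os.environ.get(name)
    if raw is None:
        return default
    return raw.strip().lower() in {"1", "true", "yes", "on"}

# Example: DEBUG=True and FORCE_HTTPS_URLS=True from the env file above.
DEBUG = env_bool("DEBUG")
FORCE_HTTPS_URLS = env_bool("FORCE_HTTPS_URLS")
```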
@@ -1,3 +1,5 @@
/dist

### OSX ###
.DS_Store
.AppleDouble

@@ -83,10 +85,15 @@ front/yarn-debug.log*
front/yarn-error.log*
front/tests/unit/coverage
front/tests/e2e/reports
front/test_results.xml
front/coverage/
front/selenium-debug.log
docs/_build
#Tauri
front/tauri/gen

/data/
.state
.env

po/*.po

@@ -97,10 +104,20 @@ _build
# Docker
docker-bake.*.json
metadata.json
compose/var/test.*

# Linting
.eslintcache
tsconfig.tsbuildinfo

# Nix
.direnv/
.envrc
flake.nix
flake.lock

# Vscode
.vscode/

# Zed
.zed/
@ -144,13 +144,13 @@ find_broken_links:
|
|||
--cache
|
||||
--no-progress
|
||||
--exclude-all-private
|
||||
--exclude-mail
|
||||
--exclude 'demo\.funkwhale\.audio'
|
||||
--exclude 'nginx\.com'
|
||||
--exclude-path 'docs/_templates/'
|
||||
-- . || exit $?
|
||||
|
||||
require_changelog:
|
||||
allow_failure: false
|
||||
stage: lint
|
||||
rules:
|
||||
# Don't run on merge request that mention NOCHANGELOG or renovate bot commits
|
||||
|
@ -175,7 +175,8 @@ lint_api:
|
|||
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
|
||||
- changes: [api/**/*]
|
||||
|
||||
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
|
||||
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
|
||||
cache: *api_cache
|
||||
before_script:
|
||||
- cd api
|
||||
- make install
|
||||
|
@ -231,7 +232,7 @@ test_api:
|
|||
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:$PYTHON_VERSION
|
||||
parallel:
|
||||
matrix:
|
||||
- PYTHON_VERSION: ["3.8", "3.9", "3.10", "3.11"]
|
||||
- PYTHON_VERSION: ["3.10", "3.11", "3.12", "3.13"]
|
||||
services:
|
||||
- name: postgres:15-alpine
|
||||
command:
|
||||
|
@ -248,7 +249,7 @@ test_api:
|
|||
CACHE_URL: "redis://redis:6379/0"
|
||||
before_script:
|
||||
- cd api
|
||||
- poetry install --all-extras
|
||||
- make install
|
||||
script:
|
||||
- >
|
||||
poetry run pytest
|
||||
|
@ -288,6 +289,7 @@ test_front:
|
|||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: front/coverage/cobertura-coverage.xml
|
||||
coverage: '/All files\s+(?:\|\s+((?:\d+\.)?\d+)\s+){4}.*/'
|
||||
|
||||
build_metadata:
|
||||
stage: build
|
||||
|
@ -313,7 +315,7 @@ test_integration:
|
|||
interruptible: true
|
||||
|
||||
image:
|
||||
name: cypress/included:12.14.0
|
||||
name: cypress/included:13.6.4
|
||||
entrypoint: [""]
|
||||
cache:
|
||||
- *front_cache
|
||||
|
@ -337,7 +339,7 @@ build_api_schema:
|
|||
# Add build_docs rules because it depends on the build_api_schema artifact
|
||||
- changes: [docs/**/*]
|
||||
|
||||
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.11
|
||||
image: $CI_REGISTRY/funkwhale/ci/python-funkwhale-api:3.13
|
||||
services:
|
||||
- postgres:15-alpine
|
||||
- redis:7-alpine
|
||||
|
@ -351,10 +353,15 @@ build_api_schema:
|
|||
API_TYPE: "v1"
|
||||
before_script:
|
||||
- cd api
|
||||
- poetry install --all-extras
|
||||
- make install
|
||||
- poetry run funkwhale-manage migrate
|
||||
script:
|
||||
- poetry run funkwhale-manage spectacular --file ../docs/schema.yml
|
||||
- diff ../docs/schema.yml ./funkwhale_api/common/schema.yml || (
|
||||
echo "Schema files do not match! run sudo docker compose run --rm
|
||||
api funkwhale-manage spectacular > ./api/funkwhale_api/common/schema.yml" &&
|
||||
exit 1
|
||||
)
|
||||
artifacts:
|
||||
expire_in: 2 weeks
|
||||
paths:
|
||||
|
@ -430,6 +437,25 @@ build_api:
|
|||
paths:
|
||||
- api
|
||||
|
||||
# build_tauri:
|
||||
# stage: build
|
||||
# rules:
|
||||
# - if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
|
||||
# - changes: [front/**/*]
|
||||
|
||||
# image: $CI_REGISTRY/funkwhale/ci/node-tauri:18
|
||||
# variables:
|
||||
# <<: *keep_git_files_permissions
|
||||
# before_script:
|
||||
# - source /root/.cargo/env
|
||||
# - yarn install
|
||||
# script:
|
||||
# - yarn tauri build --verbose
|
||||
# artifacts:
|
||||
# name: desktop_${CI_COMMIT_REF_NAME}
|
||||
# paths:
|
||||
# - front/tauri/target/release/bundle/appimage/*.AppImage
|
||||
|
||||
deploy_docs:
|
||||
interruptible: false
|
||||
extends: .ssh-agent
|
||||
|
@ -462,7 +488,7 @@ docker:
|
|||
variables:
|
||||
BUILD_ARGS: >
|
||||
--set *.platform=linux/amd64,linux/arm64,linux/arm/v7
|
||||
--set *.no-cache
|
||||
--no-cache
|
||||
--push
|
||||
|
||||
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
|
||||
|
@ -473,7 +499,8 @@ docker:
|
|||
--set *.cache-to=type=registry,ref=$DOCKER_CACHE_IMAGE:$CI_COMMIT_BRANCH,mode=max,oci-mediatypes=false
|
||||
--push
|
||||
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
|
||||
- if: $CI_PIPELINE_SOURCE == "merge_request_event" && $CI_PROJECT_NAMESPACE == "funkwhale"
|
||||
# We don't provide privileged runners to everyone, so we can only build docker images in the funkwhale group
|
||||
variables:
|
||||
BUILD_ARGS: >
|
||||
--set *.platform=linux/amd64
|
||||
|
@ -508,3 +535,24 @@ docker:
|
|||
name: docker_metadata_${CI_COMMIT_REF_NAME}
|
||||
paths:
|
||||
- metadata.json
|
||||
|
||||
package:
|
||||
stage: publish
|
||||
needs:
|
||||
- job: build_metadata
|
||||
artifacts: true
|
||||
- job: build_api
|
||||
artifacts: true
|
||||
- job: build_front
|
||||
artifacts: true
|
||||
# - job: build_tauri
|
||||
# artifacts: true
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH =~ /(stable|develop)/
|
||||
|
||||
image: $CI_REGISTRY/funkwhale/ci/python:3.11
|
||||
variables:
|
||||
<<: *keep_git_files_permissions
|
||||
script:
|
||||
- make package
|
||||
- scripts/ci-upload-packages.sh
|
||||
|
|
|
@ -16,7 +16,7 @@
|
|||
"ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"],
|
||||
"packageRules": [
|
||||
{
|
||||
"matchPaths": ["api/*", "front/*", "docs/*"],
|
||||
"matchFileNames": ["api/*", "front/*", "docs/*"],
|
||||
"additionalBranchPrefix": "{{parentDir}}-",
|
||||
"semanticCommitScope": "{{parentDir}}"
|
||||
},
|
||||
|
@ -25,6 +25,16 @@
|
|||
"branchConcurrentLimit": 0,
|
||||
"prConcurrentLimit": 0
|
||||
},
|
||||
{
|
||||
"matchBaseBranches": ["develop"],
|
||||
"matchUpdateTypes": ["major"],
|
||||
"prPriority": 2
|
||||
},
|
||||
{
|
||||
"matchBaseBranches": ["develop"],
|
||||
"matchUpdateTypes": ["minor"],
|
||||
"prPriority": 1
|
||||
},
|
||||
{
|
||||
"matchUpdateTypes": ["major", "minor"],
|
||||
"matchBaseBranches": ["stable"],
|
||||
|
@ -35,12 +45,6 @@
|
|||
"matchBaseBranches": ["stable"],
|
||||
"enabled": false
|
||||
},
|
||||
{
|
||||
"matchUpdateTypes": ["patch", "pin", "digest"],
|
||||
"matchBaseBranches": ["develop"],
|
||||
"automerge": true,
|
||||
"automergeType": "branch"
|
||||
},
|
||||
{
|
||||
"matchManagers": ["npm"],
|
||||
"addLabels": ["Area::Frontend"]
|
||||
|
@ -50,20 +54,20 @@
|
|||
"addLabels": ["Area::Backend"]
|
||||
},
|
||||
{
|
||||
"matchPackagePatterns": ["^@vueuse/.*"],
|
||||
"groupName": "vueuse"
|
||||
"groupName": "vueuse",
|
||||
"matchDepNames": ["/^@vueuse/.*/"]
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["channels", "channels-redis", "daphne"],
|
||||
"matchDepNames": ["channels", "channels-redis", "daphne"],
|
||||
"groupName": "channels"
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["node"],
|
||||
"matchDepNames": ["node"],
|
||||
"allowedVersions": "/\\d+[02468]$/"
|
||||
},
|
||||
{
|
||||
"matchFiles": ["deploy/docker-compose.yml"],
|
||||
"matchPackageNames": ["postgres"],
|
||||
"matchFileNames": ["deploy/docker-compose.yml"],
|
||||
"matchDepNames": ["postgres"],
|
||||
"postUpgradeTasks": {
|
||||
"commands": [
|
||||
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
|
||||
|
@ -72,7 +76,7 @@
|
|||
}
|
||||
},
|
||||
{
|
||||
"matchPackageNames": ["python"],
|
||||
"matchDepNames": ["python"],
|
||||
"rangeStrategy": "widen"
|
||||
}
|
||||
]
|
||||
|
|
|
@@ -14,7 +14,7 @@ tasks:
docker-compose up -d

poetry env use python
poetry install
make install

gp ports await 5432

@@ -6,6 +6,8 @@ RUN sudo apt update -y \

RUN pyenv install 3.11 && pyenv global 3.11

RUN pip install poetry pre-commit \
RUN brew install neovim

RUN pip install poetry pre-commit jinja2 towncrier \
&& poetry config virtualenvs.create true \
&& poetry config virtualenvs.in-project true
@@ -28,15 +28,16 @@ services:
environment:
- "NGINX_MAX_BODY_SIZE=100M"
- "FUNKWHALE_API_IP=host.docker.internal"
- "FUNKWHALE_API_HOST=host.docker.internal"
- "FUNKWHALE_API_PORT=5000"
- "FUNKWHALE_FRONT_IP=host.docker.internal"
- "FUNKWHALE_FRONT_PORT=8080"
- "FUNKWHALE_HOSTNAME=${FUNKWHALE_HOSTNAME-host.docker.internal}"
- "FUNKWHALE_PROTOCOL=https"
volumes:
- ../data/media:/protected/media:ro
- ../data/media:/workspace/funkwhale/data/media:ro
- ../data/music:/music:ro
- ../data/staticfiles:/staticfiles:ro
- ../data/staticfiles:/usr/share/nginx/html/staticfiles/:ro
- ../deploy/funkwhale_proxy.conf:/etc/nginx/funkwhale_proxy.conf:ro
- ../docker/nginx/conf.dev:/etc/nginx/templates/default.conf.template:ro
- ../front:/frontend:ro
@@ -6,6 +6,7 @@ repos:
rev: v4.4.0
hooks:
- id: check-added-large-files
exclude: "api/funkwhale_api/common/schema.yml"
- id: check-case-conflict
- id: check-executables-have-shebangs
- id: check-shebang-scripts-are-executable

@@ -53,7 +54,7 @@ repos:
- id: isort

- repo: https://github.com/pycqa/flake8
rev: 6.0.0
rev: 6.1.0
hooks:
- id: flake8

@@ -62,6 +63,7 @@ repos:
hooks:
- id: prettier
files: \.(md|yml|yaml|json)$
exclude: "api/funkwhale_api/common/schema.yml"

- repo: https://github.com/codespell-project/codespell
rev: v2.2.6
CHANGELOG.md
@ -9,12 +9,13 @@ This changelog is viewable on the web at https://docs.funkwhale.audio/changelog.
|
|||
|
||||
<!-- towncrier -->
|
||||
|
||||
## 1.4.0-rc1 (2023-11-28)
|
||||
## 1.4.0 (2023-12-12)
|
||||
|
||||
Upgrade instructions are available at https://docs.funkwhale.audio/administrator/upgrade/index.html
|
||||
|
||||
Features:
|
||||
|
||||
- Add a management command to generate dummy notifications for testing
|
||||
- Add atom1.0 to node info services (#2085)
|
||||
- Add basic cypress testing
|
||||
- Add NodeInfo 2.1 (#2085)
|
||||
|
@ -25,14 +26,14 @@ Features:
|
|||
- Cache radio queryset into redis. New radio track endpoint for api v2 is /api/v2/radios/sessions/{radiosessionid}/tracks (#2135)
|
||||
- Create a testing environment in production for ListenBrainz recommendation engine (troi-recommendation-playground) (#1861)
|
||||
- Generate all nginx configurations from one template
|
||||
- New management command to update Uploads which have been imported using --in-place and are now stored in s3 (#2156)
|
||||
- Add option to only allow MusicBrainz tagged file on a pod (#2083)
|
||||
- New management command to update Uploads which have been imported using --in-place and are now
|
||||
stored in s3 (#2156)
|
||||
- Only allow MusicBrainz tagged file on a pod (#2083)
|
||||
- Prohibit the creation of new users using django's `createsuperuser` command in favor of our own CLI
|
||||
entry point. Run `funkwhale-manage fw users create --superuser` instead. (#1288)
|
||||
|
||||
Enhancements:
|
||||
|
||||
- Add a management command to generate dummy notifications for testing
|
||||
- Add custom logging functionality (#2155)
|
||||
- Adding typesense container and api client (2104)
|
||||
- Cache pip package in api docker builds (#2193)
|
||||
|
@ -50,9 +51,12 @@ Bugfixes:
|
|||
|
||||
- `postgres > db_dump.sql` cannot be used if the postgres container is stopped. Update command.
|
||||
- Avoid troi radio to give duplicates (#2231)
|
||||
- Do not cache all requests to avoid missing updates #2258
|
||||
- Fix broken nginx templates for docker setup (#2252)
|
||||
- Fix help messages for running scripts using funkwhale-manage
|
||||
- Fix missing og meta tags (#2208)
|
||||
- Fix multiarch docker builds #2211
|
||||
- Fix regression that prevent static files from being served in non-docker-deployments (#2256)
|
||||
- Fixed an issue where the copy button didn't copy the Embed code in the embed modal.
|
||||
- Fixed an issue with the nginx templates that caused issues when connecting to websockets.
|
||||
- Fixed development docker setup (2102)
|
||||
|
@ -96,6 +100,79 @@ Other:
|
|||
Removal:
|
||||
|
||||
- Drop support for python3.7
|
||||
- This release doesn't support Debian 10 anymore. If you are still on Debian 10, we recommend
|
||||
updating to a later version. Alternatively, install a supported Python version (>= Python 3.8). Python 3.11 is recommended.
|
||||
|
||||
Contributors to our Issues:
|
||||
|
||||
- AMoonRabbit
|
||||
- Alexandra Parker
|
||||
- ChengChung
|
||||
- Ciarán Ainsworth
|
||||
- Georg Krause
|
||||
- Ghost User
|
||||
- Johann Queuniet
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Kay Borowski
|
||||
- Marcos Peña
|
||||
- Mathieu Jourdan
|
||||
- Nicolas Derive
|
||||
- Virgile Robles
|
||||
- jooola
|
||||
- petitminion
|
||||
- theit8514
|
||||
|
||||
Contributors to our Merge Requests:
|
||||
|
||||
- AMoonRabbit
|
||||
- Alexander Dunkel
|
||||
- Alexander Torre
|
||||
- Ciarán Ainsworth
|
||||
- Georg Krause
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Kay Borowski
|
||||
- Marcos Peña
|
||||
- Mathieu Jourdan
|
||||
- Philipp Wolfer
|
||||
- Virgile Robles
|
||||
- interfect
|
||||
- jooola
|
||||
- petitminion
|
||||
|
||||
Committers:
|
||||
|
||||
- Aitor
|
||||
- Alexander Dunkel
|
||||
- alextprog
|
||||
- Aznörth Niryn
|
||||
- Ciarán Ainsworth
|
||||
- dignny
|
||||
- drakonicguy
|
||||
- Fun.k.whale Trad
|
||||
- Georg krause
|
||||
- Georg Krause
|
||||
- Jérémie Lorente
|
||||
- jo
|
||||
- jooola
|
||||
- josé m
|
||||
- Julian-Samuel Gebühr
|
||||
- JuniorJPDJ
|
||||
- Kasper Seweryn
|
||||
- Marcos Peña
|
||||
- Mathieu Jourdan
|
||||
- Matteo Piovanelli
|
||||
- Matyáš Caras
|
||||
- MhP
|
||||
- omarmaciasmolina
|
||||
- petitminion
|
||||
- Philipp Wolfer
|
||||
- ppom
|
||||
- Quentin PAGÈS
|
||||
- rinenweb
|
||||
- Thomas
|
||||
- Transcriber allium
|
||||
|
||||
## 1.3.4 (2023-11-16)
|
||||
|
||||
|
@ -325,13 +402,13 @@ Update instructions:
|
|||
2. Stop your containers using the **docker-compose** syntax.
|
||||
|
||||
```sh
|
||||
sudo docker-compose down
|
||||
docker compose down
|
||||
```
|
||||
|
||||
3. Bring the containers back up using the **docker compose** syntax.
|
||||
|
||||
```sh
|
||||
sudo docker compose up -d
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
After this you can continue to use the **docker compose** syntax for all Docker management tasks.
|
||||
|
|
Makefile
@ -17,3 +17,41 @@ docker-build: docker-metadata
|
|||
|
||||
build-metadata:
|
||||
./scripts/build_metadata.py --format env | tee build_metadata.env
|
||||
|
||||
BUILD_DIR = dist
|
||||
package:
|
||||
rm -Rf $(BUILD_DIR)
|
||||
mkdir -p $(BUILD_DIR)
|
||||
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-api.tar.gz' \
|
||||
--owner='root' \
|
||||
--group='root' \
|
||||
--exclude-vcs \
|
||||
api/config \
|
||||
api/funkwhale_api \
|
||||
api/install_os_dependencies.sh \
|
||||
api/manage.py \
|
||||
api/poetry.lock \
|
||||
api/pyproject.toml \
|
||||
api/Readme.md
|
||||
|
||||
cd '$(BUILD_DIR)' && \
|
||||
tar --extract --gunzip --file='funkwhale-api.tar.gz' && \
|
||||
zip -q 'funkwhale-api.zip' -r api && \
|
||||
rm -Rf api
|
||||
|
||||
tar --create --gunzip --file='$(BUILD_DIR)/funkwhale-front.tar.gz' \
|
||||
--owner='root' \
|
||||
--group='root' \
|
||||
--exclude-vcs \
|
||||
--transform='s/^front\/dist/front/' \
|
||||
front/dist
|
||||
|
||||
cd '$(BUILD_DIR)' && \
|
||||
tar --extract --gunzip --file='funkwhale-front.tar.gz' && \
|
||||
zip -q 'funkwhale-front.zip' -r front && \
|
||||
rm -Rf front
|
||||
|
||||
# cd '$(BUILD_DIR)' && \
|
||||
# cp ../front/tauri/target/release/bundle/appimage/funkwhale_*.AppImage FunkwhaleDesktop.AppImage
|
||||
|
||||
cd '$(BUILD_DIR)' && sha256sum * > SHA256SUMS
|
||||
|
|
api/Dockerfile
@ -1,124 +0,0 @@
|
|||
FROM alpine:3.17 as requirements
|
||||
|
||||
# We need this additional step to avoid having poetrys deps interacting with our
|
||||
# dependencies. This is only required until alpine 3.16 is released, since this
|
||||
# allows us to install poetry as package.
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
poetry \
|
||||
py3-cryptography \
|
||||
py3-pip \
|
||||
python3
|
||||
|
||||
COPY pyproject.toml poetry.lock /
|
||||
RUN set -eux; \
|
||||
poetry export --without-hashes --extras typesense > requirements.txt; \
|
||||
poetry export --without-hashes --with dev > dev-requirements.txt;
|
||||
|
||||
FROM alpine:3.17 as builder
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
cargo \
|
||||
curl \
|
||||
gcc \
|
||||
g++ \
|
||||
git \
|
||||
jpeg-dev \
|
||||
libffi-dev \
|
||||
libldap \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
make \
|
||||
musl-dev \
|
||||
openldap-dev \
|
||||
openssl-dev \
|
||||
postgresql-dev \
|
||||
zlib-dev \
|
||||
py3-cryptography=38.0.3-r1 \
|
||||
py3-lxml=4.9.3-r1 \
|
||||
py3-pillow=9.3.0-r0 \
|
||||
py3-psycopg2=2.9.5-r0 \
|
||||
py3-watchfiles=0.18.1-r0 \
|
||||
python3-dev
|
||||
|
||||
# Create virtual env
|
||||
RUN python3 -m venv --system-site-packages /venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
COPY --from=requirements /requirements.txt /requirements.txt
|
||||
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
|
||||
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
pip3 install --upgrade pip; \
|
||||
pip3 install setuptools wheel; \
|
||||
# Currently we are unable to relieably build rust-based packages on armv7. This
|
||||
# is why we need to use the packages shipped by Alpine Linux.
|
||||
# Since poetry does not allow in-place dependency pinning, we need
|
||||
# to install the deps using pip.
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography==38.0.3 \
|
||||
lxml==4.9.3 \
|
||||
pillow==9.3.0 \
|
||||
psycopg2==2.9.5 \
|
||||
watchfiles==0.18.1
|
||||
|
||||
ARG install_dev_deps=0
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
if [ "$install_dev_deps" = "1" ] ; then \
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography==38.0.3 \
|
||||
lxml==4.9.3 \
|
||||
pillow==9.3.0 \
|
||||
psycopg2==2.9.5 \
|
||||
watchfiles==0.18.1; \
|
||||
fi
|
||||
|
||||
FROM alpine:3.17 as production
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
bash \
|
||||
ffmpeg \
|
||||
gettext \
|
||||
jpeg-dev \
|
||||
libldap \
|
||||
libmagic \
|
||||
libpq \
|
||||
libxml2 \
|
||||
libxslt \
|
||||
py3-cryptography=38.0.3-r1 \
|
||||
py3-lxml=4.9.3-r1 \
|
||||
py3-pillow=9.3.0-r0 \
|
||||
py3-psycopg2=2.9.5-r0 \
|
||||
py3-watchfiles=0.18.1-r0 \
|
||||
python3 \
|
||||
tzdata
|
||||
|
||||
COPY --from=builder /venv /venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
COPY . /app
|
||||
WORKDIR /app
|
||||
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
pip3 install --no-deps --editable .
|
||||
|
||||
ENV IS_DOCKER_SETUP=true
|
||||
|
||||
CMD ["./docker/server.sh"]
|
|
@@ -0,0 +1 @@
Dockerfile.alpine
@ -0,0 +1,137 @@
|
|||
FROM alpine:3.21 AS requirements
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
poetry \
|
||||
py3-cryptography \
|
||||
py3-pip \
|
||||
python3
|
||||
|
||||
COPY pyproject.toml poetry.lock /
|
||||
RUN set -eux; \
|
||||
poetry export --without-hashes --extras typesense > requirements.txt; \
|
||||
poetry export --without-hashes --with dev > dev-requirements.txt;
|
||||
|
||||
FROM alpine:3.21 AS builder
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
ENV CARGO_NET_GIT_FETCH_WITH_CLI=true
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
cargo \
|
||||
curl \
|
||||
gcc \
|
||||
g++ \
|
||||
git \
|
||||
jpeg-dev \
|
||||
libffi-dev \
|
||||
libldap \
|
||||
libxml2-dev \
|
||||
libxslt-dev \
|
||||
make \
|
||||
musl-dev \
|
||||
openldap-dev \
|
||||
openssl-dev \
|
||||
postgresql-dev \
|
||||
zlib-dev \
|
||||
py3-cryptography \
|
||||
py3-lxml \
|
||||
py3-pillow \
|
||||
py3-psycopg2 \
|
||||
py3-watchfiles \
|
||||
python3-dev \
|
||||
gfortran \
|
||||
libgfortran \
|
||||
openblas-dev \
|
||||
py3-scipy \
|
||||
py3-scikit-learn;
|
||||
|
||||
# Create virtual env
|
||||
RUN python3 -m venv --system-site-packages /venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
COPY --from=requirements /requirements.txt /requirements.txt
|
||||
COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
|
||||
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
pip3 install --upgrade pip;
|
||||
|
||||
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
pip3 install setuptools wheel;
|
||||
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
# Currently we are unable to relieably build rust-based packages on armv7. This
|
||||
# is why we need to use the packages shipped by Alpine Linux.
|
||||
# Since poetry does not allow in-place dependency pinning, we need
|
||||
# to install the deps using pip.
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles|scipy|scikit-learn' /requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography \
|
||||
lxml \
|
||||
pillow \
|
||||
psycopg2 \
|
||||
watchfiles \
|
||||
scipy \
|
||||
scikit-learn;
|
||||
|
||||
ARG install_dev_deps=0
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
if [ "$install_dev_deps" = "1" ] ; then \
|
||||
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
|
||||
| pip3 install -r /dev/stdin \
|
||||
cryptography \
|
||||
lxml \
|
||||
pillow \
|
||||
psycopg2 \
|
||||
watchfiles; \
|
||||
fi
|
||||
|
||||
FROM alpine:3.21 AS production
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ARG PIP_NO_CACHE_DIR=1
|
||||
|
||||
RUN set -eux; \
|
||||
apk add --no-cache \
|
||||
bash \
|
||||
ffmpeg \
|
||||
gettext \
|
||||
jpeg-dev \
|
||||
libldap \
|
||||
libmagic \
|
||||
libpq \
|
||||
libxml2 \
|
||||
libxslt \
|
||||
py3-cryptography \
|
||||
py3-lxml \
|
||||
py3-pillow \
|
||||
py3-psycopg2 \
|
||||
py3-watchfiles \
|
||||
py3-scipy \
|
||||
py3-scikit-learn \
|
||||
python3 \
|
||||
tzdata
|
||||
|
||||
COPY --from=builder /venv /venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
COPY . /app
|
||||
WORKDIR /app
|
||||
|
||||
RUN apk add --no-cache gfortran
|
||||
RUN --mount=type=cache,target=~/.cache/pip; \
|
||||
set -eux; \
|
||||
pip3 install --no-deps --editable .
|
||||
|
||||
ENV IS_DOCKER_SETUP=true
|
||||
|
||||
CMD ["./docker/server.sh"]
|
|
@ -0,0 +1,71 @@
|
|||
FROM python:3.13-slim AS builder
|
||||
|
||||
ARG POETRY_VERSION=1.8
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
ENV VIRTUAL_ENV=/venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
ENV POETRY_HOME=/opt/poetry
|
||||
ENV POETRY_NO_INTERACTION=1
|
||||
ENV POETRY_VIRTUALENVS_IN_PROJECT=1
|
||||
ENV POETRY_VIRTUALENVS_CREATE=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
# Tell Poetry where to place its cache and virtual environment
|
||||
ENV POETRY_CACHE_DIR=/opt/.cache
|
||||
|
||||
RUN pip install "poetry==${POETRY_VERSION}"
|
||||
|
||||
RUN --mount=type=cache,target=/var/lib/apt/lists \
|
||||
apt update; \
|
||||
apt install -y \
|
||||
build-essential \
|
||||
python3-dev \
|
||||
libldap-dev \
|
||||
libsasl2-dev \
|
||||
slapd \
|
||||
ldap-utils \
|
||||
tox \
|
||||
lcov \
|
||||
valgrind
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY pyproject.toml .
|
||||
|
||||
RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bin/activate
|
||||
RUN --mount=type=cache,target=/opt/.cache \
|
||||
poetry install --no-root --extras typesense
|
||||
|
||||
FROM python:3.13-slim AS runtime
|
||||
|
||||
ARG POETRY_VERSION=1.8
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
ENV VIRTUAL_ENV=/venv
|
||||
ENV PATH="/venv/bin:$PATH"
|
||||
|
||||
RUN --mount=type=cache,target=/var/lib/apt/lists \
|
||||
apt update; \
|
||||
apt install -y \
|
||||
ffmpeg \
|
||||
gettext \
|
||||
libjpeg-dev \
|
||||
libldap-2.5-0 \
|
||||
libmagic1 \
|
||||
libpq5 \
|
||||
libxml2 \
|
||||
libxslt1.1
|
||||
RUN pip install "poetry==${POETRY_VERSION}"
|
||||
|
||||
COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY . /app
|
||||
RUN poetry install --extras typesense
|
||||
|
||||
CMD ["./docker/server.sh"]
|
|
@@ -4,11 +4,12 @@ CPU_CORES := $(shell N=$$(nproc); echo $$(( $$N > 4 ? 4 : $$N )))
.PHONY: install lint

install:
poetry install
poetry install --all-extras

lint:
poetry run pylint \
--jobs=$(CPU_CORES) \
--output-format=colorized \
--recursive=true \
--disable=C,R,W,I \
config funkwhale_api tests
@ -299,10 +299,31 @@ def background_task(name):
|
|||
|
||||
|
||||
# HOOKS
|
||||
TRIGGER_THIRD_PARTY_UPLOAD = "third_party_upload"
|
||||
"""
|
||||
Called when a track is being listened
|
||||
"""
|
||||
LISTENING_CREATED = "listening_created"
|
||||
"""
|
||||
Called when a track is being listened
|
||||
"""
|
||||
LISTENING_SYNC = "listening_sync"
|
||||
"""
|
||||
Called by the task manager to trigger listening sync
|
||||
"""
|
||||
FAVORITE_CREATED = "favorite_created"
|
||||
"""
|
||||
Called when a track is being favorited
|
||||
"""
|
||||
FAVORITE_DELETED = "favorite_deleted"
|
||||
"""
|
||||
Called when a favorited track is being unfavorited
|
||||
"""
|
||||
FAVORITE_SYNC = "favorite_sync"
|
||||
"""
|
||||
Called by the task manager to trigger favorite sync
|
||||
"""
|
||||
|
||||
SCAN = "scan"
|
||||
"""
|
||||
|
||||
|
|
|
@@ -1,7 +1,7 @@
from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
from django.conf.urls import url
from django.core.asgi import get_asgi_application
from django.urls import re_path

from funkwhale_api.instance import consumers

@@ -10,7 +10,12 @@ application = ProtocolTypeRouter(
# Empty for now (http->django views is added by default)
"websocket": AuthMiddlewareStack(
URLRouter(
[url("^api/v1/activity$", consumers.InstanceActivityConsumer.as_asgi())]
[
re_path(
"^api/v1/activity$",
consumers.InstanceActivityConsumer.as_asgi(),
)
]
)
),
"http": get_asgi_application(),
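The `url()` helper being replaced here (`django.conf.urls.url`) was removed in Django 4.0; `django.urls.re_path()` accepts the same regular-expression patterns, so routes carry over unchanged. A minimal sketch of the equivalence, with an invented view name for illustration:

```python
from django.http import HttpResponse
from django.urls import re_path


def activity_placeholder(request):
    # Stand-in view; the real project wires a Channels consumer here.
    return HttpResponse("ok")


urlpatterns = [
    # Previously: url(r"^api/v1/activity$", activity_placeholder)
    re_path(r"^api/v1/activity$", activity_placeholder),
]
```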
@@ -1,5 +1,3 @@
import os

from drf_spectacular.contrib.django_oauth_toolkit import OpenApiAuthenticationExtension
from drf_spectacular.plumbing import build_bearer_security_scheme_object

@@ -44,7 +42,6 @@ def custom_preprocessing_hook(endpoints):
filtered = []

# your modifications to the list of operations that are exposed in the schema
api_type = os.environ.get("API_TYPE", "v1")

for path, path_regex, method, callback in endpoints:
if path.startswith("/api/v1/providers"):

@@ -56,7 +53,7 @@ def custom_preprocessing_hook(endpoints):
if path.startswith("/api/v1/oauth/authorize"):
continue

if path.startswith(f"/api/{api_type}"):
if path.startswith("/api/v1") or path.startswith("/api/v2"):
filtered.append((path, path_regex, method, callback))

return filtered
@ -2,7 +2,7 @@ import logging.config
|
|||
import sys
|
||||
import warnings
|
||||
from collections import OrderedDict
|
||||
from urllib.parse import urlsplit
|
||||
from urllib.parse import urlparse, urlsplit
|
||||
|
||||
import environ
|
||||
from celery.schedules import crontab
|
||||
|
@ -114,6 +114,7 @@ else:
|
|||
logger.info("Loaded env file at %s/.env", path)
|
||||
break
|
||||
|
||||
FUNKWHALE_PLUGINS = env("FUNKWHALE_PLUGINS", default="")
|
||||
FUNKWHALE_PLUGINS_PATH = env(
|
||||
"FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/"
|
||||
)
|
||||
|
@@ -224,6 +225,16 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=[]) + [FUNKWHALE_HOSTNA
List of allowed hostnames for which the Funkwhale server will answer.
"""

CSRF_TRUSTED_ORIGINS = [
urlparse("//" + o, FUNKWHALE_PROTOCOL).geturl() for o in ALLOWED_HOSTS
]
"""
List of origins that are trusted for unsafe requests
We simply consider all allowed hosts to be trusted origins
See DJANGO_ALLOWED_HOSTS in .env.example for details
See https://docs.djangoproject.com/en/4.2/ref/settings/#csrf-trusted-origins
"""

# APP CONFIGURATION
# ------------------------------------------------------------------------------
DJANGO_APPS = (
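For context on the `CSRF_TRUSTED_ORIGINS` comprehension above: `urlparse()` treats a `//`-prefixed value as a network location and falls back to the scheme passed as its second argument, so `geturl()` returns a scheme-qualified origin. A small illustration with made-up hosts:

```python
from urllib.parse import urlparse

FUNKWHALE_PROTOCOL = "https"                     # sample value
ALLOWED_HOSTS = [".funkwhale.example", "nginx"]  # sample values

# Mirrors the settings snippet above: "//" makes urlparse read the value as
# a netloc, and the default scheme is prepended by geturl().
trusted = [urlparse("//" + host, FUNKWHALE_PROTOCOL).geturl() for host in ALLOWED_HOSTS]
print(trusted)  # ['https://.funkwhale.example', 'https://nginx']
```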
@ -269,6 +280,7 @@ LOCAL_APPS = (
|
|||
# Your stuff: custom apps go here
|
||||
"funkwhale_api.instance",
|
||||
"funkwhale_api.audio",
|
||||
"funkwhale_api.contrib.listenbrainz",
|
||||
"funkwhale_api.music",
|
||||
"funkwhale_api.requests",
|
||||
"funkwhale_api.favorites",
|
||||
|
@ -303,6 +315,7 @@ MIDDLEWARE = (
|
|||
tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True))
|
||||
+ tuple(ADDITIONAL_MIDDLEWARES_BEFORE)
|
||||
+ (
|
||||
"allauth.account.middleware.AccountMiddleware",
|
||||
"django.middleware.security.SecurityMiddleware",
|
||||
"django.middleware.clickjacking.XFrameOptionsMiddleware",
|
||||
"corsheaders.middleware.CorsMiddleware",
|
||||
|
@ -601,7 +614,20 @@ if AWS_ACCESS_KEY_ID:
|
|||
"""
|
||||
AWS_S3_CUSTOM_DOMAIN = env("AWS_S3_CUSTOM_DOMAIN", default=None)
|
||||
"""
|
||||
Custom domain to use for your S3 storage.
|
||||
Custom domain for serving your S3 files.
|
||||
|
||||
Useful if your provider offers a CDN-like service for your bucket.
|
||||
|
||||
.. important::
|
||||
|
||||
The URL must not contain a scheme (:attr:`AWS_S3_URL_PROTOCOL` is
|
||||
automatically prepended) nor a trailing slash.
|
||||
"""
|
||||
AWS_S3_URL_PROTOCOL = env("AWS_S3_URL_PROTOCOL", default="https:")
|
||||
"""
|
||||
Protocol to use when constructing the custom domain (see :attr:`AWS_S3_CUSTOM_DOMAIN`)
|
||||
.. important::
|
||||
It must end with a `:`, remove `//`.
|
||||
"""
|
||||
AWS_S3_ENDPOINT_URL = env("AWS_S3_ENDPOINT_URL", default=None)
|
||||
"""
|
||||
|
@ -830,7 +856,7 @@ If you're using password auth (the extra slash is important)
|
|||
.. note::
|
||||
|
||||
If you want to use Redis over unix sockets, you also need to update
|
||||
:attr:`CELERY_BROKER_URL`, because the scheme differ from the one used by
|
||||
:attr:`CELERY_BROKER_URL`, because the scheme differs from the one used by
|
||||
:attr:`CACHE_URL`.
|
||||
|
||||
"""
|
||||
|
@ -881,7 +907,7 @@ to use a different server or use Redis sockets to connect.
|
|||
|
||||
Example:
|
||||
|
||||
- ``redis://127.0.0.1:6379/0``
|
||||
- ``unix://127.0.0.1:6379/0``
|
||||
- ``redis+socket:///run/redis/redis.sock?virtual_host=0``
|
||||
|
||||
"""
|
||||
|
@ -942,13 +968,30 @@ CELERY_BEAT_SCHEDULE = {
|
|||
),
|
||||
"options": {"expires": 60 * 60},
|
||||
},
|
||||
"typesense.build_canonical_index": {
|
||||
"task": "typesense.build_canonical_index",
|
||||
"schedule": crontab(day_of_week="*/2", minute="0", hour="3"),
|
||||
"listenbrainz.trigger_listening_sync_with_listenbrainz": {
|
||||
"task": "listenbrainz.trigger_listening_sync_with_listenbrainz",
|
||||
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
|
||||
"options": {"expires": 60 * 60 * 24},
|
||||
},
|
||||
"listenbrainz.trigger_favorite_sync_with_listenbrainz": {
|
||||
"task": "listenbrainz.trigger_favorite_sync_with_listenbrainz",
|
||||
"schedule": crontab(day_of_week="*", minute="0", hour="3"),
|
||||
"options": {"expires": 60 * 60 * 24},
|
||||
},
|
||||
"tags.update_musicbrainz_genre": {
|
||||
"task": "tags.update_musicbrainz_genre",
|
||||
"schedule": crontab(day_of_month="2", minute="30", hour="3"),
|
||||
"options": {"expires": 60 * 60 * 24},
|
||||
},
|
||||
}
|
||||
|
||||
if env.str("TYPESENSE_API_KEY", default=None):
|
||||
CELERY_BEAT_SCHEDULE["typesense.build_canonical_index"] = {
|
||||
"task": "typesense.build_canonical_index",
|
||||
"schedule": crontab(day_of_week="*/2", minute="0", hour="3"),
|
||||
"options": {"expires": 60 * 60 * 24},
|
||||
}
|
||||
|
||||
if env.bool("ADD_ALBUM_TAGS_FROM_TRACKS", default=True):
|
||||
CELERY_BEAT_SCHEDULE["music.albums_set_tags_from_tracks"] = {
|
||||
"task": "music.albums_set_tags_from_tracks",
|
||||
|
@ -1193,7 +1236,7 @@ if BROWSABLE_API_ENABLED:
|
|||
"rest_framework.renderers.BrowsableAPIRenderer",
|
||||
)
|
||||
|
||||
REST_AUTH_SERIALIZERS = {
|
||||
REST_AUTH = {
|
||||
"PASSWORD_RESET_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetSerializer", # noqa
|
||||
"PASSWORD_RESET_CONFIRM_SERIALIZER": "funkwhale_api.users.serializers.PasswordResetConfirmSerializer", # noqa
|
||||
}
|
||||
|
|
|
@ -2,8 +2,7 @@
|
|||
Local settings
|
||||
|
||||
- Run in Debug mode
|
||||
- Use console backend for e-mails
|
||||
- Add Django Debug Toolbar
|
||||
- Add Django Debug Toolbar when INTERNAL_IPS are given and match the request
|
||||
- Add django-extensions as app
|
||||
"""
|
||||
|
||||
|
@ -25,11 +24,6 @@ SECRET_KEY = env(
|
|||
"DJANGO_SECRET_KEY", default="mc$&b=5j#6^bv7tld1gyjp2&+^-qrdy=0sw@r5sua*1zp4fmxc"
|
||||
)
|
||||
|
||||
# Mail settings
|
||||
# ------------------------------------------------------------------------------
|
||||
EMAIL_HOST = "localhost"
|
||||
EMAIL_PORT = 1025
|
||||
|
||||
# django-debug-toolbar
|
||||
# ------------------------------------------------------------------------------
|
||||
|
||||
|
@ -96,8 +90,6 @@ CELERY_TASK_ALWAYS_EAGER = False
|
|||
|
||||
# Your local stuff: Below this line define 3rd party library settings
|
||||
|
||||
CSRF_TRUSTED_ORIGINS = [o for o in ALLOWED_HOSTS]
|
||||
|
||||
REST_FRAMEWORK["DEFAULT_SCHEMA_CLASS"] = "funkwhale_api.schema.CustomAutoSchema"
|
||||
SPECTACULAR_SETTINGS = {
|
||||
"TITLE": "Funkwhale API",
|
||||
|
@ -150,4 +142,16 @@ MIDDLEWARE = (
|
|||
"funkwhale_api.common.middleware.PymallocMiddleware",
|
||||
) + MIDDLEWARE
|
||||
|
||||
TYPESENSE_API_KEY = "apikey"
|
||||
REST_FRAMEWORK.update(
|
||||
{
|
||||
"TEST_REQUEST_RENDERER_CLASSES": [
|
||||
"rest_framework.renderers.MultiPartRenderer",
|
||||
"rest_framework.renderers.JSONRenderer",
|
||||
"rest_framework.renderers.TemplateHTMLRenderer",
|
||||
"funkwhale_api.playlists.renderers.PlaylistXspfRenderer",
|
||||
],
|
||||
}
|
||||
)
|
||||
|
||||
# allows makemigrations and superuser creation
|
||||
FORCE = env("FORCE", default=1)
|
||||
|
|
|
@@ -41,14 +41,6 @@ SECRET_KEY = env("DJANGO_SECRET_KEY")
# SESSION_COOKIE_HTTPONLY = True
# SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)

# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
CSRF_TRUSTED_ORIGINS = ALLOWED_HOSTS

# END SITE CONFIGURATION

# Static Assets
# ------------------------
STATICFILES_STORAGE = "django.contrib.staticfiles.storage.StaticFilesStorage"

@@ -1,9 +0,0 @@
import os

os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")

from .common import * # noqa

DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"
@ -1,7 +1,6 @@
|
|||
from django.conf import settings
|
||||
from django.conf.urls import url
|
||||
from django.conf.urls.static import static
|
||||
from django.urls import include, path
|
||||
from django.urls import include, path, re_path
|
||||
from django.views import defaults as default_views
|
||||
|
||||
from config import plugins
|
||||
|
@ -10,34 +9,41 @@ from funkwhale_api.common import admin
|
|||
plugins_patterns = plugins.trigger_filter(plugins.URLS, [], enabled=True)
|
||||
|
||||
api_patterns = [
|
||||
url("v1/", include("config.urls.api")),
|
||||
url("v2/", include("config.urls.api_v2")),
|
||||
url("subsonic/", include("config.urls.subsonic")),
|
||||
re_path("v1/", include("config.urls.api")),
|
||||
re_path("v2/", include("config.urls.api_v2")),
|
||||
re_path("subsonic/", include("config.urls.subsonic")),
|
||||
]
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
# Django Admin, use {% url 'admin:index' %}
|
||||
url(settings.ADMIN_URL, admin.site.urls),
|
||||
url(r"^api/", include((api_patterns, "api"), namespace="api")),
|
||||
url(
|
||||
re_path(settings.ADMIN_URL, admin.site.urls),
|
||||
re_path(r"^api/", include((api_patterns, "api"), namespace="api")),
|
||||
re_path(
|
||||
r"^",
|
||||
include(
|
||||
("funkwhale_api.federation.urls", "federation"), namespace="federation"
|
||||
),
|
||||
),
|
||||
url(r"^api/v1/auth/", include("funkwhale_api.users.rest_auth_urls")),
|
||||
url(r"^accounts/", include("allauth.urls")),
|
||||
re_path(
|
||||
r"^api/v1/auth/",
|
||||
include("funkwhale_api.users.rest_auth_urls"),
|
||||
),
|
||||
re_path(
|
||||
r"^api/v2/auth/",
|
||||
include("funkwhale_api.users.rest_auth_urls"),
|
||||
),
|
||||
re_path(r"^accounts/", include("allauth.urls")),
|
||||
] + plugins_patterns
|
||||
|
||||
if settings.DEBUG:
|
||||
# This allows the error pages to be debugged during development, just visit
|
||||
# these url in browser to see how these error pages look like.
|
||||
urlpatterns += [
|
||||
url(r"^400/$", default_views.bad_request),
|
||||
url(r"^403/$", default_views.permission_denied),
|
||||
url(r"^404/$", default_views.page_not_found),
|
||||
url(r"^500/$", default_views.server_error),
|
||||
re_path(r"^400/$", default_views.bad_request),
|
||||
re_path(r"^403/$", default_views.permission_denied),
|
||||
re_path(r"^404/$", default_views.page_not_found),
|
||||
re_path(r"^500/$", default_views.server_error),
|
||||
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
|
||||
if "debug_toolbar" in settings.INSTALLED_APPS:
|
||||
|
@ -49,5 +55,5 @@ if settings.DEBUG:
|
|||
|
||||
if "silk" in settings.INSTALLED_APPS:
|
||||
urlpatterns = [
|
||||
url(r"^api/silk/", include("silk.urls", namespace="silk"))
|
||||
re_path(r"^api/silk/", include("silk.urls", namespace="silk"))
|
||||
] + urlpatterns
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
from django.conf.urls import include, url
|
||||
from django.conf.urls import include
|
||||
from django.urls import re_path
|
||||
|
||||
from funkwhale_api.activity import views as activity_views
|
||||
from funkwhale_api.audio import views as audio_views
|
||||
|
@ -28,61 +29,61 @@ router.register(r"attachments", common_views.AttachmentViewSet, "attachments")
|
|||
v1_patterns = router.urls
|
||||
|
||||
v1_patterns += [
|
||||
url(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
|
||||
url(
|
||||
re_path(r"^oembed/$", views.OembedView.as_view(), name="oembed"),
|
||||
re_path(
|
||||
r"^instance/",
|
||||
include(("funkwhale_api.instance.urls", "instance"), namespace="instance"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^manage/",
|
||||
include(("funkwhale_api.manage.urls", "manage"), namespace="manage"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^moderation/",
|
||||
include(
|
||||
("funkwhale_api.moderation.urls", "moderation"), namespace="moderation"
|
||||
),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^federation/",
|
||||
include(
|
||||
("funkwhale_api.federation.api_urls", "federation"), namespace="federation"
|
||||
),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^providers/",
|
||||
include(("funkwhale_api.providers.urls", "providers"), namespace="providers"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^favorites/",
|
||||
include(("funkwhale_api.favorites.urls", "favorites"), namespace="favorites"),
|
||||
),
|
||||
url(r"^search$", views.Search.as_view(), name="search"),
|
||||
url(
|
||||
re_path(r"^search$", views.Search.as_view(), name="search"),
|
||||
re_path(
|
||||
r"^radios/",
|
||||
include(("funkwhale_api.radios.urls", "radios"), namespace="radios"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^history/",
|
||||
include(("funkwhale_api.history.urls", "history"), namespace="history"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^",
|
||||
include(("funkwhale_api.users.api_urls", "users"), namespace="users"),
|
||||
),
|
||||
# XXX: remove if Funkwhale 1.1
|
||||
url(
|
||||
re_path(
|
||||
r"^users/",
|
||||
include(("funkwhale_api.users.api_urls", "users"), namespace="users-nested"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^oauth/",
|
||||
include(("funkwhale_api.users.oauth.urls", "oauth"), namespace="oauth"),
|
||||
),
|
||||
url(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
|
||||
url(
|
||||
re_path(r"^rate-limit/?$", common_views.RateLimitView.as_view(), name="rate-limit"),
|
||||
re_path(
|
||||
r"^text-preview/?$", common_views.TextPreviewView.as_view(), name="text-preview"
|
||||
),
|
||||
]
|
||||
|
||||
urlpatterns = [url("", include((v1_patterns, "v1"), namespace="v1"))]
|
||||
urlpatterns = [re_path("", include((v1_patterns, "v1"), namespace="v1"))]
|
||||
|
|
|
@ -1,19 +1,36 @@
|
|||
from django.conf.urls import include, url
|
||||
from django.conf.urls import include
|
||||
from django.urls import re_path
|
||||
|
||||
from funkwhale_api.common import routers as common_routers
|
||||
|
||||
from . import api
|
||||
|
||||
router = common_routers.OptionalSlashRouter()
|
||||
v2_patterns = router.urls
|
||||
|
||||
v2_patterns += [
|
||||
url(
|
||||
re_path(
|
||||
r"^instance/",
|
||||
include(("funkwhale_api.instance.urls_v2", "instance"), namespace="instance"),
|
||||
),
|
||||
url(
|
||||
re_path(
|
||||
r"^radios/",
|
||||
include(("funkwhale_api.radios.urls_v2", "radios"), namespace="radios"),
|
||||
),
|
||||
]
|
||||
|
||||
urlpatterns = [url("", include((v2_patterns, "v2"), namespace="v2"))]
|
||||
v2_paths = {
|
||||
pattern.pattern.regex.pattern
|
||||
for pattern in v2_patterns
|
||||
if hasattr(pattern.pattern, "regex")
|
||||
}
|
||||
|
||||
filtered_v1_patterns = [
|
||||
pattern
|
||||
for pattern in api.v1_patterns
|
||||
if pattern.pattern.regex.pattern not in v2_paths
|
||||
]
|
||||
|
||||
v2_patterns += filtered_v1_patterns
|
||||
|
||||
urlpatterns = [re_path("", include((v2_patterns, "v2"), namespace="v2"))]
|
||||
|
|
|
@@ -1,4 +1,5 @@
from django.conf.urls import include, url
from django.conf.urls import include
from django.urls import re_path
from rest_framework import routers
from rest_framework.urlpatterns import format_suffix_patterns

@@ -8,7 +9,9 @@ subsonic_router = routers.SimpleRouter(trailing_slash=False)
subsonic_router.register(r"rest", SubsonicViewSet, basename="subsonic")

subsonic_patterns = format_suffix_patterns(subsonic_router.urls, allowed=["view"])
urlpatterns = [url("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))]
urlpatterns = [
re_path("", include((subsonic_patterns, "subsonic"), namespace="subsonic"))
]

# urlpatterns = [
# url(
@@ -9,5 +9,5 @@ funkwhale-manage migrate
exec gunicorn config.asgi:application \
--workers "${FUNKWHALE_WEB_WORKERS-1}" \
--worker-class uvicorn.workers.UvicornWorker \
--bind 0.0.0.0:5000 \
--bind 0.0.0.0:"${FUNKWHALE_API_PORT}" \
${GUNICORN_ARGS-}
@ -38,13 +38,27 @@ def combined_recent(limit, **kwargs):
|
|||
|
||||
|
||||
def get_activity(user, limit=20):
|
||||
query = fields.privacy_level_query(user, lookup_field="user__privacy_level")
|
||||
query = fields.privacy_level_query(
|
||||
user, "actor__user__privacy_level", "actor__user"
|
||||
)
|
||||
querysets = [
|
||||
Listening.objects.filter(query).select_related(
|
||||
"track", "user", "track__artist", "track__album__artist"
|
||||
Listening.objects.filter(query)
|
||||
.select_related(
|
||||
"track",
|
||||
"actor",
|
||||
)
|
||||
.prefetch_related(
|
||||
"track__artist_credit__artist",
|
||||
"track__album__artist_credit__artist",
|
||||
),
|
||||
TrackFavorite.objects.filter(query).select_related(
|
||||
"track", "user", "track__artist", "track__album__artist"
|
||||
TrackFavorite.objects.filter(query)
|
||||
.select_related(
|
||||
"track",
|
||||
"actor",
|
||||
)
|
||||
.prefetch_related(
|
||||
"track__artist_credit__artist",
|
||||
"track__album__artist_credit__artist",
|
||||
),
|
||||
]
|
||||
records = combined_recent(limit=limit, querysets=querysets)
|
||||
|
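The queryset changes above follow from `artist_credit` being a many-to-many relation: it cannot be joined with `select_related()`, so it moves to `prefetch_related()`. A hedged sketch of the resulting query shape:

```python
# Hedged sketch: foreign keys stay in select_related(), while the artist_credit
# many-to-many has to be fetched with prefetch_related() (it cannot be JOINed).
from funkwhale_api.history.models import Listening

listenings = (
    Listening.objects.all()
    .select_related("track", "actor")  # resolved in the same SQL query
    .prefetch_related(  # one additional query per listed relation
        "track__artist_credit__artist",
        "track__album__artist_credit__artist",
    )
)
```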
|
|
@ -21,7 +21,11 @@ TAG_FILTER = common_filters.MultipleQueryFilter(method=filter_tags)
|
|||
|
||||
class ChannelFilter(moderation_filters.HiddenContentFilterSet):
|
||||
q = fields.SearchFilter(
|
||||
search_fields=["artist__name", "actor__summary", "actor__preferred_username"]
|
||||
search_fields=[
|
||||
"artist_credit__artist__name",
|
||||
"actor__summary",
|
||||
"actor__preferred_username",
|
||||
]
|
||||
)
|
||||
tag = TAG_FILTER
|
||||
scope = common_filters.ActorScopeFilter(actor_field="attributed_to", distinct=True)
|
||||
|
|
|
@ -26,6 +26,7 @@ from funkwhale_api.federation import serializers as federation_serializers
|
|||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.moderation import mrf
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import tasks
|
||||
from funkwhale_api.music.serializers import COVER_WRITE_FIELD, CoverField
|
||||
from funkwhale_api.tags import models as tags_models
|
||||
from funkwhale_api.tags import serializers as tags_serializers
|
||||
|
@ -246,11 +247,14 @@ class SimpleChannelArtistSerializer(serializers.Serializer):
|
|||
description = common_serializers.ContentSerializer(allow_null=True, required=False)
|
||||
cover = CoverField(allow_null=True, required=False)
|
||||
channel = serializers.UUIDField(allow_null=True, required=False)
|
||||
tracks_count = serializers.IntegerField(source="_tracks_count", required=False)
|
||||
tracks_count = serializers.SerializerMethodField(required=False)
|
||||
tags = serializers.ListField(
|
||||
child=serializers.CharField(), source="_prefetched_tagged_items", required=False
|
||||
)
|
||||
|
||||
def get_tracks_count(self, o) -> int:
|
||||
return getattr(o, "_tracks_count", 0)
|
||||
|
||||
|
||||
class ChannelSerializer(serializers.ModelSerializer):
|
||||
artist = SimpleChannelArtistSerializer()
|
||||
|
@ -749,7 +753,7 @@ class RssFeedItemSerializer(serializers.Serializer):
|
|||
else:
|
||||
existing_track = (
|
||||
music_models.Track.objects.filter(
|
||||
uuid=expected_uuid, artist__channel=channel
|
||||
uuid=expected_uuid, artist_credit__artist__channel=channel
|
||||
)
|
||||
.select_related("description", "attachment_cover")
|
||||
.first()
|
||||
|
@ -765,7 +769,6 @@ class RssFeedItemSerializer(serializers.Serializer):
|
|||
"disc_number": validated_data.get("itunes_season", 1) or 1,
|
||||
"position": validated_data.get("itunes_episode", 1) or 1,
|
||||
"title": validated_data["title"],
|
||||
"artist": channel.artist,
|
||||
}
|
||||
)
|
||||
if "rights" in validated_data:
|
||||
|
@ -801,6 +804,21 @@ class RssFeedItemSerializer(serializers.Serializer):
|
|||
**track_kwargs,
|
||||
defaults=track_defaults,
|
||||
)
|
||||
|
||||
# channels only have one artist, so we can safely update artist_credit

|
||||
defaults = {
|
||||
"artist": channel.artist,
|
||||
"credit": channel.artist.name,
|
||||
"joinphrase": "",
|
||||
}
|
||||
query = (
|
||||
Q(artist=channel.artist) & Q(credit=channel.artist.name) & Q(joinphrase="")
|
||||
)
|
||||
artist_credit = tasks.get_best_candidate_or_create(
|
||||
music_models.ArtistCredit, query, defaults, ["artist", "joinphrase"]
|
||||
)
|
||||
track.artist_credit.set([artist_credit[0]])
|
||||
|
||||
# optimisation for reducing SQL queries, because we cannot use select_related with
|
||||
# update or create, so we restore the cache by hand
|
||||
if existing_track:
|
||||
|
|
|
@ -27,7 +27,7 @@ ARTIST_PREFETCH_QS = (
|
|||
"attachment_cover",
|
||||
)
|
||||
.prefetch_related(music_views.TAG_PREFETCH)
|
||||
.annotate(_tracks_count=Count("tracks"))
|
||||
.annotate(_tracks_count=Count("artist_credit__tracks"))
|
||||
)
|
||||
|
||||
|
||||
|
@ -103,7 +103,7 @@ class ChannelViewSet(
|
|||
queryset = super().get_queryset()
|
||||
if self.action == "retrieve":
|
||||
queryset = queryset.annotate(
|
||||
_downloads_count=Sum("artist__tracks__downloads_count")
|
||||
_downloads_count=Sum("artist__artist_credit__tracks__downloads_count")
|
||||
)
|
||||
return queryset
|
||||
|
||||
|
@ -192,7 +192,6 @@ class ChannelViewSet(
|
|||
if object.attributed_to == actors.get_service_actor():
|
||||
# external feed, we redirect to the canonical one
|
||||
return http.HttpResponseRedirect(object.rss_url)
|
||||
|
||||
uploads = (
|
||||
object.library.uploads.playable_by(None)
|
||||
.prefetch_related(
|
||||
|
|
|
@ -49,6 +49,7 @@ def handler_create_user(
|
|||
utils.logger.warn("Unknown permission %s", permission)
|
||||
utils.logger.debug("Creating actor…")
|
||||
user.actor = models.create_actor(user)
|
||||
models.create_user_libraries(user)
|
||||
user.save()
|
||||
return user
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
from allauth.account.utils import send_email_confirmation
|
||||
from allauth.account.models import EmailAddress
|
||||
from django.core.cache import cache
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils.translation import gettext as _
|
||||
from oauth2_provider.contrib.rest_framework.authentication import (
|
||||
OAuth2Authentication as BaseOAuth2Authentication,
|
||||
)
|
||||
|
@ -20,9 +20,13 @@ def resend_confirmation_email(request, user):
|
|||
if cache.get(cache_key):
|
||||
return False
|
||||
|
||||
done = send_email_confirmation(request, user)
|
||||
# We send the confirmation by hand because we don't want to pass the request down
|
||||
# to the email rendering, which would raise another UnverifiedEmail exception and restart the sending
|
||||
# again and again
|
||||
email = EmailAddress.objects.get_for_user(user, user.email)
|
||||
email.send_confirmation()
|
||||
cache.set(cache_key, True, THROTTLE_DELAY)
|
||||
return done
|
||||
return True
|
||||
|
||||
|
||||
class OAuth2Authentication(BaseOAuth2Authentication):
|
||||
|
|
|
@ -24,8 +24,20 @@ def privacy_level_query(user, lookup_field="privacy_level", user_field="user"):
|
|||
if user.is_anonymous:
|
||||
return models.Q(**{lookup_field: "everyone"})
|
||||
|
||||
return models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]}) | models.Q(
|
||||
**{lookup_field: "me", user_field: user}
|
||||
followers_query = models.Q(
|
||||
**{
|
||||
f"{lookup_field}": "followers",
|
||||
f"{user_field}__actor__in": user.actor.get_approved_followings(),
|
||||
}
|
||||
)
|
||||
# Federated TrackFavorites don't have a user associated with trackfavorite.actor
|
||||
no_user_query = models.Q(**{f"{user_field}__isnull": True})
|
||||
|
||||
return (
|
||||
models.Q(**{f"{lookup_field}__in": ["instance", "everyone"]})
|
||||
| models.Q(**{lookup_field: "me", user_field: user})
|
||||
| followers_query
|
||||
| no_user_query
|
||||
)
|
||||
|
||||
|
||||
|
|
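The extra branches above add follower-scoped visibility and rows whose actor has no local user. A hedged usage sketch (the view function and request are placeholders, not the project's code):

```python
# Hedged usage sketch for the extended privacy_level_query() above; `request`
# stands for an incoming Django/DRF request.
from funkwhale_api.common import fields
from funkwhale_api.favorites.models import TrackFavorite

def visible_favorites(request):
    query = fields.privacy_level_query(
        request.user, "actor__user__privacy_level", "actor__user"
    )
    # Matches "instance"/"everyone" levels, the user's own "me" rows, rows from
    # approved followings ("followers"), and actor-only rows with no local user.
    return TrackFavorite.objects.filter(query)
```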
|
@ -1,5 +1,4 @@
|
|||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.management.commands.createsuperuser import (
|
||||
Command as BaseCommand,
|
||||
)
|
||||
|
@ -12,7 +11,8 @@ class Command(BaseCommand):
|
|||
Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour.
|
||||
We therefore prohibit the execution of the command.
|
||||
"""
|
||||
if not os.environ.get("FORCE") == "1":
|
||||
force = settings.FORCE
|
||||
if not force == 1:
|
||||
raise CommandError(
|
||||
"Running createsuperuser on your Funkwhale instance bypasses some of our checks "
|
||||
"which can lead to unexpected behavior of your instance. We therefore suggest to "
|
||||
|
|
|
@ -68,22 +68,33 @@ def create_taggable_items(dependency):
|
|||
|
||||
|
||||
CONFIG = [
|
||||
{
|
||||
"id": "artist_credit",
|
||||
"model": music_models.ArtistCredit,
|
||||
"factory": "music.ArtistCredit",
|
||||
"factory_kwargs": {"joinphrase": ""},
|
||||
"depends_on": [
|
||||
{"field": "artist", "id": "artists", "default_factor": 0.5},
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "tracks",
|
||||
"model": music_models.Track,
|
||||
"factory": "music.Track",
|
||||
"factory_kwargs": {"artist": None, "album": None},
|
||||
"factory_kwargs": {"album": None},
|
||||
"depends_on": [
|
||||
{"field": "album", "id": "albums", "default_factor": 0.1},
|
||||
{"field": "artist", "id": "artists", "default_factor": 0.05},
|
||||
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.05},
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": "albums",
|
||||
"model": music_models.Album,
|
||||
"factory": "music.Album",
|
||||
"factory_kwargs": {"artist": None},
|
||||
"depends_on": [{"field": "artist", "id": "artists", "default_factor": 0.3}],
|
||||
"factory_kwargs": {},
|
||||
"depends_on": [
|
||||
{"field": "artist_credit", "id": "artist_credit", "default_factor": 0.3}
|
||||
],
|
||||
},
|
||||
{"id": "artists", "model": music_models.Artist, "factory": "music.Artist"},
|
||||
{
|
||||
|
@ -310,12 +321,23 @@ class Command(BaseCommand):
|
|||
candidates = list(queryset.values_list("pk", flat=True))
|
||||
picked_pks = [random.choice(candidates) for _ in objects]
|
||||
picked_objects = {o.pk: o for o in queryset.filter(pk__in=picked_pks)}
|
||||
|
||||
saved_obj = []
|
||||
for i, obj in enumerate(objects):
|
||||
if create_dependencies:
|
||||
value = random.choice(candidates)
|
||||
else:
|
||||
value = picked_objects[picked_pks[i]]
|
||||
setattr(obj, dependency["field"], value)
|
||||
if dependency["field"] == "artist_credit":
|
||||
obj.save()
|
||||
obj.artist_credit.set([value])
|
||||
saved_obj.append(obj)
|
||||
|
||||
else:
|
||||
setattr(obj, dependency["field"], value)
|
||||
if saved_obj:
|
||||
return saved_obj
|
||||
|
||||
if not handler:
|
||||
objects = row["model"].objects.bulk_create(objects, batch_size=BATCH_SIZE)
|
||||
results[row["id"]] = objects
|
||||
|
|
|
@ -1,5 +1,4 @@
|
|||
import os
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import CommandError
|
||||
from django.core.management.commands.makemigrations import Command as BaseCommand
|
||||
|
||||
|
@ -11,8 +10,8 @@ class Command(BaseCommand):
|
|||
|
||||
We ensure the command is disabled, unless a specific env var is provided.
|
||||
"""
|
||||
force = os.environ.get("FORCE") == "1"
|
||||
if not force:
|
||||
force = settings.FORCE
|
||||
if not force == 1:
|
||||
raise CommandError(
|
||||
"Running makemigrations on your Funkwhale instance can have desastrous"
|
||||
" consequences. This command is disabled, and should only be run in "
|
||||
|
|
|
@ -10,7 +10,7 @@ class Command(BaseCommand):
|
|||
|
||||
self.help = "Helper to generate randomized testdata"
|
||||
self.type_choices = {"notifications": self.handle_notifications}
|
||||
self.missing_args_message = f"Please specify one of the following sub-commands: { *self.type_choices.keys(), }"
|
||||
self.missing_args_message = f"Please specify one of the following sub-commands: {*self.type_choices.keys(), }"
|
||||
|
||||
def add_arguments(self, parser):
|
||||
subparsers = parser.add_subparsers(dest="subcommand")
|
||||
|
|
|
@ -60,12 +60,12 @@ class NullsLastSQLCompiler(SQLCompiler):
|
|||
class NullsLastQuery(models.sql.query.Query):
|
||||
"""Use a custom compiler to inject 'NULLS LAST' (for PostgreSQL)."""
|
||||
|
||||
def get_compiler(self, using=None, connection=None):
|
||||
def get_compiler(self, using=None, connection=None, elide_empty=True):
|
||||
if using is None and connection is None:
|
||||
raise ValueError("Need either using or connection")
|
||||
if using:
|
||||
connection = connections[using]
|
||||
return NullsLastSQLCompiler(self, connection, using)
|
||||
return NullsLastSQLCompiler(self, connection, using, elide_empty)
|
||||
|
||||
|
||||
class NullsLastQuerySet(models.QuerySet):
|
||||
|
|
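The signature change above tracks Django 4, where `Query.get_compiler()` gained an `elide_empty` argument that overrides must accept and forward. A hedged usage sketch of what the queryset is for (model and app label are assumptions):

```python
# Hedged sketch (model and app label are assumptions): NullsLastQuerySet routes
# queries through NullsLastQuery so PostgreSQL sorts NULL values last, per the
# compiler docstring above.
from django.db import models

class Release(models.Model):
    release_date = models.DateField(null=True)
    objects = NullsLastQuerySet.as_manager()  # class defined just below this hunk

    class Meta:
        app_label = "music"

# Rows with a NULL release_date now sort after dated rows:
Release.objects.order_by("-release_date")
```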
|
@ -56,3 +56,59 @@ class OwnerPermission(BasePermission):
|
|||
if not owner or not request.user.is_authenticated or owner != request.user:
|
||||
raise owner_exception
|
||||
return True
|
||||
|
||||
|
||||
class PrivacyLevelPermission(BasePermission):
|
||||
"""
|
||||
Ensure the request actor has access to the object, considering the privacy level configuration
|
||||
of the user.
|
||||
request.user is None for requests authenticated as an actor; otherwise it is an AnonymousUser when the user is not authenticated.
|
||||
"""
|
||||
|
||||
def has_object_permission(self, request, view, obj):
|
||||
if (
|
||||
not hasattr(obj, "user")
|
||||
and hasattr(obj, "actor")
|
||||
and not obj.actor.is_local
|
||||
):
|
||||
# it's a remote actor object. It should be public.
|
||||
# But we could trigger an update of the remote actor data
|
||||
# to avoid leaking data (#2326)
|
||||
return True
|
||||
|
||||
privacy_level = (
|
||||
obj.actor.user.privacy_level
|
||||
if hasattr(obj, "actor")
|
||||
else obj.user.privacy_level
|
||||
)
|
||||
obj_actor = obj.actor if hasattr(obj, "actor") else obj.user.actor
|
||||
|
||||
if privacy_level == "everyone":
|
||||
return True
|
||||
|
||||
# user is anonymous
|
||||
if hasattr(request, "actor"):
|
||||
request_actor = request.actor
|
||||
elif request.user and request.user.is_authenticated:
|
||||
request_actor = request.user.actor
|
||||
else:
|
||||
return False
|
||||
|
||||
if privacy_level == "instance":
|
||||
# user is local
|
||||
if request.user and hasattr(request.user, "actor"):
|
||||
return True
|
||||
elif hasattr(request, "actor") and request.actor and request.actor.is_local:
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
elif privacy_level == "me" and obj_actor == request_actor:
|
||||
return True
|
||||
|
||||
elif privacy_level == "followers" and (
|
||||
request_actor in obj.user.actor.get_approved_followers()
|
||||
):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
|
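A hedged sketch of how a DRF view could wire in the new `PrivacyLevelPermission`; the import path assumes it lives next to `OwnerPermission` in the common permissions module, and the viewset itself is a placeholder rather than the project's actual wiring.

```python
# Hedged sketch; the import path and viewset are assumptions.
from rest_framework import viewsets

from funkwhale_api.common import permissions as common_permissions
from funkwhale_api.favorites import models, serializers

class FavoriteReadViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = models.TrackFavorite.objects.all()
    serializer_class = serializers.UserTrackFavoriteSerializer
    # has_object_permission() runs on detail routes (retrieve), enforcing the
    # owner's privacy_level ("everyone" / "instance" / "me" / "followers").
    permission_classes = [common_permissions.PrivacyLevelPermission]
```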
|
@ -2,7 +2,7 @@ import json
|
|||
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.postgres.forms import JSONField
|
||||
from django.forms import JSONField
|
||||
from dynamic_preferences import serializers, types
|
||||
from dynamic_preferences.registries import global_preferences_registry
|
||||
|
||||
|
@ -93,7 +93,6 @@ class SerializedPreference(types.BasePreferenceType):
|
|||
serializer
|
||||
"""
|
||||
|
||||
serializer = JSONSerializer
|
||||
data_serializer_class = None
|
||||
field_class = JSONField
|
||||
widget = forms.Textarea
|
||||
|
|
File diff suppressed because it is too large
|
@ -5,8 +5,8 @@ import os
|
|||
import PIL
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from django.utils.encoding import smart_text
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from drf_spectacular.types import OpenApiTypes
|
||||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
@ -52,7 +52,7 @@ class RelatedField(serializers.RelatedField):
|
|||
self.fail(
|
||||
"does_not_exist",
|
||||
related_field_name=self.related_field_name,
|
||||
value=smart_text(data),
|
||||
value=smart_str(data),
|
||||
)
|
||||
except (TypeError, ValueError):
|
||||
self.fail("invalid")
|
||||
|
@ -293,7 +293,17 @@ class AttachmentSerializer(serializers.Serializer):
|
|||
file = StripExifImageField(write_only=True)
|
||||
urls = serializers.SerializerMethodField()
|
||||
|
||||
@extend_schema_field(OpenApiTypes.OBJECT)
|
||||
@extend_schema_field(
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"original": {"type": "string"},
|
||||
"small_square_crop": {"type": "string"},
|
||||
"medium_square_crop": {"type": "string"},
|
||||
"large_square_crop": {"type": "string"},
|
||||
},
|
||||
}
|
||||
)
|
||||
def get_urls(self, o):
|
||||
urls = {}
|
||||
urls["source"] = o.url
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import django.dispatch
|
||||
|
||||
mutation_created = django.dispatch.Signal(providing_args=["mutation"])
|
||||
mutation_updated = django.dispatch.Signal(
|
||||
providing_args=["mutation", "old_is_approved", "new_is_approved"]
|
||||
)
|
||||
""" Required args: mutation """
|
||||
mutation_created = django.dispatch.Signal()
|
||||
""" Required args: mutation, old_is_approved, new_is_approved """
|
||||
mutation_updated = django.dispatch.Signal()
|
||||
|
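`providing_args` was removed from `django.dispatch.Signal` in Django 4; the expected arguments are now only documented in the docstrings above and simply passed to `send()`. A hedged dispatch sketch (the import path and the `mutation` object are placeholders):

```python
# Hedged sketch of dispatching the refactored signals; the import path and the
# `mutation` object are placeholders.
from funkwhale_api.common import signals

signals.mutation_created.send(sender=None, mutation=mutation)
signals.mutation_updated.send(
    sender=None,
    mutation=mutation,
    old_is_approved=False,
    new_is_approved=True,
)
```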
|
|
@ -6,7 +6,7 @@ from django.core.exceptions import ValidationError
|
|||
from django.core.files.images import get_image_dimensions
|
||||
from django.template.defaultfilters import filesizeformat
|
||||
from django.utils.deconstruct import deconstructible
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
|
||||
@deconstructible
|
||||
|
|
|
@ -0,0 +1,13 @@
|
|||
import logging
|
||||
|
||||
from config import plugins
|
||||
from funkwhale_api.contrib.archivedl import tasks
|
||||
|
||||
from .funkwhale_startup import PLUGIN
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
|
||||
def lauch_download(track, conf={}):
|
||||
tasks.archive_download.delay(track_id=track.pk, conf=conf)
|
|
@ -0,0 +1,10 @@
|
|||
from config import plugins
|
||||
|
||||
PLUGIN = plugins.get_plugin_config(
|
||||
name="archivedl",
|
||||
label="Archive-dl",
|
||||
description="",
|
||||
version="0.1",
|
||||
user=False,
|
||||
conf=[],
|
||||
)
|
|
@ -0,0 +1,148 @@
|
|||
import asyncio
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
import urllib.parse
|
||||
|
||||
import requests
|
||||
from django.core.files import File
|
||||
from django.utils import timezone
|
||||
|
||||
from funkwhale_api.federation import actors
|
||||
from funkwhale_api.music import models, utils
|
||||
from funkwhale_api.taskapp import celery
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def create_upload(url, track, files_data):
|
||||
mimetype = f"audio/{files_data.get('format', 'unknown')}"
|
||||
duration = files_data.get("mtime", 0)
|
||||
filesize = files_data.get("size", 0)
|
||||
bitrate = files_data.get("bitrate", 0)
|
||||
|
||||
service_library = models.Library.objects.create(
|
||||
privacy_level="everyone",
|
||||
actor=actors.get_service_actor(),
|
||||
)
|
||||
|
||||
return models.Upload.objects.create(
|
||||
mimetype=mimetype,
|
||||
source=url,
|
||||
third_party_provider="archive-dl",
|
||||
creation_date=timezone.now(),
|
||||
track=track,
|
||||
duration=duration,
|
||||
size=filesize,
|
||||
bitrate=bitrate,
|
||||
library=service_library,
|
||||
from_activity=None,
|
||||
import_status="finished",
|
||||
)
|
||||
|
||||
|
||||
@celery.app.task(name="archivedl.archive_download")
|
||||
@celery.require_instance(models.Track.objects.select_related(), "track")
|
||||
def archive_download(track, conf):
|
||||
artist_name = utils.get_artist_credit_string(track)
|
||||
query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
|
||||
with requests.Session() as session:
|
||||
url = get_search_url(query, page_size=1, page=1)
|
||||
page_data = fetch_json(url, session)
|
||||
for obj in page_data["response"]["docs"]:
|
||||
logger.info(f"launching download item for {str(obj)}")
|
||||
download_item(
|
||||
item_data=obj,
|
||||
session=session,
|
||||
allowed_extensions=utils.SUPPORTED_EXTENSIONS,
|
||||
track=track,
|
||||
)
|
||||
|
||||
|
||||
def fetch_json(url, session):
|
||||
logger.info(f"Fetching {url}...")
|
||||
with session.get(url) as response:
|
||||
return response.json()
|
||||
|
||||
|
||||
def download_item(
|
||||
item_data,
|
||||
session,
|
||||
allowed_extensions,
|
||||
track,
|
||||
):
|
||||
files_data = get_files_data(item_data["identifier"], session)
|
||||
to_download = list(
|
||||
filter_files(
|
||||
files_data["result"],
|
||||
allowed_extensions=allowed_extensions,
|
||||
)
|
||||
)
|
||||
url = f"https://archive.org/download/{item_data['identifier']}/{to_download[0]['name']}"
|
||||
upload = create_upload(url, track, to_download[0])
|
||||
try:
|
||||
with tempfile.TemporaryDirectory() as temp_dir:
|
||||
path = os.path.join(temp_dir, to_download[0]["name"])
|
||||
download_file(
|
||||
path,
|
||||
url=url,
|
||||
session=session,
|
||||
checksum=to_download[0]["sha1"],
|
||||
upload=upload,
|
||||
to_download=to_download,
|
||||
)
|
||||
|
||||
logger.info(f"Finished to download item {item_data['identifier']}...")
|
||||
except Exception as e:
|
||||
upload.delete()
|
||||
raise e
|
||||
|
||||
|
||||
def check_integrity(path, expected_checksum):
|
||||
with open(path, mode="rb") as f:
|
||||
hash = hashlib.sha1()
|
||||
hash.update(f.read())
|
||||
|
||||
return expected_checksum == hash.hexdigest()
|
||||
|
||||
|
||||
def get_files_data(identifier, session):
|
||||
url = f"https://archive.org/metadata/{identifier}/files"
|
||||
logger.info(f"Fetching files data at {url}...")
|
||||
with session.get(url) as response:
|
||||
return response.json()
|
||||
|
||||
|
||||
def download_file(path, url, session, checksum, upload, to_download):
|
||||
if os.path.exists(path) and check_integrity(path, checksum):
|
||||
logger.info(f"Skipping already downloaded file at {path}")
|
||||
return
|
||||
logger.info(f"Downloading file {url}...")
|
||||
with open(path, mode="wb") as f:
|
||||
try:
|
||||
with session.get(url) as response:
|
||||
f.write(response.content)
|
||||
except asyncio.TimeoutError as e:
|
||||
logger.error(f"Timeout error while downloading {url}: {e}")
|
||||
|
||||
with open(path, "rb") as f:
|
||||
upload.audio_file.save(f"{to_download['name']}", File(f))
|
||||
upload.import_status = "finished"
|
||||
upload.url = url
|
||||
upload.save()
|
||||
return upload
|
||||
|
||||
|
||||
def filter_files(files, allowed_extensions):
|
||||
for f in files:
|
||||
if allowed_extensions:
|
||||
extension = os.path.splitext(f["name"])[-1][1:]
|
||||
if extension not in allowed_extensions:
|
||||
continue
|
||||
yield f
|
||||
|
||||
|
||||
def get_search_url(query, page_size, page):
|
||||
q = urllib.parse.urlencode({"q": query})
|
||||
return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}&page={page}&output=json"
|
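Taken together, the helpers above implement a search → metadata → download pipeline against archive.org. A hedged sketch of driving them directly; the query string and extension set are examples, and the helper functions are the ones defined above in this file.

```python
# Hedged sketch reusing the helpers defined above; query and extensions are examples.
import requests

query = "mediatype:audio AND title:Example Song AND creator:Example Artist"
allowed = {"mp3", "ogg", "opus", "flac"}

with requests.Session() as session:
    search_url = get_search_url(query, page_size=1, page=1)
    results = fetch_json(search_url, session)
    for doc in results["response"]["docs"]:
        files = get_files_data(doc["identifier"], session)
        candidates = list(filter_files(files["result"], allowed_extensions=allowed))
        if candidates:
            print(doc["identifier"], candidates[0]["name"])
```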
|
@ -1,168 +0,0 @@
|
|||
# Copyright (c) 2018 Philipp Wolfer <ph.wolfer@gmail.com>
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining
|
||||
# a copy of this software and associated documentation files (the
|
||||
# "Software"), to deal in the Software without restriction, including
|
||||
# without limitation the rights to use, copy, modify, merge, publish,
|
||||
# distribute, sublicense, and/or sell copies of the Software, and to
|
||||
# permit persons to whom the Software is furnished to do so, subject to
|
||||
# the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be
|
||||
# included in all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
||||
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
||||
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
||||
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
||||
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
||||
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import json
|
||||
import logging
|
||||
import ssl
|
||||
import time
|
||||
from http.client import HTTPSConnection
|
||||
|
||||
HOST_NAME = "api.listenbrainz.org"
|
||||
PATH_SUBMIT = "/1/submit-listens"
|
||||
SSL_CONTEXT = ssl.create_default_context()
|
||||
|
||||
|
||||
class Track:
|
||||
"""
|
||||
Represents a single track to submit.
|
||||
|
||||
See https://listenbrainz.readthedocs.io/en/latest/dev/json.html
|
||||
"""
|
||||
|
||||
def __init__(self, artist_name, track_name, release_name=None, additional_info={}):
|
||||
"""
|
||||
Create a new Track instance
|
||||
@param artist_name as str
|
||||
@param track_name as str
|
||||
@param release_name as str
|
||||
@param additional_info as dict
|
||||
"""
|
||||
self.artist_name = artist_name
|
||||
self.track_name = track_name
|
||||
self.release_name = release_name
|
||||
self.additional_info = additional_info
|
||||
|
||||
@staticmethod
|
||||
def from_dict(data):
|
||||
return Track(
|
||||
data["artist_name"],
|
||||
data["track_name"],
|
||||
data.get("release_name", None),
|
||||
data.get("additional_info", {}),
|
||||
)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
"artist_name": self.artist_name,
|
||||
"track_name": self.track_name,
|
||||
"release_name": self.release_name,
|
||||
"additional_info": self.additional_info,
|
||||
}
|
||||
|
||||
def __repr__(self):
|
||||
return f"Track({self.artist_name}, {self.track_name})"
|
||||
|
||||
|
||||
class ListenBrainzClient:
|
||||
"""
|
||||
Submit listens to ListenBrainz.org.
|
||||
|
||||
See https://listenbrainz.readthedocs.io/en/latest/dev/api.html
|
||||
"""
|
||||
|
||||
def __init__(self, user_token, logger=logging.getLogger(__name__)):
|
||||
self.__next_request_time = 0
|
||||
self.user_token = user_token
|
||||
self.logger = logger
|
||||
|
||||
def listen(self, listened_at, track):
|
||||
"""
|
||||
Submit a listen for a track
|
||||
@param listened_at as int
|
||||
@param entry as Track
|
||||
"""
|
||||
payload = _get_payload(track, listened_at)
|
||||
return self._submit("single", [payload])
|
||||
|
||||
def playing_now(self, track):
|
||||
"""
|
||||
Submit a playing now notification for a track
|
||||
@param track as Track
|
||||
"""
|
||||
payload = _get_payload(track)
|
||||
return self._submit("playing_now", [payload])
|
||||
|
||||
def import_tracks(self, tracks):
|
||||
"""
|
||||
Import a list of tracks as (listened_at, Track) pairs
|
||||
@param track as [(int, Track)]
|
||||
"""
|
||||
payload = _get_payload_many(tracks)
|
||||
return self._submit("import", payload)
|
||||
|
||||
def _submit(self, listen_type, payload, retry=0):
|
||||
self._wait_for_ratelimit()
|
||||
self.logger.debug("ListenBrainz %s: %r", listen_type, payload)
|
||||
data = {"listen_type": listen_type, "payload": payload}
|
||||
headers = {
|
||||
"Authorization": "Token %s" % self.user_token,
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
body = json.dumps(data)
|
||||
conn = HTTPSConnection(HOST_NAME, context=SSL_CONTEXT)
|
||||
conn.request("POST", PATH_SUBMIT, body, headers)
|
||||
response = conn.getresponse()
|
||||
response_text = response.read()
|
||||
try:
|
||||
response_data = json.loads(response_text)
|
||||
except json.decoder.JSONDecodeError:
|
||||
response_data = response_text
|
||||
|
||||
self._handle_ratelimit(response)
|
||||
log_msg = f"Response {response.status}: {response_data!r}"
|
||||
if response.status == 429 and retry < 5: # Too Many Requests
|
||||
self.logger.warning(log_msg)
|
||||
return self._submit(listen_type, payload, retry + 1)
|
||||
elif response.status == 200:
|
||||
self.logger.debug(log_msg)
|
||||
else:
|
||||
self.logger.error(log_msg)
|
||||
return response
|
||||
|
||||
def _wait_for_ratelimit(self):
|
||||
now = time.time()
|
||||
if self.__next_request_time > now:
|
||||
delay = self.__next_request_time - now
|
||||
self.logger.debug("Rate limit applies, delay %d", delay)
|
||||
time.sleep(delay)
|
||||
|
||||
def _handle_ratelimit(self, response):
|
||||
remaining = int(response.getheader("X-RateLimit-Remaining", 0))
|
||||
reset_in = int(response.getheader("X-RateLimit-Reset-In", 0))
|
||||
self.logger.debug("X-RateLimit-Remaining: %i", remaining)
|
||||
self.logger.debug("X-RateLimit-Reset-In: %i", reset_in)
|
||||
if remaining == 0:
|
||||
self.__next_request_time = time.time() + reset_in
|
||||
|
||||
|
||||
def _get_payload_many(tracks):
|
||||
payload = []
|
||||
for listened_at, track in tracks:
|
||||
data = _get_payload(track, listened_at)
|
||||
payload.append(data)
|
||||
return payload
|
||||
|
||||
|
||||
def _get_payload(track, listened_at=None):
|
||||
data = {"track_metadata": track.to_dict()}
|
||||
if listened_at is not None:
|
||||
data["listened_at"] = listened_at
|
||||
return data
|
|
@ -1,27 +1,31 @@
|
|||
import liblistenbrainz
|
||||
|
||||
import funkwhale_api
|
||||
from config import plugins
|
||||
from funkwhale_api.favorites import models as favorites_models
|
||||
from funkwhale_api.history import models as history_models
|
||||
|
||||
from .client import ListenBrainzClient, Track
|
||||
from . import tasks
|
||||
from .funkwhale_startup import PLUGIN
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.LISTENING_CREATED, PLUGIN)
|
||||
def submit_listen(listening, conf, **kwargs):
|
||||
user_token = conf["user_token"]
|
||||
if not user_token:
|
||||
if not user_token and not conf["submit_listenings"]:
|
||||
return
|
||||
|
||||
logger = PLUGIN["logger"]
|
||||
logger.info("Submitting listen to ListenBrainz")
|
||||
client = ListenBrainzClient(user_token=user_token, logger=logger)
|
||||
track = get_track(listening.track)
|
||||
client.listen(int(listening.creation_date.timestamp()), track)
|
||||
client = liblistenbrainz.ListenBrainz()
|
||||
client.set_auth_token(user_token)
|
||||
listen = get_lb_listen(listening)
|
||||
|
||||
client.submit_single_listen(listen)
|
||||
|
||||
|
||||
def get_track(track):
|
||||
artist = track.artist.name
|
||||
title = track.title
|
||||
album = None
|
||||
def get_lb_listen(listening):
|
||||
track = listening.track
|
||||
additional_info = {
|
||||
"media_player": "Funkwhale",
|
||||
"media_player_version": funkwhale_api.__version__,
|
||||
|
@ -36,15 +40,97 @@ def get_track(track):
|
|||
|
||||
if track.album:
|
||||
if track.album.title:
|
||||
album = track.album.title
|
||||
release_name = track.album.title
|
||||
if track.album.mbid:
|
||||
additional_info["release_mbid"] = str(track.album.mbid)
|
||||
|
||||
if track.artist.mbid:
|
||||
additional_info["artist_mbids"] = [str(track.artist.mbid)]
|
||||
mbids = [ac.artist.mbid for ac in track.artist_credit.all() if ac.artist.mbid]
|
||||
if mbids:
|
||||
additional_info["artist_mbids"] = mbids
|
||||
|
||||
upload = track.uploads.filter(duration__gte=0).first()
|
||||
if upload:
|
||||
additional_info["duration"] = upload.duration
|
||||
|
||||
return Track(artist, title, album, additional_info)
|
||||
return liblistenbrainz.Listen(
|
||||
track_name=track.title,
|
||||
listened_at=listening.creation_date.timestamp(),
|
||||
artist_name=track.get_artist_credit_string,
|
||||
release_name=release_name,
|
||||
additional_info=additional_info,
|
||||
)
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.FAVORITE_CREATED, PLUGIN)
|
||||
def submit_favorite_creation(track_favorite, conf, **kwargs):
|
||||
user_token = conf["user_token"]
|
||||
if not user_token or not conf["submit_favorites"]:
|
||||
return
|
||||
logger = PLUGIN["logger"]
|
||||
logger.info("Submitting favorite to ListenBrainz")
|
||||
client = liblistenbrainz.ListenBrainz()
|
||||
track = track_favorite.track
|
||||
if not track.mbid:
|
||||
logger.warning(
|
||||
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
|
||||
)
|
||||
return
|
||||
client.submit_user_feedback(1, track.mbid)
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.FAVORITE_DELETED, PLUGIN)
|
||||
def submit_favorite_deletion(track_favorite, conf, **kwargs):
|
||||
user_token = conf["user_token"]
|
||||
if not user_token or not conf["submit_favorites"]:
|
||||
return
|
||||
logger = PLUGIN["logger"]
|
||||
logger.info("Submitting favorite deletion to ListenBrainz")
|
||||
client = liblistenbrainz.ListenBrainz()
|
||||
track = track_favorite.track
|
||||
if not track.mbid:
|
||||
logger.warning(
|
||||
"This tracks doesn't have a mbid. Feedback will not be submitted to Listenbrainz"
|
||||
)
|
||||
return
|
||||
client.submit_user_feedback(0, track.mbid)
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.LISTENING_SYNC, PLUGIN)
|
||||
def sync_listenings_from_listenbrainz(user, conf):
|
||||
user_name = conf["user_name"]
|
||||
|
||||
if not user_name or not conf["sync_listenings"]:
|
||||
return
|
||||
logger = PLUGIN["logger"]
|
||||
logger.info("Getting listenings from ListenBrainz")
|
||||
try:
|
||||
last_ts = (
|
||||
history_models.Listening.objects.filter(actor=user.actor)
|
||||
.filter(source="Listenbrainz")
|
||||
.latest("creation_date")
|
||||
.values_list("creation_date", flat=True)
|
||||
).timestamp()
|
||||
except funkwhale_api.history.models.Listening.DoesNotExist:
|
||||
tasks.import_listenbrainz_listenings(user, user_name, 0)
|
||||
return
|
||||
|
||||
tasks.import_listenbrainz_listenings(user, user_name, last_ts)
|
||||
|
||||
|
||||
@plugins.register_hook(plugins.FAVORITE_SYNC, PLUGIN)
|
||||
def sync_favorites_from_listenbrainz(user, conf):
|
||||
user_name = conf["user_name"]
|
||||
|
||||
if not user_name or not conf["sync_favorites"]:
|
||||
return
|
||||
try:
|
||||
last_ts = (
|
||||
favorites_models.TrackFavorite.objects.filter(actor=user.actor)
|
||||
.filter(source="Listenbrainz")
|
||||
.latest("creation_date")
|
||||
.creation_date.timestamp()
|
||||
)
|
||||
except favorites_models.TrackFavorite.DoesNotExist:
|
||||
tasks.import_listenbrainz_favorites(user, user_name, 0)
|
||||
return
|
||||
|
||||
tasks.import_listenbrainz_favorites(user, user_name, last_ts)
|
||||
|
|
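The hand-rolled HTTP client removed earlier is replaced by `liblistenbrainz` throughout this plugin. A hedged, standalone sketch of the calls the hooks above rely on (the token and metadata values are placeholders):

```python
# Hedged sketch of the liblistenbrainz calls used by the hooks above;
# the token and metadata values are placeholders.
import time

import liblistenbrainz

client = liblistenbrainz.ListenBrainz()
client.set_auth_token("listenbrainz-user-token")

listen = liblistenbrainz.Listen(
    track_name="Example Track",
    artist_name="Example Artist",
    listened_at=int(time.time()),
    additional_info={"media_player": "Funkwhale"},
)
client.submit_single_listen(listen)

# Feedback: 1 = love, 0 = remove feedback (as used above for favorites)
client.submit_user_feedback(1, "recording-mbid-placeholder")
```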
|
@ -3,7 +3,7 @@ from config import plugins
|
|||
PLUGIN = plugins.get_plugin_config(
|
||||
name="listenbrainz",
|
||||
label="ListenBrainz",
|
||||
description="A plugin that allows you to submit your listens to ListenBrainz.",
|
||||
description="A plugin that allows you to submit or sync your listens and favorites to ListenBrainz.",
|
||||
homepage="https://docs.funkwhale.audio/users/builtinplugins.html#listenbrainz-plugin", # noqa
|
||||
version="0.3",
|
||||
user=True,
|
||||
|
@ -13,6 +13,45 @@ PLUGIN = plugins.get_plugin_config(
|
|||
"type": "text",
|
||||
"label": "Your ListenBrainz user token",
|
||||
"help": "You can find your user token in your ListenBrainz profile at https://listenbrainz.org/profile/",
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "user_name",
|
||||
"type": "text",
|
||||
"required": False,
|
||||
"label": "Your ListenBrainz user name.",
|
||||
"help": "Required for importing listenings and favorites with ListenBrainz \
|
||||
but not for submitting them",
|
||||
},
|
||||
{
|
||||
"name": "submit_listenings",
|
||||
"type": "boolean",
|
||||
"default": True,
|
||||
"label": "Enable listening submission to ListenBrainz",
|
||||
"help": "If enabled, your listenings from Funkwhale will be imported into ListenBrainz.",
|
||||
},
|
||||
{
|
||||
"name": "sync_listenings",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
"label": "Enable listenings sync",
|
||||
"help": "If enabled, your listening from ListenBrainz will be imported into Funkwhale. This means they \
|
||||
will be used along with Funkwhale listenings to filter out recently listened content or \
|
||||
generate recommendations",
|
||||
},
|
||||
{
|
||||
"name": "sync_favorites",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
"label": "Enable favorite sync",
|
||||
"help": "If enabled, your favorites from ListenBrainz will be imported into Funkwhale. This means they \
|
||||
will be used along with Funkwhale favorites (UI display, federation activity)",
|
||||
},
|
||||
{
|
||||
"name": "submit_favorites",
|
||||
"type": "boolean",
|
||||
"default": False,
|
||||
"label": "Enable favorite submission to ListenBrainz services",
|
||||
"help": "If enabled, your favorites from Funkwhale will be submitted to ListenBrainz",
|
||||
},
|
||||
],
|
||||
)
|
||||
|
|
|
@ -0,0 +1,165 @@
|
|||
import datetime
|
||||
|
||||
import liblistenbrainz
|
||||
from django.utils import timezone
|
||||
|
||||
from config import plugins
|
||||
from funkwhale_api.favorites import models as favorites_models
|
||||
from funkwhale_api.history import models as history_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.taskapp import celery
|
||||
from funkwhale_api.users import models
|
||||
|
||||
from .funkwhale_startup import PLUGIN
|
||||
|
||||
|
||||
@celery.app.task(name="listenbrainz.trigger_listening_sync_with_listenbrainz")
|
||||
def trigger_listening_sync_with_listenbrainz():
|
||||
now = timezone.now()
|
||||
active_month = now - datetime.timedelta(days=30)
|
||||
users = (
|
||||
models.User.objects.filter(plugins__code="listenbrainz")
|
||||
.filter(plugins__conf__sync_listenings=True)
|
||||
.filter(last_activity__gte=active_month)
|
||||
)
|
||||
for user in users:
|
||||
plugins.trigger_hook(
|
||||
plugins.LISTENING_SYNC,
|
||||
user=user,
|
||||
confs=plugins.get_confs(user),
|
||||
)
|
||||
|
||||
|
||||
@celery.app.task(name="listenbrainz.trigger_favorite_sync_with_listenbrainz")
|
||||
def trigger_favorite_sync_with_listenbrainz():
|
||||
now = timezone.now()
|
||||
active_month = now - datetime.timedelta(days=30)
|
||||
users = (
|
||||
models.User.objects.filter(plugins__code="listenbrainz")
|
||||
.filter(plugins__conf__sync_listenings=True)
|
||||
.filter(last_activity__gte=active_month)
|
||||
)
|
||||
for user in users:
|
||||
plugins.trigger_hook(
|
||||
plugins.FAVORITE_SYNC,
|
||||
user=user,
|
||||
confs=plugins.get_confs(user),
|
||||
)
|
||||
|
||||
|
||||
@celery.app.task(name="listenbrainz.import_listenbrainz_listenings")
|
||||
def import_listenbrainz_listenings(user, user_name, since):
|
||||
client = liblistenbrainz.ListenBrainz()
|
||||
response = client.get_listens(username=user_name, min_ts=since, count=100)
|
||||
listens = response["payload"]["listens"]
|
||||
while listens:
|
||||
add_lb_listenings_to_db(listens, user)
|
||||
new_ts = max(
|
||||
listens,
|
||||
key=lambda obj: datetime.datetime.fromtimestamp(
|
||||
obj.listened_at, datetime.timezone.utc
|
||||
),
|
||||
)
|
||||
response = client.get_listens(username=user_name, min_ts=new_ts, count=100)
|
||||
listens = response["payload"]["listens"]
|
||||
|
||||
|
||||
def add_lb_listenings_to_db(listens, user):
|
||||
logger = PLUGIN["logger"]
|
||||
fw_listens = []
|
||||
for listen in listens:
|
||||
if (
|
||||
listen.additional_info.get("submission_client")
|
||||
and listen.additional_info.get("submission_client")
|
||||
== "Funkwhale ListenBrainz plugin"
|
||||
and history_models.Listening.objects.filter(
|
||||
creation_date=datetime.datetime.fromtimestamp(
|
||||
listen.listened_at, datetime.timezone.utc
|
||||
)
|
||||
).exists()
|
||||
):
|
||||
logger.info(
|
||||
f"Listen with ts {listen.listened_at} skipped because already in db"
|
||||
)
|
||||
continue
|
||||
|
||||
mbid = (
|
||||
listen.mbid_mapping
|
||||
if hasattr(listen, "mbid_mapping")
|
||||
else listen.recording_mbid
|
||||
)
|
||||
|
||||
if not mbid:
|
||||
logger.info("Received listening that doesn't have a mbid. Skipping...")
|
||||
|
||||
try:
|
||||
track = music_models.Track.objects.get(mbid=mbid)
|
||||
except music_models.Track.DoesNotExist:
|
||||
logger.info(
|
||||
"Received listening that doesn't exist in fw database. Skipping..."
|
||||
)
|
||||
continue
|
||||
|
||||
user = user
|
||||
fw_listen = history_models.Listening(
|
||||
creation_date=datetime.datetime.fromtimestamp(
|
||||
listen.listened_at, datetime.timezone.utc
|
||||
),
|
||||
track=track,
|
||||
actor=user.actor,
|
||||
source="Listenbrainz",
|
||||
)
|
||||
fw_listens.append(fw_listen)
|
||||
|
||||
history_models.Listening.objects.bulk_create(fw_listens)
|
||||
|
||||
|
||||
@celery.app.task(name="listenbrainz.import_listenbrainz_favorites")
|
||||
def import_listenbrainz_favorites(user, user_name, since):
|
||||
client = liblistenbrainz.ListenBrainz()
|
||||
response = client.get_user_feedback(username=user_name)
|
||||
offset = 0
|
||||
while response["feedback"]:
|
||||
count = response["count"]
|
||||
offset = offset + count
|
||||
last_sync = min(
|
||||
response["feedback"],
|
||||
key=lambda obj: datetime.datetime.fromtimestamp(
|
||||
obj["created"], datetime.timezone.utc
|
||||
),
|
||||
)["created"]
|
||||
add_lb_feedback_to_db(response["feedback"], user)
|
||||
if last_sync <= since or count == 0:
|
||||
return
|
||||
response = client.get_user_feedback(username=user_name, offset=offset)
|
||||
|
||||
|
||||
def add_lb_feedback_to_db(feedbacks, user):
|
||||
logger = PLUGIN["logger"]
|
||||
for feedback in feedbacks:
|
||||
try:
|
||||
track = music_models.Track.objects.get(mbid=feedback["recording_mbid"])
|
||||
except music_models.Track.DoesNotExist:
|
||||
logger.info(
|
||||
"Received feedback track that doesn't exist in fw database. Skipping..."
|
||||
)
|
||||
continue
|
||||
|
||||
if feedback["score"] == 1:
|
||||
favorites_models.TrackFavorite.objects.get_or_create(
|
||||
actor=user.actor,
|
||||
creation_date=datetime.datetime.fromtimestamp(
|
||||
feedback["created"], datetime.timezone.utc
|
||||
),
|
||||
track=track,
|
||||
source="Listenbrainz",
|
||||
)
|
||||
elif feedback["score"] == 0:
|
||||
try:
|
||||
favorites_models.TrackFavorite.objects.get(
|
||||
actor=user.actor, track=track
|
||||
).delete()
|
||||
except favorites_models.TrackFavorite.DoesNotExist:
|
||||
continue
|
||||
elif feedback["score"] == -1:
|
||||
logger.info("Funkwhale doesn't support disliked tracks")
|
|
@ -37,7 +37,7 @@ def get_payload(listening, api_key, conf):
|
|||
# See https://github.com/krateng/maloja/blob/master/API.md
|
||||
payload = {
|
||||
"key": api_key,
|
||||
"artists": [track.artist.name],
|
||||
"artists": [artist.name for artist in track.artist_credit.get_artists_list()],
|
||||
"title": track.title,
|
||||
"time": int(listening.creation_date.timestamp()),
|
||||
"nofix": bool(conf.get("nofix")),
|
||||
|
@ -46,8 +46,10 @@ def get_payload(listening, api_key, conf):
|
|||
if track.album:
|
||||
if track.album.title:
|
||||
payload["album"] = track.album.title
|
||||
if track.album.artist:
|
||||
payload["albumartists"] = [track.album.artist.name]
|
||||
if track.album.artist_credit.all():
|
||||
payload["albumartists"] = [
|
||||
artist.name for artist in track.album.artist_credit.get_artists_list()
|
||||
]
|
||||
|
||||
upload = track.uploads.filter(duration__gte=0).first()
|
||||
if upload:
|
||||
|
|
|
@ -29,7 +29,7 @@ def forward_to_scrobblers(listening, conf, **kwargs):
|
|||
(username + " " + password).encode("utf-8")
|
||||
).hexdigest()
|
||||
cache_key = "lastfm:sessionkey:{}".format(
|
||||
":".join([str(listening.user.pk), hashed_auth])
|
||||
":".join([str(listening.actor.pk), hashed_auth])
|
||||
)
|
||||
PLUGIN["logger"].info("Forwarding scrobble to %s", LASTFM_SCROBBLER_URL)
|
||||
session_key = PLUGIN["cache"].get(cache_key)
|
||||
|
|
|
@ -84,7 +84,7 @@ def get_scrobble_payload(track, date, suffix="[0]"):
|
|||
"""
|
||||
upload = track.uploads.filter(duration__gte=0).first()
|
||||
data = {
|
||||
f"a{suffix}": track.artist.name,
|
||||
f"a{suffix}": track.get_artist_credit_string,
|
||||
f"t{suffix}": track.title,
|
||||
f"l{suffix}": upload.duration if upload else 0,
|
||||
f"b{suffix}": (track.album.title if track.album else "") or "",
|
||||
|
@ -103,7 +103,7 @@ def get_scrobble2_payload(track, date, suffix="[0]"):
|
|||
"""
|
||||
upload = track.uploads.filter(duration__gte=0).first()
|
||||
data = {
|
||||
"artist": track.artist.name,
|
||||
"artist": track.get_artist_credit_string,
|
||||
"track": track.title,
|
||||
"chosenByUser": 1,
|
||||
}
|
||||
|
|
|
@ -314,9 +314,12 @@ class FunkwhaleProvider(internet_provider.Provider):
|
|||
not random enough
|
||||
"""
|
||||
|
||||
def federation_url(self, prefix="", local=False):
|
||||
def federation_url(self, prefix="", obj_uuid=None, local=False):
|
||||
if not obj_uuid:
|
||||
obj_uuid = uuid.uuid4()
|
||||
|
||||
def path_generator():
|
||||
return f"{prefix}/{uuid.uuid4()}"
|
||||
return f"{prefix}/{obj_uuid}"
|
||||
|
||||
domain = settings.FEDERATION_HOSTNAME if local else self.domain_name()
|
||||
protocol = "https"
|
||||
|
|
|
@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.TrackFavoriteActivitySerializer)
|
|||
|
||||
@record.registry.register_consumer("favorites.TrackFavorite")
|
||||
def broadcast_track_favorite_to_instance_activity(data, obj):
|
||||
if obj.user.privacy_level not in ["instance", "everyone"]:
|
||||
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
|
||||
return
|
||||
|
||||
channels.group_send(
|
||||
|
|
|
@ -5,5 +5,5 @@ from . import models
|
|||
|
||||
@admin.register(models.TrackFavorite)
|
||||
class TrackFavoriteAdmin(admin.ModelAdmin):
|
||||
list_display = ["user", "track", "creation_date"]
|
||||
list_select_related = ["user", "track"]
|
||||
list_display = ["actor", "track", "creation_date"]
|
||||
list_select_related = ["actor", "track"]
|
||||
|
|
|
@ -1,14 +1,28 @@
|
|||
import factory
|
||||
from django.conf import settings
|
||||
|
||||
from funkwhale_api.factories import NoUpdateOnCreate, registry
|
||||
from funkwhale_api.federation import models
|
||||
from funkwhale_api.federation.factories import ActorFactory
|
||||
from funkwhale_api.music.factories import TrackFactory
|
||||
from funkwhale_api.users.factories import UserFactory
|
||||
|
||||
|
||||
@registry.register
|
||||
class TrackFavorite(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
track = factory.SubFactory(TrackFactory)
|
||||
user = factory.SubFactory(UserFactory)
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
fid = factory.Faker("federation_url")
|
||||
uuid = factory.Faker("uuid4")
|
||||
|
||||
class Meta:
|
||||
model = "favorites.TrackFavorite"
|
||||
|
||||
@factory.post_generation
|
||||
def local(self, create, extracted, **kwargs):
|
||||
if not extracted and not kwargs:
|
||||
return
|
||||
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
|
||||
0
|
||||
]
|
||||
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
|
||||
self.save(update_fields=["fid"])
|
||||
|
|
|
@ -9,7 +9,7 @@ class TrackFavoriteFilter(moderation_filters.HiddenContentFilterSet):
|
|||
q = fields.SearchFilter(
|
||||
search_fields=["track__title", "track__artist__name", "track__album__title"]
|
||||
)
|
||||
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
|
||||
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)
|
||||
|
||||
class Meta:
|
||||
model = models.TrackFavorite
|
||||
|
|
|
@ -0,0 +1,18 @@
|
|||
# Generated by Django 3.2.20 on 2023-12-09 14:25
|
||||
|
||||
from django.db import migrations, models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('favorites', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='trackfavorite',
|
||||
name='source',
|
||||
field=models.CharField(blank=True, max_length=100, null=True),
|
||||
),
|
||||
]
|
|
@ -0,0 +1,102 @@
|
|||
# Generated by Django 4.2.9 on 2024-03-28 23:32
|
||||
|
||||
import uuid
|
||||
|
||||
from django.db import migrations, models, transaction
|
||||
import django.db.models.deletion
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
from funkwhale_api.federation import utils
|
||||
from django.urls import reverse
|
||||
|
||||
|
||||
def gen_uuid(apps, schema_editor):
|
||||
MyModel = apps.get_model("favorites", "TrackFavorite")
|
||||
for row in MyModel.objects.all():
|
||||
unique_uuid = uuid.uuid4()
|
||||
while MyModel.objects.filter(uuid=unique_uuid).exists():
|
||||
unique_uuid = uuid.uuid4()
|
||||
|
||||
fid = utils.full_url(
|
||||
reverse("federation:music:likes-detail", kwargs={"uuid": unique_uuid})
|
||||
)
|
||||
row.uuid = unique_uuid
|
||||
row.fid = fid
|
||||
row.save(update_fields=["uuid", "fid"])
|
||||
|
||||
|
||||
def get_user_actor(apps, schema_editor):
|
||||
MyModel = apps.get_model("favorites", "TrackFavorite")
|
||||
for row in MyModel.objects.all():
|
||||
actor = row.user.actor
|
||||
row.actor = actor
|
||||
row.save(update_fields=["actor"])
|
||||
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("favorites", "0002_trackfavorite_source"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="trackfavorite",
|
||||
name="actor",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="track_favorites",
|
||||
to="federation.actor",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="trackfavorite",
|
||||
name="fid",
|
||||
field=models.URLField(
|
||||
db_index=True,
|
||||
default="https://default.fid",
|
||||
max_length=500,
|
||||
unique=True,
|
||||
),
|
||||
preserve_default=False,
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="trackfavorite",
|
||||
name="url",
|
||||
field=models.URLField(blank=True, max_length=500, null=True),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="trackfavorite",
|
||||
name="uuid",
|
||||
field=models.UUIDField(null=True),
|
||||
),
|
||||
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
|
||||
migrations.AlterField(
|
||||
model_name="trackfavorite",
|
||||
name="uuid",
|
||||
field=models.UUIDField(default=uuid.uuid4, unique=True, null=False),
|
||||
),
|
||||
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
|
||||
migrations.AlterField(
|
||||
model_name="trackfavorite",
|
||||
name="actor",
|
||||
field=models.ForeignKey(
|
||||
blank=False,
|
||||
null=False,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="track_favorites",
|
||||
to="federation.actor",
|
||||
),
),
|
||||
migrations.AlterUniqueTogether(
|
||||
name="trackfavorite",
|
||||
unique_together={("track", "actor")},
|
||||
),
|
||||
migrations.RemoveField(
|
||||
model_name="trackfavorite",
|
||||
name="user",
|
||||
),
|
||||
|
||||
]
|
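The migration above follows the standard three-step backfill pattern: add the new columns as nullable, populate them with `RunPython`, then tighten the constraints and drop the old column. A hedged sketch of the backfill step; the `select_related()` call is an optimisation assumption, not what the migration above actually does.

```python
# Hedged sketch of the RunPython backfill step above; select_related() is an
# optimisation assumption to avoid one query per favorite.
def get_user_actor(apps, schema_editor):
    TrackFavorite = apps.get_model("favorites", "TrackFavorite")
    for row in TrackFavorite.objects.select_related("user__actor").iterator():
        row.actor = row.user.actor
        row.save(update_fields=["actor"])
```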
|
@ -1,26 +1,91 @@
|
|||
import uuid
|
||||
|
||||
from django.db import models
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
|
||||
from funkwhale_api.common import fields
|
||||
from funkwhale_api.common import models as common_models
|
||||
from funkwhale_api.federation import models as federation_models
|
||||
from funkwhale_api.federation import utils as federation_utils
|
||||
from funkwhale_api.music.models import Track
|
||||
|
||||
FAVORITE_PRIVACY_LEVEL_CHOICES = [
|
||||
(k, l) for k, l in fields.PRIVACY_LEVEL_CHOICES if k != "followers"
|
||||
]
|
||||
|
||||
class TrackFavorite(models.Model):
|
||||
|
||||
class TrackFavoriteQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
|
||||
def viewable_by(self, actor):
|
||||
if actor is None:
|
||||
return self.filter(actor__user__privacy_level="everyone")
|
||||
|
||||
if hasattr(actor, "user"):
|
||||
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
|
||||
me_query = models.Q(actor__user__privacy_level="me", actor=actor)
|
||||
|
||||
instance_query = models.Q(
|
||||
actor__user__privacy_level="instance", actor__domain=actor.domain
|
||||
)
|
||||
instance_actor_query = models.Q(
|
||||
actor__user__privacy_level="instance", actor__domain=actor.domain
|
||||
)
|
||||
|
||||
return self.filter(
|
||||
me_query
|
||||
| instance_query
|
||||
| instance_actor_query
|
||||
| models.Q(actor__user__privacy_level="everyone")
|
||||
)
|
||||
|
||||
|
||||
class TrackFavorite(federation_models.FederationMixin):
|
||||
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
|
||||
creation_date = models.DateTimeField(default=timezone.now)
|
||||
user = models.ForeignKey(
|
||||
"users.User", related_name="track_favorites", on_delete=models.CASCADE
|
||||
actor = models.ForeignKey(
|
||||
"federation.Actor",
|
||||
related_name="track_favorites",
|
||||
on_delete=models.CASCADE,
|
||||
null=False,
|
||||
blank=False,
|
||||
)
|
||||
track = models.ForeignKey(
|
||||
Track, related_name="track_favorites", on_delete=models.CASCADE
|
||||
)
|
||||
source = models.CharField(max_length=100, null=True, blank=True)
|
||||
|
||||
federation_namespace = "likes"
|
||||
objects = TrackFavoriteQuerySet.as_manager()
|
||||
|
||||
class Meta:
|
||||
unique_together = ("track", "user")
|
||||
unique_together = ("track", "actor")
|
||||
|
||||
ordering = ("-creation_date",)
|
||||
|
||||
@classmethod
|
||||
def add(cls, track, user):
|
||||
favorite, created = cls.objects.get_or_create(user=user, track=track)
|
||||
def add(cls, track, actor):
|
||||
favorite, created = cls.objects.get_or_create(actor=actor, track=track)
|
||||
return favorite
|
||||
|
||||
def get_activity_url(self):
|
||||
return f"{self.user.get_activity_url()}/favorites/tracks/{self.pk}"
|
||||
return f"{self.actor.get_absolute_url()}/favorites/tracks/{self.pk}"
|
||||
|
||||
def get_absolute_url(self):
|
||||
return f"/library/tracks/{self.track.pk}"
|
||||
|
||||
def get_federation_id(self):
|
||||
if self.fid:
|
||||
return self.fid
|
||||
|
||||
return federation_utils.full_url(
|
||||
reverse(
|
||||
f"federation:music:{self.federation_namespace}-detail",
|
||||
kwargs={"uuid": self.uuid},
|
||||
)
|
||||
)
|
||||
|
||||
def save(self, **kwargs):
|
||||
if not self.pk and not self.fid:
|
||||
self.fid = self.get_federation_id()
|
||||
|
||||
return super().save(**kwargs)
|
||||
|
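A hedged usage sketch of the refactored model above; `track` and `request` are placeholders for objects that already exist.

```python
# Hedged usage sketch; `track` and `request` are placeholders.
from funkwhale_api.favorites.models import TrackFavorite

favorite = TrackFavorite.add(track=track, actor=request.user.actor)
favorite.fid                 # filled on first save() via get_federation_id()
favorite.get_absolute_url()  # "/library/tracks/<track pk>"
```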
|
|
@ -1,10 +1,8 @@
|
|||
from drf_spectacular.utils import extend_schema_field
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.activity import serializers as activity_serializers
|
||||
from funkwhale_api.federation import serializers as federation_serializers
|
||||
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
|
||||
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer
|
||||
|
||||
from . import models
|
||||
|
||||
|
@ -12,35 +10,24 @@ from . import models
|
|||
class TrackFavoriteActivitySerializer(activity_serializers.ModelSerializer):
|
||||
type = serializers.SerializerMethodField()
|
||||
object = TrackActivitySerializer(source="track")
|
||||
actor = UserActivitySerializer(source="user")
|
||||
actor = federation_serializers.APIActorSerializer(read_only=True)
|
||||
published = serializers.DateTimeField(source="creation_date")
|
||||
|
||||
class Meta:
|
||||
model = models.TrackFavorite
|
||||
fields = ["id", "local_id", "object", "type", "actor", "published"]
|
||||
|
||||
def get_actor(self, obj):
|
||||
return UserActivitySerializer(obj.user).data
|
||||
|
||||
def get_type(self, obj):
|
||||
return "Like"
|
||||
|
||||
|
||||
class UserTrackFavoriteSerializer(serializers.ModelSerializer):
|
||||
track = TrackSerializer(read_only=True)
|
||||
user = UserBasicSerializer(read_only=True)
|
||||
actor = serializers.SerializerMethodField()
|
||||
actor = federation_serializers.APIActorSerializer(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = models.TrackFavorite
|
||||
fields = ("id", "user", "track", "creation_date", "actor")
|
||||
actor = serializers.SerializerMethodField()
|
||||
|
||||
@extend_schema_field(federation_serializers.APIActorSerializer)
|
||||
def get_actor(self, obj):
|
||||
actor = obj.user.actor
|
||||
if actor:
|
||||
return federation_serializers.APIActorSerializer(actor).data
|
||||
fields = ("id", "actor", "track", "creation_date", "actor")
|
||||
|
||||
|
||||
class UserTrackFavoriteWriteSerializer(serializers.ModelSerializer):
|
||||
|
|
|
@ -4,8 +4,10 @@ from rest_framework import mixins, status, viewsets
|
|||
from rest_framework.decorators import action
|
||||
from rest_framework.response import Response
|
||||
|
||||
from config import plugins
|
||||
from funkwhale_api.activity import record
|
||||
from funkwhale_api.common import fields, permissions
|
||||
from funkwhale_api.federation import routes
|
||||
from funkwhale_api.music import utils as music_utils
|
||||
from funkwhale_api.music.models import Track
|
||||
from funkwhale_api.users.oauth import permissions as oauth_permissions
|
||||
|
@ -22,7 +24,7 @@ class TrackFavoriteViewSet(
|
|||
filterset_class = filters.TrackFavoriteFilter
|
||||
serializer_class = serializers.UserTrackFavoriteSerializer
|
||||
queryset = models.TrackFavorite.objects.all().select_related(
|
||||
"user__actor__attachment_icon"
|
||||
"actor__attachment_icon"
|
||||
)
|
||||
permission_classes = [
|
||||
oauth_permissions.ScopePermission,
|
||||
|
@ -31,6 +33,7 @@ class TrackFavoriteViewSet(
|
|||
required_scope = "favorites"
|
||||
anonymous_policy = "setting"
|
||||
owner_checks = ["write"]
|
||||
owner_field = "actor.user"
|
||||
|
||||
def get_serializer_class(self):
|
||||
if self.request.method.lower() in ["head", "get", "options"]:
|
||||
|
@ -44,7 +47,20 @@ class TrackFavoriteViewSet(
|
|||
instance = self.perform_create(serializer)
|
||||
serializer = self.get_serializer(instance=instance)
|
||||
headers = self.get_success_headers(serializer.data)
|
||||
plugins.trigger_hook(
|
||||
plugins.FAVORITE_CREATED,
|
||||
track_favorite=serializer.instance,
|
||||
confs=plugins.get_confs(self.request.user),
|
||||
)
|
||||
record.send(instance)
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Like", "object": {"type": "Track"}},
|
||||
context={
|
||||
"track": instance.track,
|
||||
"actor": instance.actor,
|
||||
"id": instance.fid,
|
||||
},
|
||||
)
|
||||
return Response(
|
||||
serializer.data, status=status.HTTP_201_CREATED, headers=headers
|
||||
)
|
||||
|
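Since favorites are now attributed to the requesting user's actor, the `create` override above ends with a federated `Like` dispatch. A minimal client-side sketch of exercising this endpoint; the path, payload shape and scope are assumptions inferred from the viewset, not a documented contract:

```python
# Hypothetical usage sketch; host, token and track id are placeholders.
import requests

resp = requests.post(
    "https://pod.example/api/v1/favorites/tracks/",
    json={"track": 42},  # handled by UserTrackFavoriteWriteSerializer
    headers={"Authorization": "Bearer <token with the 'favorites' scope>"},
)
resp.raise_for_status()
favorite = resp.json()
# The response is rendered with UserTrackFavoriteSerializer, so the new
# "actor" field should be present alongside "track" and "creation_date".
print(favorite["actor"], favorite["creation_date"])
```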
@ -52,19 +68,30 @@ class TrackFavoriteViewSet(
|
|||
def get_queryset(self):
|
||||
queryset = super().get_queryset()
|
||||
queryset = queryset.filter(
|
||||
fields.privacy_level_query(self.request.user, "user__privacy_level")
|
||||
fields.privacy_level_query(
|
||||
self.request.user, "actor__user__privacy_level", "actor__user"
|
||||
)
|
||||
)
|
||||
tracks = Track.objects.with_playable_uploads(
|
||||
music_utils.get_actor_from_request(self.request)
|
||||
).select_related(
|
||||
"artist", "album__artist", "attributed_to", "album__attachment_cover"
|
||||
tracks = (
|
||||
Track.objects.with_playable_uploads(
|
||||
music_utils.get_actor_from_request(self.request)
|
||||
)
|
||||
.prefetch_related(
|
||||
"artist_credit__artist",
|
||||
"album__artist_credit__artist",
|
||||
)
|
||||
.select_related(
|
||||
"attributed_to",
|
||||
"album__attachment_cover",
|
||||
)
|
||||
)
|
||||
|
||||
queryset = queryset.prefetch_related(Prefetch("track", queryset=tracks))
|
||||
return queryset
|
||||
|
||||
def perform_create(self, serializer):
|
||||
track = Track.objects.get(pk=serializer.data["track"])
|
||||
favorite = models.TrackFavorite.add(track=track, user=self.request.user)
|
||||
favorite = models.TrackFavorite.add(track=track, actor=self.request.user.actor)
|
||||
return favorite
|
||||
|
||||
@extend_schema(operation_id="unfavorite_track")
|
||||
|
@ -72,10 +99,19 @@ class TrackFavoriteViewSet(
|
|||
def remove(self, request, *args, **kwargs):
|
||||
try:
|
||||
pk = int(request.data["track"])
|
||||
favorite = request.user.track_favorites.get(track__pk=pk)
|
||||
favorite = request.user.actor.track_favorites.get(track__pk=pk)
|
||||
except (AttributeError, ValueError, models.TrackFavorite.DoesNotExist):
|
||||
return Response({}, status=400)
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Dislike", "object": {"type": "Track"}},
|
||||
context={"favorite": favorite},
|
||||
)
|
||||
favorite.delete()
|
||||
plugins.trigger_hook(
|
||||
plugins.FAVORITE_DELETED,
|
||||
track_favorite=favorite,
|
||||
confs=plugins.get_confs(self.request.user),
|
||||
)
|
||||
return Response([], status=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@extend_schema(
|
||||
|
@ -92,7 +128,9 @@ class TrackFavoriteViewSet(
|
|||
if not request.user.is_authenticated:
|
||||
return Response({"results": [], "count": 0}, status=401)
|
||||
|
||||
favorites = request.user.track_favorites.values("id", "track").order_by("id")
|
||||
favorites = request.user.actor.track_favorites.values("id", "track").order_by(
|
||||
"id"
|
||||
)
|
||||
payload = serializers.AllFavoriteSerializer(favorites).data
|
||||
|
||||
return Response(payload, status=200)
|
||||
|
|
|
@ -119,6 +119,9 @@ def should_reject(fid, actor_id=None, payload={}):
|
|||
|
||||
@transaction.atomic
|
||||
def receive(activity, on_behalf_of, inbox_actor=None):
|
||||
"""
|
||||
    Receive an activity, find its recipients and save it to the database before dispatching it
|
||||
"""
|
||||
from funkwhale_api.moderation import mrf
|
||||
|
||||
from . import models, serializers, tasks
|
||||
|
@ -223,6 +226,9 @@ class InboxRouter(Router):
|
|||
"""
|
||||
from . import api_serializers, models
|
||||
|
||||
logger.debug(
|
||||
f"[federation] Inbox dispatch payload : {payload} with context : {context}"
|
||||
)
|
||||
handlers = self.get_matching_handlers(payload)
|
||||
for handler in handlers:
|
||||
if call_handlers:
|
||||
|
@@ -293,6 +299,59 @@ def schedule_key_rotation(actor_id, delay):
    tasks.rotate_actor_key.apply_async(kwargs={"actor_id": actor_id}, countdown=delay)


def activity_pass_user_privacy_level(context, routing):
    TYPE_FOLLOW_USER_PRIVACY_LEVEL = ["Listen", "Like", "Create"]
    TYPE_IGNORE_USER_PRIVACY_LEVEL = ["Delete", "Accept", "Follow"]
    MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]

    actor = context.get("actor", False)
    type = routing.get("type", False)
    object_type = routing.get("object", {}).get("type", None)

    if not actor:
        logger.warning(
            "No actor provided in activity context : we cannot follow "
            "actor.privacy_level, activity will be sent by default."
        )

    # We do not consider music metadata as private
    if object_type in MUSIC_OBJECT_TYPE:
        return True

    if type:
        if type in TYPE_IGNORE_USER_PRIVACY_LEVEL:
            return True
        if type in TYPE_FOLLOW_USER_PRIVACY_LEVEL and actor and actor.is_local:
            if actor.user.privacy_level in [
                "me",
                "instance",
            ]:
                return False

            return True

    return True


def activity_pass_object_privacy_level(context, routing):
    MUSIC_OBJECT_TYPE = ["Audio", "Track", "Album", "Artist"]

    # we only support playlist federation for now
    object = context.get("playlist", False)

    obj_privacy_level = object.privacy_level if object else None
    object_type = routing.get("object", {}).get("type", None)

    # We do not consider music metadata as private
    if object_type in MUSIC_OBJECT_TYPE:
        return True

    if object and obj_privacy_level and obj_privacy_level in ["me", "instance"]:
        return False

    return True


class OutboxRouter(Router):
    @transaction.atomic
    def dispatch(self, routing, context):
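These two gates run at the top of `OutboxRouter.dispatch` below: activities tied to a local user whose privacy level is `me` or `instance`, or to a non-public playlist, are silently dropped before any route matching. A stand-alone sketch of the expected behaviour; the module path and the fake objects are assumptions for illustration only:

```python
# Sketch only: assumes these helpers live in funkwhale_api.federation.activity.
from types import SimpleNamespace

from funkwhale_api.federation.activity import (
    activity_pass_object_privacy_level,
    activity_pass_user_privacy_level,
)

# A local actor whose user restricts activity to the instance
actor = SimpleNamespace(is_local=True, user=SimpleNamespace(privacy_level="instance"))

# "Like" follows the user's privacy level, so this dispatch is discarded
assert not activity_pass_user_privacy_level(
    {"actor": actor}, {"type": "Like", "object": {"type": "Track"}}
)

# Music metadata is never treated as private, so a "Create Audio" still federates
assert activity_pass_user_privacy_level(
    {"actor": actor}, {"type": "Create", "object": {"type": "Audio"}}
)

# A playlist whose privacy_level is "me" never leaves the pod
playlist = SimpleNamespace(privacy_level="me")
assert not activity_pass_object_privacy_level(
    {"playlist": playlist}, {"type": "Create", "object": {"type": "Playlist"}}
)
```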
@ -305,6 +364,7 @@ class OutboxRouter(Router):
|
|||
|
||||
from . import models, tasks
|
||||
|
||||
logger.debug(f"[federation] Outbox dispatch context : {context}")
|
||||
allow_list_enabled = preferences.get("moderation__allow_list_enabled")
|
||||
allowed_domains = None
|
||||
if allow_list_enabled:
|
||||
|
@ -314,6 +374,18 @@ class OutboxRouter(Router):
|
|||
)
|
||||
)
|
||||
|
||||
if activity_pass_user_privacy_level(context, routing) is False:
|
||||
logger.info(
|
||||
"[federation] Discarding outbox dispatch due to user privacy_level"
|
||||
)
|
||||
return
|
||||
|
||||
if activity_pass_object_privacy_level(context, routing) is False:
|
||||
logger.info(
|
||||
"[federation] Discarding outbox dispatch due to object privacy_level"
|
||||
)
|
||||
return
|
||||
|
||||
for route, handler in self.routes:
|
||||
if not match_route(route, routing):
|
||||
continue
|
||||
|
@ -397,6 +469,7 @@ class OutboxRouter(Router):
|
|||
)
|
||||
|
||||
for a in activities:
|
||||
logger.info(f"[federation] OUtbox sending activity : {a.pk}")
|
||||
funkwhale_utils.on_commit(tasks.dispatch_outbox.delay, activity_id=a.pk)
|
||||
return activities
|
||||
|
||||
|
@ -554,12 +627,6 @@ def get_actors_from_audience(urls):
|
|||
final_query, Q(pk__in=actor_follows.values_list("actor", flat=True))
|
||||
)
|
||||
|
||||
library_follows = models.LibraryFollow.objects.filter(
|
||||
queries["followed"], approved=True
|
||||
)
|
||||
final_query = funkwhale_utils.join_queries_or(
|
||||
final_query, Q(pk__in=library_follows.values_list("actor", flat=True))
|
||||
)
|
||||
if not final_query:
|
||||
return models.Actor.objects.none()
|
||||
return models.Actor.objects.filter(final_query)
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import datetime
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from django.conf import settings
|
||||
from django.core import validators
|
||||
|
@ -55,7 +56,6 @@ class LibrarySerializer(serializers.ModelSerializer):
|
|||
"uuid",
|
||||
"actor",
|
||||
"name",
|
||||
"description",
|
||||
"creation_date",
|
||||
"uploads_count",
|
||||
"privacy_level",
|
||||
|
@ -97,6 +97,30 @@ class LibraryFollowSerializer(serializers.ModelSerializer):
|
|||
return federation_serializers.APIActorSerializer(o.actor).data
|
||||
|
||||
|
||||
class FollowSerializer(serializers.ModelSerializer):
|
||||
target = common_serializers.RelatedField(
|
||||
"fid", federation_serializers.APIActorSerializer(), required=True
|
||||
)
|
||||
actor = serializers.SerializerMethodField()
|
||||
|
||||
class Meta:
|
||||
model = models.Follow
|
||||
fields = ["creation_date", "actor", "uuid", "target", "approved"]
|
||||
read_only_fields = ["uuid", "actor", "approved", "creation_date"]
|
||||
|
||||
def validate_target(self, v):
|
||||
request_actor = self.context["actor"]
|
||||
if v == request_actor:
|
||||
raise serializers.ValidationError("You cannot follow yourself")
|
||||
if v.received_follows.filter(actor=request_actor).exists():
|
||||
raise serializers.ValidationError("You are already following this user")
|
||||
return v
|
||||
|
||||
@extend_schema_field(federation_serializers.APIActorSerializer)
|
||||
def get_actor(self, o):
|
||||
return federation_serializers.APIActorSerializer(o.actor).data
|
||||
|
||||
|
||||
def serialize_generic_relation(activity, obj):
|
||||
data = {"type": obj._meta.label}
|
||||
if data["type"] == "federation.Actor":
|
||||
|
@ -106,9 +130,11 @@ def serialize_generic_relation(activity, obj):
|
|||
|
||||
if data["type"] == "music.Library":
|
||||
data["name"] = obj.name
|
||||
if data["type"] == "federation.LibraryFollow":
|
||||
if (
|
||||
data["type"] == "federation.LibraryFollow"
|
||||
or data["type"] == "federation.Follow"
|
||||
):
|
||||
data["approved"] = obj.approved
|
||||
|
||||
return data
|
||||
|
||||
|
||||
|
@@ -178,6 +204,17 @@ FETCH_OBJECT_CONFIG = {
FETCH_OBJECT_FIELD = common_fields.GenericRelation(FETCH_OBJECT_CONFIG)


def convert_url_to_webginfer(url):
    parsed_url = urlparse(url)
    domain = parsed_url.netloc  # e.g., "node1.funkwhale.test"
    path_parts = parsed_url.path.strip("/").split("/")
    # Ensure the path is in the expected format
    if len(path_parts) > 0 and path_parts[0].startswith("@"):
        username = path_parts[0][1:]  # Remove the '@'
        return f"{username}@{domain}"
    return None


class FetchSerializer(serializers.ModelSerializer):
    actor = federation_serializers.APIActorSerializer(read_only=True)
    object = serializers.CharField(write_only=True)
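For reference, the helper above (whose name keeps the `webginfer` spelling used in the diff) turns a remote profile URL into a webfinger-style handle that the existing `@user@domain` lookup path already understands. A stand-alone copy to illustrate the expected input and output; the example URLs are made up:

```python
# Illustrative re-implementation of the helper added above.
from urllib.parse import urlparse


def convert_url_to_webginfer(url):  # name kept as in the diff
    parsed_url = urlparse(url)
    domain = parsed_url.netloc
    path_parts = parsed_url.path.strip("/").split("/")
    if len(path_parts) > 0 and path_parts[0].startswith("@"):
        return f"{path_parts[0][1:]}@{domain}"
    return None


print(convert_url_to_webginfer("https://node1.funkwhale.test/@alice"))
# -> "alice@node1.funkwhale.test"
print(convert_url_to_webginfer("https://node1.funkwhale.test/library/tracks/42"))
# -> None
```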
@@ -207,6 +244,10 @@ class FetchSerializer(serializers.ModelSerializer):
        ]

    def validate_object(self, value):
        if value.startswith("https://"):
            converted = convert_url_to_webginfer(value)
            if converted:
                value = converted
        # if value is a webfinger lookup, we craft a special url
        if value.startswith("@"):
            value = value.lstrip("@")
|
||||
|
|
|
@ -5,6 +5,7 @@ from . import api_views
|
|||
router = routers.OptionalSlashRouter()
|
||||
router.register(r"fetches", api_views.FetchViewSet, "fetches")
|
||||
router.register(r"follows/library", api_views.LibraryFollowViewSet, "library-follows")
|
||||
router.register(r"follows/user", api_views.UserFollowViewSet, "user-follows")
|
||||
router.register(r"inbox", api_views.InboxItemViewSet, "inbox")
|
||||
router.register(r"libraries", api_views.LibraryViewSet, "libraries")
|
||||
router.register(r"domains", api_views.DomainViewSet, "domains")
|
||||
|
|
|
@ -311,3 +311,106 @@ class ActorViewSet(mixins.RetrieveModelMixin, viewsets.GenericViewSet):
|
|||
filter_uploads=lambda o, uploads: uploads.filter(library__actor=o)
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_view(
|
||||
list=extend_schema(operation_id="get_federation_received_follows"),
|
||||
create=extend_schema(operation_id="create_federation_user_follow"),
|
||||
)
|
||||
class UserFollowViewSet(
|
||||
mixins.CreateModelMixin,
|
||||
mixins.ListModelMixin,
|
||||
mixins.RetrieveModelMixin,
|
||||
mixins.DestroyModelMixin,
|
||||
viewsets.GenericViewSet,
|
||||
):
|
||||
lookup_field = "uuid"
|
||||
queryset = (
|
||||
models.Follow.objects.all()
|
||||
.order_by("-creation_date")
|
||||
.select_related("actor", "target")
|
||||
.filter(actor__type="Person")
|
||||
)
|
||||
serializer_class = api_serializers.FollowSerializer
|
||||
permission_classes = [oauth_permissions.ScopePermission]
|
||||
required_scope = "follows"
|
||||
ordering_fields = ("creation_date",)
|
||||
|
||||
@extend_schema(operation_id="get_federation_user_follow")
|
||||
def retrieve(self, request, *args, **kwargs):
|
||||
return super().retrieve(request, *args, **kwargs)
|
||||
|
||||
@extend_schema(operation_id="delete_federation_user_follow")
|
||||
def destroy(self, request, uuid=None):
|
||||
return super().destroy(request, uuid)
|
||||
|
||||
def get_queryset(self):
|
||||
qs = super().get_queryset()
|
||||
return qs.filter(
|
||||
Q(target=self.request.user.actor) | Q(actor=self.request.user.actor)
|
||||
).exclude(approved=False)
|
||||
|
||||
def perform_create(self, serializer):
|
||||
follow = serializer.save(actor=self.request.user.actor)
|
||||
routes.outbox.dispatch({"type": "Follow"}, context={"follow": follow})
|
||||
|
||||
@transaction.atomic
|
||||
def perform_destroy(self, instance):
|
||||
routes.outbox.dispatch(
|
||||
{"type": "Undo", "object": {"type": "Follow"}}, context={"follow": instance}
|
||||
)
|
||||
instance.delete()
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context["actor"] = self.request.user.actor
|
||||
return context
|
||||
|
||||
@extend_schema(
|
||||
operation_id="accept_federation_user_follow",
|
||||
responses={404: None, 204: None},
|
||||
)
|
||||
@decorators.action(methods=["post"], detail=True)
|
||||
def accept(self, request, *args, **kwargs):
|
||||
try:
|
||||
follow = self.queryset.get(
|
||||
target=self.request.user.actor, uuid=kwargs["uuid"]
|
||||
)
|
||||
except models.Follow.DoesNotExist:
|
||||
return response.Response({}, status=404)
|
||||
update_follow(follow, approved=True)
|
||||
return response.Response(status=204)
|
||||
|
||||
@extend_schema(operation_id="reject_federation_user_follow")
|
||||
@decorators.action(methods=["post"], detail=True)
|
||||
def reject(self, request, *args, **kwargs):
|
||||
try:
|
||||
follow = self.queryset.get(
|
||||
target=self.request.user.actor, uuid=kwargs["uuid"]
|
||||
)
|
||||
except models.Follow.DoesNotExist:
|
||||
return response.Response({}, status=404)
|
||||
|
||||
update_follow(follow, approved=False)
|
||||
return response.Response(status=204)
|
||||
|
||||
@extend_schema(operation_id="get_all_federation_library_follows")
|
||||
@decorators.action(methods=["get"], detail=False)
|
||||
def all(self, request, *args, **kwargs):
|
||||
"""
|
||||
Return all the subscriptions of the current user, with only limited data
|
||||
to have a performant endpoint and avoid lots of queries just to display
|
||||
subscription status in the UI
|
||||
"""
|
||||
follows = list(
|
||||
self.get_queryset().values_list("uuid", "target__fid", "approved")
|
||||
)
|
||||
|
||||
payload = {
|
||||
"results": [
|
||||
{"uuid": str(u[0]), "actor": str(u[1]), "approved": u[2]}
|
||||
for u in follows
|
||||
],
|
||||
"count": len(follows),
|
||||
}
|
||||
return response.Response(payload, status=200)
|
||||
|
|
|
@ -81,11 +81,12 @@ class SignatureAuthentication(authentication.BaseAuthentication):
|
|||
fetch_delay = 24 * 3600
|
||||
now = timezone.now()
|
||||
last_fetch = actor.domain.nodeinfo_fetch_date
|
||||
if not last_fetch or (
|
||||
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
|
||||
):
|
||||
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
|
||||
actor.domain.refresh_from_db()
|
||||
if not actor.domain.is_local:
|
||||
if not last_fetch or (
|
||||
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
|
||||
):
|
||||
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
|
||||
actor.domain.refresh_from_db()
|
||||
return actor
|
||||
|
||||
def authenticate(self, request):
|
||||
|
|
|
@ -293,7 +293,10 @@ CONTEXTS = [
|
|||
"Album": "fw:Album",
|
||||
"Track": "fw:Track",
|
||||
"Artist": "fw:Artist",
|
||||
"ArtistCredit": "fw:ArtistCredit",
|
||||
"Library": "fw:Library",
|
||||
"Playlist": "fw:Playlist",
|
||||
"PlaylistTrack": "fw:PlaylistTrack",
|
||||
"bitrate": {"@id": "fw:bitrate", "@type": "xsd:nonNegativeInteger"},
|
||||
"size": {"@id": "fw:size", "@type": "xsd:nonNegativeInteger"},
|
||||
"position": {"@id": "fw:position", "@type": "xsd:nonNegativeInteger"},
|
||||
|
@ -302,13 +305,23 @@ CONTEXTS = [
|
|||
"track": {"@id": "fw:track", "@type": "@id"},
|
||||
"cover": {"@id": "fw:cover", "@type": "as:Link"},
|
||||
"album": {"@id": "fw:album", "@type": "@id"},
|
||||
"artist": {"@id": "fw:artist", "@type": "@id"},
|
||||
"artists": {"@id": "fw:artists", "@type": "@id", "@container": "@list"},
|
||||
"artist_credit": {
|
||||
"@id": "fw:artist_credit",
|
||||
"@type": "@id",
|
||||
"@container": "@list",
|
||||
},
|
||||
"joinphrase": {"@id": "fw:joinphrase", "@type": "xsd:string"},
|
||||
"credit": {"@id": "fw:credit", "@type": "xsd:string"},
|
||||
"index": {"@id": "fw:index", "@type": "xsd:nonNegativeInteger"},
|
||||
"released": {"@id": "fw:released", "@type": "xsd:date"},
|
||||
"musicbrainzId": "fw:musicbrainzId",
|
||||
"license": {"@id": "fw:license", "@type": "@id"},
|
||||
"copyright": "fw:copyright",
|
||||
"category": "schema:category",
|
||||
"language": "schema:inLanguage",
|
||||
"playlist": {"@id": "fw:playlist", "@type": "@id"},
|
||||
}
|
||||
},
|
||||
},
|
||||
|
|
|
@ -128,11 +128,6 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
class Meta:
|
||||
model = models.Actor
|
||||
|
||||
class Params:
|
||||
with_real_keys = factory.Trait(
|
||||
keys=factory.LazyFunction(keys.get_key_pair),
|
||||
)
|
||||
|
||||
@factory.post_generation
|
||||
def local(self, create, extracted, **kwargs):
|
||||
if not extracted and not kwargs:
|
||||
|
@ -153,6 +148,26 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
extracted.actor = self
|
||||
extracted.save(update_fields=["user"])
|
||||
else:
|
||||
user = UserFactory(actor=self, **kwargs)
|
||||
user.actor = self
|
||||
user.save()
|
||||
|
||||
@factory.post_generation
|
||||
def user(self, create, extracted, **kwargs):
|
||||
"""
|
||||
Handle the creation or assignment of the related user instance.
|
||||
If `actor__user` is passed, it will be linked; otherwise, no user is created.
|
||||
"""
|
||||
if not create:
|
||||
return
|
||||
|
||||
if extracted: # If a User instance is provided
|
||||
extracted.actor = self
|
||||
extracted.save(update_fields=["actor"])
|
||||
elif kwargs:
|
||||
from funkwhale_api.users.factories import UserFactory
|
||||
|
||||
# Create a User linked to this Actor
|
||||
self.user = UserFactory(actor=self, **kwargs)
|
||||
|
||||
|
||||
|
@ -170,22 +185,25 @@ class FollowFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
|||
|
||||
@registry.register
|
||||
class MusicLibraryFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
|
||||
uuid = factory.Faker("uuid4")
|
||||
actor = factory.SubFactory(ActorFactory)
|
||||
privacy_level = "me"
|
||||
name = factory.Faker("sentence")
|
||||
description = factory.Faker("sentence")
|
||||
name = privacy_level
|
||||
uploads_count = 0
|
||||
fid = factory.Faker("federation_url")
|
||||
followers_url = factory.LazyAttribute(
|
||||
lambda o: o.fid + "/followers" if o.fid else None
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = "music.Library"
|
||||
|
||||
class Params:
|
||||
local = factory.Trait(
|
||||
fid=None, actor=factory.SubFactory(ActorFactory, local=True)
|
||||
fid=factory.Faker(
|
||||
"federation_url",
|
||||
local=True,
|
||||
prefix="federation/music/libraries",
|
||||
obj_uuid=factory.SelfAttribute("..uuid"),
|
||||
),
|
||||
actor=factory.SubFactory(ActorFactory, local=True),
|
||||
)
|
||||
|
||||
|
||||
|
|
|
@ -191,7 +191,6 @@ def prepare_for_serializer(payload, config, fallbacks={}):
|
|||
value = noop
|
||||
if not aliases:
|
||||
continue
|
||||
|
||||
for a in aliases:
|
||||
try:
|
||||
value = get_value(
|
||||
|
@ -279,7 +278,6 @@ class JsonLdSerializer(serializers.Serializer):
|
|||
for field in dereferenced_fields:
|
||||
for i in get_ids(data[field]):
|
||||
dereferenced_ids.add(i)
|
||||
|
||||
if dereferenced_ids:
|
||||
try:
|
||||
loop = asyncio.get_event_loop()
|
||||
|
|
|
@ -9,7 +9,7 @@ MODELS = [
|
|||
(music_models.Album, ["fid"]),
|
||||
(music_models.Track, ["fid"]),
|
||||
(music_models.Upload, ["fid"]),
|
||||
(music_models.Library, ["fid", "followers_url"]),
|
||||
(music_models.Library, ["fid"]),
|
||||
(
|
||||
federation_models.Actor,
|
||||
[
|
||||
|
|
|
@ -218,7 +218,6 @@ class Actor(models.Model):
|
|||
on_delete=models.SET_NULL,
|
||||
related_name="iconed_actor",
|
||||
)
|
||||
|
||||
objects = ActorQuerySet.as_manager()
|
||||
|
||||
class Meta:
|
||||
|
@ -251,9 +250,15 @@ class Actor(models.Model):
|
|||
follows = self.received_follows.filter(approved=True)
|
||||
return self.followers.filter(pk__in=follows.values_list("actor", flat=True))
|
||||
|
||||
def get_approved_followings(self):
|
||||
follows = self.emitted_follows.filter(approved=True)
|
||||
return Actor.objects.filter(pk__in=follows.values_list("target", flat=True))
|
||||
|
||||
def should_autoapprove_follow(self, actor):
|
||||
if self.get_channel():
|
||||
return True
|
||||
if self.user.privacy_level == "public":
|
||||
return True
|
||||
return False
|
||||
|
||||
def get_user(self):
|
||||
|
|
|
@ -3,7 +3,10 @@ import uuid
|
|||
|
||||
from django.db.models import Q
|
||||
|
||||
from funkwhale_api.favorites import models as favorites_models
|
||||
from funkwhale_api.history import models as history_models
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.playlists import models as playlist_models
|
||||
|
||||
from . import activity, actors, models, serializers
|
||||
|
||||
|
@ -163,7 +166,7 @@ def outbox_follow(context):
|
|||
def outbox_create_audio(context):
|
||||
upload = context["upload"]
|
||||
channel = upload.library.get_channel()
|
||||
followers_target = channel.actor if channel else upload.library
|
||||
followers_target = channel.actor if channel else upload.library.actor
|
||||
actor = channel.actor if channel else upload.library.actor
|
||||
if channel:
|
||||
serializer = serializers.ChannelCreateUploadSerializer(upload)
|
||||
|
@ -293,7 +296,7 @@ def inbox_delete_audio(payload, context):
|
|||
upload_fids = [payload["object"]["id"]]
|
||||
|
||||
query = Q(fid__in=upload_fids) & (
|
||||
Q(library__actor=actor) | Q(track__artist__channel__actor=actor)
|
||||
Q(library__actor=actor) | Q(track__artist_credit__artist__channel__actor=actor)
|
||||
)
|
||||
candidates = music_models.Upload.objects.filter(query)
|
||||
|
||||
|
@ -307,8 +310,8 @@ def outbox_delete_audio(context):
|
|||
uploads = context["uploads"]
|
||||
library = uploads[0].library
|
||||
channel = library.get_channel()
|
||||
followers_target = channel.actor if channel else library
|
||||
actor = channel.actor if channel else library.actor
|
||||
followers_target = channel.actor if channel else actor
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Delete",
|
||||
|
@ -577,7 +580,9 @@ def inbox_delete_album(payload, context):
|
|||
logger.debug("Discarding deletion of empty library")
|
||||
return
|
||||
|
||||
query = Q(fid=album_id) & (Q(attributed_to=actor) | Q(artist__channel__actor=actor))
|
||||
query = Q(fid=album_id) & (
|
||||
Q(attributed_to=actor) | Q(artist_credit__artist__channel__actor=actor)
|
||||
)
|
||||
try:
|
||||
album = music_models.Album.objects.get(query)
|
||||
except music_models.Album.DoesNotExist:
|
||||
|
@ -590,9 +595,10 @@ def inbox_delete_album(payload, context):
|
|||
@outbox.register({"type": "Delete", "object.type": "Album"})
|
||||
def outbox_delete_album(context):
|
||||
album = context["album"]
|
||||
album_artist = album.artist_credit.all()[0].artist
|
||||
actor = (
|
||||
album.artist.channel.actor
|
||||
if album.artist.get_channel()
|
||||
album_artist.channel.actor
|
||||
if album_artist.get_channel()
|
||||
else album.attributed_to
|
||||
)
|
||||
actor = actor or actors.get_service_actor()
|
||||
|
@ -608,3 +614,231 @@ def outbox_delete_album(context):
|
|||
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Like", "object.type": "Track"})
|
||||
def outbox_create_track_favorite(context):
|
||||
track = context["track"]
|
||||
actor = context["actor"]
|
||||
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Like",
|
||||
"id": context["id"],
|
||||
"object": {"type": "Track", "id": track.fid},
|
||||
}
|
||||
)
|
||||
yield {
|
||||
"type": "Like",
|
||||
"actor": actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": actor}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Dislike", "object.type": "Track"})
|
||||
def outbox_delete_favorite(context):
|
||||
favorite = context["favorite"]
|
||||
actor = favorite.actor
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Dislike", "object": {"type": "Track", "id": favorite.track.fid}}
|
||||
)
|
||||
yield {
|
||||
"type": "Dislike",
|
||||
"actor": actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": actor}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@inbox.register({"type": "Like", "object.type": "Track"})
|
||||
def inbox_create_favorite(payload, context):
|
||||
serializer = serializers.TrackFavoriteSerializer(data=payload)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
instance = serializer.save()
|
||||
return {"object": instance}
|
||||
|
||||
|
||||
@inbox.register({"type": "Dislike", "object.type": "Track"})
|
||||
def inbox_delete_favorite(payload, context):
|
||||
actor = context["actor"]
|
||||
track_id = payload["object"].get("id")
|
||||
|
||||
query = Q(track__fid=track_id) & Q(actor=actor)
|
||||
try:
|
||||
favorite = favorites_models.TrackFavorite.objects.get(query)
|
||||
except favorites_models.TrackFavorite.DoesNotExist:
|
||||
logger.debug(
|
||||
"Discarding deletion of unkwnown favorite with track : %s", track_id
|
||||
)
|
||||
return
|
||||
favorite.delete()
|
||||
|
||||
|
||||
# TODO: test listening routes and broadcast
|
||||
|
||||
|
||||
@outbox.register({"type": "Listen", "object.type": "Track"})
|
||||
def outbox_create_listening(context):
|
||||
track = context["track"]
|
||||
actor = context["actor"]
|
||||
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Listen",
|
||||
"id": context["id"],
|
||||
"object": {"type": "Track", "id": track.fid},
|
||||
}
|
||||
)
|
||||
yield {
|
||||
"type": "Listen",
|
||||
"actor": actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": actor}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Delete", "object.type": "Listen"})
|
||||
def outbox_delete_listening(context):
|
||||
listening = context["listening"]
|
||||
actor = listening.actor
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Delete", "object": {"type": "Listen", "id": listening.fid}}
|
||||
)
|
||||
yield {
|
||||
"type": "Delete",
|
||||
"actor": actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": actor}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@inbox.register({"type": "Listen", "object.type": "Track"})
|
||||
def inbox_create_listening(payload, context):
|
||||
serializer = serializers.ListeningSerializer(data=payload)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
instance = serializer.save()
|
||||
return {"object": instance}
|
||||
|
||||
|
||||
@inbox.register({"type": "Delete", "object.type": "Listen"})
|
||||
def inbox_delete_listening(payload, context):
|
||||
actor = context["actor"]
|
||||
listening_id = payload["object"].get("id")
|
||||
|
||||
query = Q(fid=listening_id) & Q(actor=actor)
|
||||
try:
|
||||
favorite = history_models.Listening.objects.get(query)
|
||||
except history_models.Listening.DoesNotExist:
|
||||
logger.debug("Discarding deletion of unkwnown listening %s", listening_id)
|
||||
return
|
||||
favorite.delete()
|
||||
|
||||
|
||||
@outbox.register({"type": "Create", "object.type": "Playlist"})
|
||||
def outbox_create_playlist(context):
|
||||
playlist = context["playlist"]
|
||||
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{
|
||||
"type": "Create",
|
||||
"actor": playlist.actor,
|
||||
"id": playlist.fid,
|
||||
"object": serializers.PlaylistSerializer(playlist).data,
|
||||
}
|
||||
)
|
||||
yield {
|
||||
"type": "Create",
|
||||
"actor": playlist.actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": playlist.actor}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@outbox.register({"type": "Delete", "object.type": "Playlist"})
|
||||
def outbox_delete_playlist(context):
|
||||
playlist = context["playlist"]
|
||||
actor = playlist.actor
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Delete", "object": {"type": "Playlist", "id": playlist.fid}}
|
||||
)
|
||||
yield {
|
||||
"type": "Delete",
|
||||
"actor": actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[activity.PUBLIC_ADDRESS, {"type": "instances_with_followers"}],
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
@inbox.register({"type": "Create", "object.type": "Playlist"})
|
||||
def inbox_create_playlist(payload, context):
|
||||
serializer = serializers.PlaylistSerializer(data=payload["object"])
|
||||
serializer.is_valid(raise_exception=True)
|
||||
instance = serializer.save()
|
||||
return {"object": instance}
|
||||
|
||||
|
||||
@inbox.register({"type": "Delete", "object.type": "Playlist"})
|
||||
def inbox_delete_playlist(payload, context):
|
||||
actor = context["actor"]
|
||||
playlist_id = payload["object"].get("id")
|
||||
|
||||
query = Q(fid=playlist_id) & Q(actor=actor)
|
||||
try:
|
||||
playlist = playlist_models.Playlist.objects.get(query)
|
||||
except playlist_models.Playlist.DoesNotExist:
|
||||
logger.debug("Discarding deletion of unkwnown listening %s", playlist_id)
|
||||
return
|
||||
playlist.playlist_tracks.all().delete()
|
||||
playlist.delete()
|
||||
|
||||
|
||||
@inbox.register({"type": "Update", "object.type": "Playlist"})
|
||||
def inbox_update_playlist(payload, context):
|
||||
actor = context["actor"]
|
||||
playlist_id = payload["object"].get("id")
|
||||
|
||||
if not actor.playlists.filter(fid=playlist_id).exists():
|
||||
logger.debug("Discarding update of unkwnown playlist_id %s", playlist_id)
|
||||
return
|
||||
|
||||
serializer = serializers.PlaylistSerializer(data=payload["object"])
|
||||
if serializer.is_valid(raise_exception=True):
|
||||
playlist = serializer.save()
|
||||
# we trigger a scan since we use this activity to avoid sending many PlaylistTracks activities
|
||||
playlist.schedule_scan(actors.get_service_actor())
|
||||
return
|
||||
else:
|
||||
logger.debug(
|
||||
"Discarding update of playlist_id %s because of payload errors: %s",
|
||||
playlist_id,
|
||||
serializer.errors,
|
||||
)
|
||||
|
||||
|
||||
@outbox.register({"type": "Update", "object.type": "Playlist"})
|
||||
def outbox_update_playlist(context):
|
||||
playlist = context["playlist"]
|
||||
serializer = serializers.ActivitySerializer(
|
||||
{"type": "Update", "object": serializers.PlaylistSerializer(playlist).data}
|
||||
)
|
||||
yield {
|
||||
"type": "Update",
|
||||
"actor": playlist.actor,
|
||||
"payload": with_recipients(
|
||||
serializer.data,
|
||||
to=[{"type": "followers", "target": playlist.actor}],
|
||||
),
|
||||
}
|
||||
|
|
|
@ -1,27 +1,31 @@
|
|||
import logging
|
||||
import os
|
||||
import re
|
||||
import urllib.parse
|
||||
import uuid
|
||||
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.paginator import Paginator
|
||||
from django.db import transaction
|
||||
from django.db.models import Q
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
from rest_framework import serializers
|
||||
|
||||
from funkwhale_api.common import models as common_models
|
||||
from funkwhale_api.common import utils as common_utils
|
||||
from funkwhale_api.favorites import models as favorites_models
|
||||
from funkwhale_api.federation import activity, actors, contexts, jsonld, models, utils
|
||||
from funkwhale_api.history import models as history_models
|
||||
from funkwhale_api.moderation import models as moderation_models
|
||||
from funkwhale_api.moderation import serializers as moderation_serializers
|
||||
from funkwhale_api.moderation import signals as moderation_signals
|
||||
from funkwhale_api.music import licenses
|
||||
from funkwhale_api.music import models as music_models
|
||||
from funkwhale_api.music import tasks as music_tasks
|
||||
from funkwhale_api.playlists import models as playlists_models
|
||||
from funkwhale_api.tags import models as tags_models
|
||||
|
||||
from . import activity, actors, contexts, jsonld, models, utils
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
|
@ -340,9 +344,11 @@ class ActorSerializer(jsonld.JsonLdSerializer):
|
|||
ret["url"] = [
|
||||
{
|
||||
"type": "Link",
|
||||
"href": instance.channel.get_absolute_url()
|
||||
if instance.channel.artist.is_local
|
||||
else instance.get_absolute_url(),
|
||||
"href": (
|
||||
instance.channel.get_absolute_url()
|
||||
if instance.channel.artist.is_local
|
||||
else instance.get_absolute_url()
|
||||
),
|
||||
"mediaType": "text/html",
|
||||
},
|
||||
{
|
||||
|
@ -436,9 +442,11 @@ class ActorSerializer(jsonld.JsonLdSerializer):
|
|||
common_utils.attach_file(
|
||||
actor,
|
||||
"attachment_icon",
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None,
|
||||
(
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
rss_url = get_by_media_type(
|
||||
|
@ -491,9 +499,11 @@ def create_or_update_channel(actor, rss_url, attributed_to_fid, **validated_data
|
|||
common_utils.attach_file(
|
||||
artist,
|
||||
"attachment_cover",
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None,
|
||||
(
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None
|
||||
),
|
||||
)
|
||||
tags = [t["name"] for t in validated_data.get("tags", []) or []]
|
||||
tags_models.set_tags(artist, *tags)
|
||||
|
@ -644,7 +654,6 @@ class FollowSerializer(serializers.Serializer):
|
|||
|
||||
def save(self, **kwargs):
|
||||
target = self.validated_data["object"]
|
||||
|
||||
if target._meta.label == "music.Library":
|
||||
follow_class = models.LibraryFollow
|
||||
else:
|
||||
|
@ -812,7 +821,9 @@ class UndoFollowSerializer(serializers.Serializer):
|
|||
actor=validated_data["actor"], target=target
|
||||
).get()
|
||||
except follow_class.DoesNotExist:
|
||||
raise serializers.ValidationError("No follow to remove")
|
||||
raise serializers.ValidationError(
|
||||
f"No follow to remove follow_class = {follow_class}"
|
||||
)
|
||||
return validated_data
|
||||
|
||||
def to_representation(self, instance):
|
||||
|
@ -879,7 +890,6 @@ class ActivitySerializer(serializers.Serializer):
|
|||
object_serializer = OBJECT_SERIALIZERS[type]
|
||||
except KeyError:
|
||||
raise serializers.ValidationError(f"Unsupported type {type}")
|
||||
|
||||
serializer = object_serializer(data=value)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
return serializer.data
|
||||
|
@ -964,7 +974,7 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
|
|||
first = common_utils.set_query_parameter(conf["id"], page=1)
|
||||
current = first
|
||||
last = common_utils.set_query_parameter(conf["id"], page=paginator.num_pages)
|
||||
d = {
|
||||
data = {
|
||||
"id": conf["id"],
|
||||
"attributedTo": conf["actor"].fid,
|
||||
"totalItems": paginator.count,
|
||||
|
@ -973,10 +983,10 @@ class PaginatedCollectionSerializer(jsonld.JsonLdSerializer):
|
|||
"first": first,
|
||||
"last": last,
|
||||
}
|
||||
d.update(get_additional_fields(conf))
|
||||
data.update(get_additional_fields(conf))
|
||||
if self.context.get("include_ap_context", True):
|
||||
d["@context"] = jsonld.get_default_context()
|
||||
return d
|
||||
data["@context"] = jsonld.get_default_context()
|
||||
return data
|
||||
|
||||
|
||||
class LibrarySerializer(PaginatedCollectionSerializer):
|
||||
|
@ -986,8 +996,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
actor = serializers.URLField(max_length=500, required=False)
|
||||
attributedTo = serializers.URLField(max_length=500, required=False)
|
||||
name = serializers.CharField()
|
||||
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
|
||||
followers = serializers.URLField(max_length=500)
|
||||
audience = serializers.ChoiceField(
|
||||
choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"],
|
||||
required=False,
|
||||
|
@ -1004,9 +1012,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
PAGINATED_COLLECTION_JSONLD_MAPPING,
|
||||
{
|
||||
"name": jsonld.first_val(contexts.AS.name),
|
||||
"summary": jsonld.first_val(contexts.AS.summary),
|
||||
"audience": jsonld.first_id(contexts.AS.audience),
|
||||
"followers": jsonld.first_id(contexts.AS.followers),
|
||||
"actor": jsonld.first_id(contexts.AS.actor),
|
||||
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
|
||||
},
|
||||
|
@ -1028,7 +1034,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
conf = {
|
||||
"id": library.fid,
|
||||
"name": library.name,
|
||||
"summary": library.description,
|
||||
"page_size": 100,
|
||||
"attributedTo": library.actor,
|
||||
"actor": library.actor,
|
||||
|
@ -1039,7 +1044,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
r["audience"] = (
|
||||
contexts.AS.Public if library.privacy_level == "everyone" else ""
|
||||
)
|
||||
r["followers"] = library.followers_url
|
||||
return r
|
||||
|
||||
def create(self, validated_data):
|
||||
|
@ -1059,8 +1063,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
|
|||
defaults={
|
||||
"uploads_count": validated_data["totalItems"],
|
||||
"name": validated_data["name"],
|
||||
"description": validated_data.get("summary"),
|
||||
"followers_url": validated_data["followers"],
|
||||
"privacy_level": privacy[validated_data["audience"]],
|
||||
},
|
||||
)
|
||||
|
@ -1221,12 +1223,22 @@ class MusicEntitySerializer(jsonld.JsonLdSerializer):
|
|||
self.updateable_fields, validated_data, instance
|
||||
)
|
||||
updated_fields = self.validate_updated_data(instance, updated_fields)
|
||||
|
||||
set_ac = False
|
||||
if "artist_credit" in updated_fields:
|
||||
artist_credit = updated_fields.pop("artist_credit")
|
||||
set_ac = True
|
||||
|
||||
if creating:
|
||||
instance, created = self.Meta.model.objects.get_or_create(
|
||||
fid=validated_data["id"], defaults=updated_fields
|
||||
)
|
||||
if set_ac:
|
||||
instance.artist_credit.set(artist_credit)
|
||||
else:
|
||||
music_tasks.update_library_entity(instance, updated_fields)
|
||||
obj = music_tasks.update_library_entity(instance, updated_fields)
|
||||
if set_ac:
|
||||
obj.artist_credit.set(artist_credit)
|
||||
|
||||
tags = [t["name"] for t in validated_data.get("tags", []) or []]
|
||||
tags_models.set_tags(instance, *tags)
|
||||
|
@ -1288,7 +1300,6 @@ class ArtistSerializer(MusicEntitySerializer):
|
|||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"released": jsonld.first_val(contexts.FW.released),
|
||||
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
|
||||
"image": jsonld.first_obj(contexts.AS.image),
|
||||
},
|
||||
)
|
||||
|
@ -1300,9 +1311,9 @@ class ArtistSerializer(MusicEntitySerializer):
|
|||
"name": instance.name,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
"attributedTo": (
|
||||
instance.attributed_to.fid if instance.attributed_to else None
|
||||
),
|
||||
"tag": self.get_tags_repr(instance),
|
||||
}
|
||||
include_content(d, instance.description)
|
||||
|
@ -1314,12 +1325,53 @@ class ArtistSerializer(MusicEntitySerializer):
|
|||
create = MusicEntitySerializer.update_or_create
|
||||
|
||||
|
||||
class ArtistCreditSerializer(jsonld.JsonLdSerializer):
|
||||
artist = ArtistSerializer()
|
||||
joinphrase = serializers.CharField(
|
||||
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
|
||||
)
|
||||
credit = serializers.CharField(
|
||||
trim_whitespace=False, required=False, allow_null=True, allow_blank=True
|
||||
)
|
||||
published = serializers.DateTimeField()
|
||||
id = serializers.URLField(max_length=500)
|
||||
|
||||
updateable_fields = [
|
||||
("credit", "credit"),
|
||||
("artist", "artist"),
|
||||
("joinphrase", "joinphrase"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
model = music_models.ArtistCredit
|
||||
jsonld_mapping = {
|
||||
"artist": jsonld.first_obj(contexts.FW.artist),
|
||||
"credit": jsonld.first_val(contexts.FW.credit),
|
||||
"index": jsonld.first_val(contexts.FW.index),
|
||||
"joinphrase": jsonld.first_val(contexts.FW.joinphrase),
|
||||
"published": jsonld.first_val(contexts.AS.published),
|
||||
}
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = {
|
||||
"type": "ArtistCredit",
|
||||
"id": instance.fid,
|
||||
"artist": ArtistSerializer(
|
||||
instance.artist, context={"include_ap_context": False}
|
||||
).data,
|
||||
"joinphrase": instance.joinphrase,
|
||||
"credit": instance.credit,
|
||||
"index": instance.index,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
}
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
data["@context"] = jsonld.get_default_context()
|
||||
return data
|
||||
|
||||
|
||||
class AlbumSerializer(MusicEntitySerializer):
|
||||
released = serializers.DateField(allow_null=True, required=False)
|
||||
artists = serializers.ListField(
|
||||
child=MultipleSerializer(allowed=[BasicActorSerializer, ArtistSerializer]),
|
||||
min_length=1,
|
||||
)
|
||||
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
|
||||
image = ImageSerializer(
|
||||
allowed_mimetypes=["image/*"],
|
||||
allow_null=True,
|
||||
|
@ -1332,7 +1384,7 @@ class AlbumSerializer(MusicEntitySerializer):
|
|||
("musicbrainzId", "mbid"),
|
||||
("attributedTo", "attributed_to"),
|
||||
("released", "release_date"),
|
||||
("_artist", "artist"),
|
||||
("artist_credit", "artist_credit"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
|
@ -1341,62 +1393,60 @@ class AlbumSerializer(MusicEntitySerializer):
|
|||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"released": jsonld.first_val(contexts.FW.released),
|
||||
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
|
||||
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
|
||||
"image": jsonld.first_obj(contexts.AS.image),
|
||||
},
|
||||
)
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
data = {
|
||||
"type": "Album",
|
||||
"id": instance.fid,
|
||||
"name": instance.title,
|
||||
"published": instance.creation_date.isoformat(),
|
||||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"released": instance.release_date.isoformat()
|
||||
if instance.release_date
|
||||
else None,
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
"released": (
|
||||
instance.release_date.isoformat() if instance.release_date else None
|
||||
),
|
||||
"attributedTo": (
|
||||
instance.attributed_to.fid if instance.attributed_to else None
|
||||
),
|
||||
"tag": self.get_tags_repr(instance),
|
||||
}
|
||||
if instance.artist.get_channel():
|
||||
d["artists"] = [
|
||||
{
|
||||
"type": instance.artist.channel.actor.type,
|
||||
"id": instance.artist.channel.actor.fid,
|
||||
}
|
||||
]
|
||||
else:
|
||||
d["artists"] = [
|
||||
ArtistSerializer(
|
||||
instance.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
]
|
||||
include_content(d, instance.description)
|
||||
|
||||
data["artist_credit"] = ArtistCreditSerializer(
|
||||
instance.artist_credit.all(),
|
||||
context={"include_ap_context": False},
|
||||
many=True,
|
||||
).data
|
||||
include_content(data, instance.description)
|
||||
if instance.attachment_cover:
|
||||
include_image(d, instance.attachment_cover)
|
||||
include_image(data, instance.attachment_cover)
|
||||
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = jsonld.get_default_context()
|
||||
return d
|
||||
data["@context"] = jsonld.get_default_context()
|
||||
return data
|
||||
|
||||
def validate(self, data):
|
||||
validated_data = super().validate(data)
|
||||
if not self.parent:
|
||||
artist_data = validated_data["artists"][0]
|
||||
if artist_data.get("type", "Artist") == "Artist":
|
||||
validated_data["_artist"] = utils.retrieve_ap_object(
|
||||
artist_data["id"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=music_models.Artist,
|
||||
serializer_class=ArtistSerializer,
|
||||
)
|
||||
artist_credit_data = validated_data["artist_credit"]
|
||||
if artist_credit_data[0]["artist"].get("type", "Artist") == "Artist":
|
||||
acs = []
|
||||
for ac in validated_data["artist_credit"]:
|
||||
acs.append(
|
||||
utils.retrieve_ap_object(
|
||||
ac["id"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=music_models.ArtistCredit,
|
||||
serializer_class=ArtistCreditSerializer,
|
||||
)
|
||||
)
|
||||
validated_data["artist_credit"] = acs
|
||||
else:
|
||||
# we have an actor as an artist, so it's a channel
|
||||
actor = actors.get_actor(artist_data["id"])
|
||||
validated_data["_artist"] = actor.channel.artist
|
||||
actor = actors.get_actor(artist_credit_data[0]["artist"]["id"])
|
||||
validated_data["artist_credit"] = [{"artist": actor.channel.artist}]
|
||||
|
||||
return validated_data
|
||||
|
||||
|
@ -1406,7 +1456,7 @@ class AlbumSerializer(MusicEntitySerializer):
|
|||
class TrackSerializer(MusicEntitySerializer):
|
||||
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
|
||||
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
|
||||
artists = serializers.ListField(child=ArtistSerializer(), min_length=1)
|
||||
artist_credit = serializers.ListField(child=ArtistCreditSerializer(), min_length=1)
|
||||
album = AlbumSerializer()
|
||||
license = serializers.URLField(allow_null=True, required=False)
|
||||
copyright = serializers.CharField(allow_null=True, required=False)
|
||||
|
@ -1434,7 +1484,7 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"album": jsonld.first_obj(contexts.FW.album),
|
||||
"artists": jsonld.first_attr(contexts.FW.artists, "@list"),
|
||||
"artist_credit": jsonld.first_attr(contexts.FW.artist_credit, "@list"),
|
||||
"copyright": jsonld.first_val(contexts.FW.copyright),
|
||||
"disc": jsonld.first_val(contexts.FW.disc),
|
||||
"license": jsonld.first_id(contexts.FW.license),
|
||||
|
@ -1444,7 +1494,7 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
)
|
||||
|
||||
def to_representation(self, instance):
|
||||
d = {
|
||||
data = {
|
||||
"type": "Track",
|
||||
"id": instance.fid,
|
||||
"name": instance.title,
|
||||
|
@ -1452,29 +1502,32 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
"musicbrainzId": str(instance.mbid) if instance.mbid else None,
|
||||
"position": instance.position,
|
||||
"disc": instance.disc_number,
|
||||
"license": instance.local_license["identifiers"][0]
|
||||
if instance.local_license
|
||||
else None,
|
||||
"license": (
|
||||
instance.local_license["identifiers"][0]
|
||||
if instance.local_license
|
||||
else None
|
||||
),
|
||||
"copyright": instance.copyright if instance.copyright else None,
|
||||
"artists": [
|
||||
ArtistSerializer(
|
||||
instance.artist, context={"include_ap_context": False}
|
||||
).data
|
||||
],
|
||||
"artist_credit": ArtistCreditSerializer(
|
||||
instance.artist_credit.all(),
|
||||
context={"include_ap_context": False},
|
||||
many=True,
|
||||
).data,
|
||||
"album": AlbumSerializer(
|
||||
instance.album, context={"include_ap_context": False}
|
||||
).data,
|
||||
"attributedTo": instance.attributed_to.fid
|
||||
if instance.attributed_to
|
||||
else None,
|
||||
"attributedTo": (
|
||||
instance.attributed_to.fid if instance.attributed_to else None
|
||||
),
|
||||
"tag": self.get_tags_repr(instance),
|
||||
}
|
||||
include_content(d, instance.description)
|
||||
include_image(d, instance.attachment_cover)
|
||||
include_content(data, instance.description)
|
||||
include_image(data, instance.attachment_cover)
|
||||
if self.context.get("include_ap_context", self.parent is None):
|
||||
d["@context"] = jsonld.get_default_context()
|
||||
return d
|
||||
data["@context"] = jsonld.get_default_context()
|
||||
return data
|
||||
|
||||
@transaction.atomic
|
||||
def create(self, validated_data):
|
||||
from funkwhale_api.music import tasks as music_tasks
|
||||
|
||||
|
@ -1490,18 +1543,21 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
validated_data, "album.attributedTo", permissive=True
|
||||
)
|
||||
)
|
||||
artists = (
|
||||
common_utils.recursive_getattr(validated_data, "artists", permissive=True)
|
||||
or []
|
||||
)
|
||||
album_artists = (
|
||||
artist_credit = (
|
||||
common_utils.recursive_getattr(
|
||||
validated_data, "album.artists", permissive=True
|
||||
validated_data, "artist_credit", permissive=True
|
||||
)
|
||||
or []
|
||||
)
|
||||
for artist in artists + album_artists:
|
||||
actors_to_fetch.add(artist.get("attributedTo"))
|
||||
album_artists_credit = (
|
||||
common_utils.recursive_getattr(
|
||||
validated_data, "album.artist_credit", permissive=True
|
||||
)
|
||||
or []
|
||||
)
|
||||
|
||||
for ac in artist_credit + album_artists_credit:
|
||||
actors_to_fetch.add(ac["artist"].get("attributedTo"))
|
||||
|
||||
for url in actors_to_fetch:
|
||||
if not url:
|
||||
|
@ -1514,8 +1570,9 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
from_activity = self.context.get("activity")
|
||||
if from_activity:
|
||||
metadata["from_activity_id"] = from_activity.pk
|
||||
track = music_tasks.get_track_from_import_metadata(metadata, update_cover=True)
|
||||
|
||||
track = music_tasks.get_track_from_import_metadata(
|
||||
metadata, update_cover=True, query_mb=False
|
||||
)
|
||||
return track
|
||||
|
||||
def update(self, obj, validated_data):
|
||||
|
@ -1524,6 +1581,50 @@ class TrackSerializer(MusicEntitySerializer):
|
|||
return super().update(obj, validated_data)
|
||||
|
||||
|
||||
def duration_int_to_xml(duration):
|
||||
if not duration:
|
||||
return None
|
||||
|
||||
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
|
||||
ret = "P"
|
||||
days, seconds = divmod(int(duration), multipliers["D"])
|
||||
ret += f"{days:d}DT" if days > 0 else "T"
|
||||
hours, seconds = divmod(seconds, multipliers["H"])
|
||||
ret += f"{hours:d}H" if hours > 0 else ""
|
||||
minutes, seconds = divmod(seconds, multipliers["M"])
|
||||
ret += f"{minutes:d}M" if minutes > 0 else ""
|
||||
ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
|
||||
return ret
|
||||
|
||||
|
||||
class DayTimeDurationSerializer(serializers.DurationField):
|
||||
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
|
||||
|
||||
def to_internal_value(self, value):
|
||||
if isinstance(value, float):
|
||||
return value
|
||||
|
||||
parsed = re.match(
|
||||
r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
|
||||
)
|
||||
if parsed is not None:
|
||||
return int(
|
||||
sum(
|
||||
[
|
||||
self.multipliers[s[-1]] * float("0" + s[:-1])
|
||||
for s in parsed.groups()
|
||||
if s is not None
|
||||
]
|
||||
)
|
||||
)
|
||||
self.fail(
|
||||
"invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
|
||||
)
|
||||
|
||||
def to_representation(self, value):
|
||||
        return duration_int_to_xml(value)
|
||||
|
||||
|
||||
class UploadSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Audio])
|
||||
id = serializers.URLField(max_length=500)
|
||||
|
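With this change, durations travel over federation as ISO 8601 `xsd:dayTimeDuration` strings instead of raw seconds. A small round-trip sketch of the conversion implemented by `duration_int_to_xml` and `DayTimeDurationSerializer.to_internal_value` above, re-implemented here so it runs stand-alone:

```python
# Stand-alone illustration; the real code lives in the serializers shown above.
import re

MULTIPLIERS = {"S": 1, "M": 60, "H": 3600, "D": 86400}


def seconds_to_daytimeduration(duration):
    # mirrors duration_int_to_xml above
    if not duration:
        return None
    ret = "P"
    days, seconds = divmod(int(duration), MULTIPLIERS["D"])
    ret += f"{days:d}DT" if days > 0 else "T"
    hours, seconds = divmod(seconds, MULTIPLIERS["H"])
    ret += f"{hours:d}H" if hours > 0 else ""
    minutes, seconds = divmod(seconds, MULTIPLIERS["M"])
    ret += f"{minutes:d}M" if minutes > 0 else ""
    ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
    return ret


def daytimeduration_to_seconds(value):
    # mirrors DayTimeDurationSerializer.to_internal_value above
    parsed = re.match(
        r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
    )
    return int(
        sum(
            MULTIPLIERS[s[-1]] * float("0" + s[:-1])
            for s in parsed.groups()
            if s is not None
        )
    )


assert seconds_to_daytimeduration(3725) == "PT1H2M5S"
assert daytimeduration_to_seconds("PT1H2M5S") == 3725
assert seconds_to_daytimeduration(90061) == "P1DT1H1M1S"
```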
@@ -1533,7 +1634,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
updated = serializers.DateTimeField(required=False, allow_null=True)
bitrate = serializers.IntegerField(min_value=0)
size = serializers.IntegerField(min_value=0)
duration = serializers.IntegerField(min_value=0)
duration = DayTimeDurationSerializer(min_value=0)

track = TrackSerializer(required=True)

@@ -1645,7 +1746,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
"published": instance.creation_date.isoformat(),
"bitrate": instance.bitrate,
"size": instance.size,
"duration": instance.duration,
"duration": duration_int_to_xml(instance.duration),
"url": [
{
"href": utils.full_url(instance.listen_url_no_download),

@@ -1659,9 +1760,11 @@ class UploadSerializer(jsonld.JsonLdSerializer):
},
],
"track": TrackSerializer(track, context={"include_ap_context": False}).data,
"to": contexts.AS.Public
if instance.library.privacy_level == "everyone"
else "",
"to": (
contexts.AS.Public
if instance.library.privacy_level == "everyone"
else ""
),
"attributedTo": instance.library.actor.fid,
}
if instance.modification_date:

@@ -1780,7 +1883,7 @@ class ChannelOutboxSerializer(PaginatedCollectionSerializer):
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.filter(track__artist=channel.artist),
.filter(track__artist_credit__artist=channel.artist),
"type": "OrderedCollection",
}
r = super().to_representation(conf)

@@ -1793,7 +1896,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
name = serializers.CharField()
published = serializers.DateTimeField(required=False)
duration = serializers.IntegerField(min_value=0, required=False)
duration = DayTimeDurationSerializer(required=False)
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
album = serializers.URLField(max_length=500, required=False)

@@ -1850,7 +1953,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
actor=actors.get_service_actor(),
serializer_class=AlbumSerializer,
queryset=music_models.Album.objects.filter(
artist__channel=self.context["channel"]
artist_credit__artist__channel=self.context["channel"]
),
)

@@ -1881,9 +1984,9 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
"name": upload.track.title,
"attributedTo": upload.library.channel.actor.fid,
"published": upload.creation_date.isoformat(),
"to": contexts.AS.Public
if upload.library.privacy_level == "everyone"
else "",
"to": (
contexts.AS.Public if upload.library.privacy_level == "everyone" else ""
),
"url": [
{
"type": "Link",

@@ -1902,7 +2005,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
if upload.track.local_license:
data["license"] = upload.track.local_license["identifiers"][0]

include_if_not_none(data, upload.duration, "duration")
include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
include_if_not_none(data, upload.track.position, "position")
include_if_not_none(data, upload.track.disc_number, "disc")
include_if_not_none(data, upload.track.copyright, "copyright")

@@ -1929,7 +2032,6 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
now = timezone.now()
track_defaults = {
"fid": validated_data["id"],
"artist": channel.artist,
"position": validated_data.get("position", 1),
"disc_number": validated_data.get("disc", 1),
"title": validated_data["name"],

@@ -1942,17 +2044,42 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
track_defaults["license"] = licenses.match(validated_data["license"])

track, created = music_models.Track.objects.update_or_create(
artist__channel=channel, fid=validated_data["id"], defaults=track_defaults
fid=validated_data["id"],
defaults=track_defaults,
)

# only one artist_credit per channel
query = (
Q(
artist=channel.artist,
)
& Q(credit__iexact=channel.artist.name)
& Q(joinphrase="")
)
defaults = {
"artist": channel.artist,
"joinphrase": "",
"credit": channel.artist.name,
}

ac_obj = music_tasks.get_best_candidate_or_create(
music_models.ArtistCredit,
query,
defaults=defaults,
sort_fields=["mbid", "fid"],
)
track.artist_credit.set([ac_obj[0].id])
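The hunk above replaces the direct `Track.artist` foreign key with an `ArtistCredit` relation. The exact semantics of `ArtistCredit` are not shown here, but the `credit`/`joinphrase` defaults used above suggest that a track's displayed artist line is assembled by concatenating credits in order. A hypothetical sketch of that assembly (field names taken from the hunk, everything else assumed):

```python
# Hypothetical: how (credit, joinphrase) pairs presumably combine into a display string.
def render_artist_credit(credits):
    """credits: iterable of (credit, joinphrase) tuples, in order."""
    return "".join(credit + joinphrase for credit, joinphrase in credits)

# A channel track gets exactly one credit with an empty joinphrase,
# matching the Q(credit__iexact=...) & Q(joinphrase="") lookup above.
assert render_artist_credit([("Nina Simone", "")]) == "Nina Simone"
assert render_artist_credit([("A", " feat. "), ("B", "")]) == "A feat. B"
```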
if "image" in validated_data:
|
||||
new_value = self.validated_data["image"]
|
||||
common_utils.attach_file(
|
||||
track,
|
||||
"attachment_cover",
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None,
|
||||
(
|
||||
{"url": new_value["url"], "mimetype": new_value.get("mediaType")}
|
||||
if new_value
|
||||
else None
|
||||
),
|
||||
)
|
||||
|
||||
common_utils.attach_content(
|
||||
|
@ -2076,3 +2203,254 @@ class IndexSerializer(jsonld.JsonLdSerializer):
|
|||
if self.context.get("include_ap_context", True):
|
||||
d["@context"] = jsonld.get_default_context()
|
||||
return d
|
||||
|
||||
|
||||
class TrackFavoriteSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Like])
|
||||
id = serializers.URLField(max_length=500)
|
||||
object = serializers.URLField(max_length=500)
|
||||
actor = serializers.URLField(max_length=500)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {
|
||||
"object": jsonld.first_id(contexts.AS.object),
|
||||
"actor": jsonld.first_id(contexts.AS.actor),
|
||||
}
|
||||
|
||||
def to_representation(self, favorite):
|
||||
payload = {
|
||||
"type": "Like",
|
||||
"id": favorite.fid,
|
||||
"actor": favorite.actor.fid,
|
||||
"object": favorite.track.fid,
|
||||
}
|
||||
if self.context.get("include_ap_context", True):
|
||||
payload["@context"] = jsonld.get_default_context()
|
||||
return payload
|
||||
|
||||
def create(self, validated_data):
|
||||
actor = actors.get_actor(validated_data["actor"])
|
||||
track = utils.retrieve_ap_object(
|
||||
validated_data["object"],
|
||||
actor=actors.get_service_actor(),
|
||||
serializer_class=TrackSerializer,
|
||||
)
|
||||
return favorites_models.TrackFavorite.objects.create(
|
||||
fid=validated_data.get("id"),
|
||||
uuid=uuid.uuid4(),
|
||||
actor=actor,
|
||||
track=track,
|
||||
)
|
||||
|
||||
|
||||
class ListeningSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.AS.Listen])
|
||||
id = serializers.URLField(max_length=500)
|
||||
object = serializers.URLField(max_length=500)
|
||||
actor = serializers.URLField(max_length=500)
|
||||
|
||||
class Meta:
|
||||
jsonld_mapping = {
|
||||
"object": jsonld.first_id(contexts.AS.object),
|
||||
"actor": jsonld.first_id(contexts.AS.actor),
|
||||
}
|
||||
|
||||
def to_representation(self, listening):
|
||||
payload = {
|
||||
"type": "Listen",
|
||||
"id": listening.fid,
|
||||
"actor": listening.actor.fid,
|
||||
"object": listening.track.fid,
|
||||
}
|
||||
if self.context.get("include_ap_context", True):
|
||||
payload["@context"] = jsonld.get_default_context()
|
||||
return payload
|
||||
|
||||
def create(self, validated_data):
|
||||
actor = actors.get_actor(validated_data["actor"])
|
||||
track = utils.retrieve_ap_object(
|
||||
validated_data["object"],
|
||||
actor=actors.get_service_actor(),
|
||||
serializer_class=TrackSerializer,
|
||||
)
|
||||
return history_models.Listening.objects.create(
|
||||
fid=validated_data.get("id"),
|
||||
uuid=validated_data["id"].rstrip("/").split("/")[-1],
|
||||
actor=actor,
|
||||
track=track,
|
||||
)
|
||||
|
||||
|
||||
class PlaylistTrackSerializer(jsonld.JsonLdSerializer):
|
||||
type = serializers.ChoiceField(choices=[contexts.FW.PlaylistTrack])
|
||||
id = serializers.URLField(max_length=500)
|
||||
track = serializers.URLField(max_length=500)
|
||||
index = serializers.IntegerField()
|
||||
creation_date = serializers.DateTimeField()
|
||||
playlist = serializers.URLField(max_length=500, required=False)
|
||||
|
||||
class Meta:
|
||||
model = playlists_models.PlaylistTrack
|
||||
jsonld_mapping = {
|
||||
"track": jsonld.first_id(contexts.FW.track),
|
||||
"playlist": jsonld.first_id(contexts.FW.playlist),
|
||||
"index": jsonld.first_val(contexts.FW.index),
|
||||
"creation_date": jsonld.first_val(contexts.AS.published),
|
||||
}
|
||||
|
||||
def to_representation(self, plt):
|
||||
payload = {
|
||||
"type": "PlaylistTrack",
|
||||
"id": plt.fid,
|
||||
"track": plt.track.fid,
|
||||
"index": plt.index,
|
||||
"attributedTo": plt.playlist.actor.fid,
|
||||
"published": plt.creation_date.isoformat(),
|
||||
}
|
||||
if self.context.get("include_ap_context", True):
|
||||
payload["@context"] = jsonld.get_default_context()
|
||||
|
||||
if self.context.get("include_playlist", True):
|
||||
payload["playlist"] = plt.playlist.fid
|
||||
return payload
|
||||
|
||||
def create(self, validated_data):
|
||||
track = utils.retrieve_ap_object(
|
||||
validated_data["track"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=music_models.Track,
|
||||
serializer_class=TrackSerializer,
|
||||
)
|
||||
playlist = utils.retrieve_ap_object(
|
||||
validated_data["playlist"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=playlists_models.Playlist,
|
||||
serializer_class=PlaylistTrackSerializer,
|
||||
)
|
||||
|
||||
defaults = {
|
||||
"track": track,
|
||||
"index": validated_data["index"],
|
||||
"creation_date": validated_data["creation_date"],
|
||||
"playlist": playlist,
|
||||
}
|
||||
|
||||
plt, created = playlists_models.PlaylistTrack.objects.update_or_create(
|
||||
defaults,
|
||||
**{
|
||||
"uuid": validated_data["id"].rstrip("/").split("/")[-1],
|
||||
"fid": validated_data["id"],
|
||||
},
|
||||
)
|
||||
|
||||
return plt
|
||||
|
||||
|
||||
class PlaylistSerializer(jsonld.JsonLdSerializer):
|
||||
"""
|
||||
Used for playlist activities
|
||||
"""
|
||||
|
||||
type = serializers.ChoiceField(choices=[contexts.FW.Playlist, contexts.AS.Create])
|
||||
id = serializers.URLField(max_length=500)
|
||||
uuid = serializers.UUIDField(required=False)
|
||||
name = serializers.CharField(required=False)
|
||||
attributedTo = serializers.URLField(max_length=500, required=False)
|
||||
published = serializers.DateTimeField(required=False)
|
||||
updated = serializers.DateTimeField(required=False)
|
||||
audience = serializers.ChoiceField(
|
||||
choices=[None, "https://www.w3.org/ns/activitystreams#Public"],
|
||||
required=False,
|
||||
allow_null=True,
|
||||
allow_blank=True,
|
||||
)
|
||||
updateable_fields = [
|
||||
("name", "title"),
|
||||
("attributedTo", "attributed_to"),
|
||||
]
|
||||
|
||||
class Meta:
|
||||
model = playlists_models.Playlist
|
||||
jsonld_mapping = common_utils.concat_dicts(
|
||||
MUSIC_ENTITY_JSONLD_MAPPING,
|
||||
{
|
||||
"updated": jsonld.first_val(contexts.AS.published),
|
||||
"audience": jsonld.first_id(contexts.AS.audience),
|
||||
"attributedTo": jsonld.first_id(contexts.AS.attributedTo),
|
||||
},
|
||||
)
|
||||
|
||||
def to_representation(self, playlist):
|
||||
payload = {
|
||||
"type": "Playlist",
|
||||
"id": playlist.fid,
|
||||
"name": playlist.name,
|
||||
"attributedTo": playlist.actor.fid,
|
||||
"published": playlist.creation_date.isoformat(),
|
||||
"audience": playlist.privacy_level,
|
||||
}
|
||||
payload["audience"] = (
|
||||
contexts.AS.Public if playlist.privacy_level == "everyone" else ""
|
||||
)
|
||||
if playlist.modification_date:
|
||||
payload["updated"] = playlist.modification_date.isoformat()
|
||||
if self.context.get("include_ap_context", True):
|
||||
payload["@context"] = jsonld.get_default_context()
|
||||
return payload
|
||||
|
||||
def create(self, validated_data):
|
||||
actor = utils.retrieve_ap_object(
|
||||
validated_data["attributedTo"],
|
||||
actor=self.context.get("fetch_actor"),
|
||||
queryset=models.Actor,
|
||||
serializer_class=ActorSerializer,
|
||||
)
|
||||
ap_to_fw_data = {
|
||||
"actor": actor,
|
||||
"name": validated_data["name"],
|
||||
"creation_date": validated_data["published"],
|
||||
"privacy_level": validated_data["audience"],
|
||||
}
|
||||
playlist, created = playlists_models.Playlist.objects.update_or_create(
|
||||
defaults=ap_to_fw_data,
|
||||
**{
|
||||
"fid": validated_data["id"],
|
||||
"uuid": validated_data.get(
|
||||
"uuid", validated_data["id"].rstrip("/").split("/")[-1]
|
||||
),
|
||||
},
|
||||
)
|
||||
return playlist
|
||||
|
||||
def validate(self, data):
|
||||
validated_data = super().validate(data)
|
||||
if validated_data["audience"] not in [
|
||||
"https://www.w3.org/ns/activitystreams#Public",
|
||||
"everyone",
|
||||
]:
|
||||
raise serializers.ValidationError("Privacy_level must be everyone")
|
||||
|
||||
validated_data["audience"] = "everyone"
|
||||
return validated_data
|
||||
|
||||
|
||||
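Based on `PlaylistSerializer.to_representation` above, a federated public playlist is serialized roughly as the following JSON-LD object. The values are illustrative only; the `@context` is whatever `jsonld.get_default_context()` returns.

```python
# Illustrative payload shape for a public playlist; URLs and dates are made up.
payload = {
    "type": "Playlist",
    "id": "https://pod.example/federation/music/playlists/9a1b2c3d",
    "name": "Morning mix",
    "attributedTo": "https://pod.example/federation/actors/alice",
    "published": "2023-12-09T14:23:00+00:00",
    "audience": "https://www.w3.org/ns/activitystreams#Public",
    "updated": "2023-12-10T08:00:00+00:00",  # only present when modification_date is set
}
```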
class PlaylistCollectionSerializer(PaginatedCollectionSerializer):
"""
Used for the federation view.
"""

type = serializers.ChoiceField(choices=[contexts.FW.Playlist])

def to_representation(self, playlist):
conf = {
"id": playlist.fid,
"name": playlist.name,
"page_size": 100,
"actor": playlist.actor,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"tracks",
),
"type": "Playlist",
}
r = super().to_representation(conf)
return r
@@ -30,7 +30,7 @@ def verify_date(raw_date):
ts = parse_http_date(raw_date)
except ValueError as e:
raise forms.ValidationError(str(e))
dt = datetime.datetime.utcfromtimestamp(ts)
dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
dt = dt.replace(tzinfo=ZoneInfo("UTC"))
delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
now = timezone.now()
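The switch away from `utcfromtimestamp()` matches its deprecation in Python 3.12: `fromtimestamp(ts, datetime.timezone.utc)` returns an aware datetime directly, which also makes the following `tzinfo` replacement redundant. A minimal check of the equivalence:

```python
import datetime

from django.utils.http import parse_http_date  # same helper used in the hunk above

ts = parse_http_date("Sat, 09 Dec 2023 14:23:00 GMT")
dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
assert dt.tzinfo is datetime.timezone.utc  # already aware, no ZoneInfo("UTC") needed
```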
@@ -5,6 +5,7 @@ import os

import requests
from django.conf import settings
from django.core.cache import cache
from django.db import transaction
from django.db.models import F, Q
from django.db.models.deletion import Collector

@@ -18,6 +19,7 @@ from funkwhale_api.common import preferences, session
from funkwhale_api.common import utils as common_utils
from funkwhale_api.moderation import mrf
from funkwhale_api.music import models as music_models
from funkwhale_api.playlists import models as playlists_models
from funkwhale_api.taskapp import celery

from . import (

@@ -665,3 +667,14 @@ def check_single_remote_instance_availability(domain):
domain.reachable = False
domain.save()
return domain.reachable


@celery.app.task(name="federation.trigger_playlist_ap_update")
def trigger_playlist_ap_update(playlist):
for playlist_uuid in cache.get("playlists_for_ap_update"):
routes.outbox.dispatch(
{"type": "Update", "object": {"type": "Playlist"}},
context={
"playlist": playlists_models.Playlist.objects.get(uuid=playlist_uuid)
},
)
@@ -1,4 +1,5 @@
from django.conf.urls import include, url
from django.conf.urls import include
from django.urls import re_path
from rest_framework import routers

from . import views

@@ -16,13 +17,18 @@ router.register(r".well-known", views.WellKnownViewSet, "well-known")
music_router.register(r"libraries", views.MusicLibraryViewSet, "libraries")
music_router.register(r"uploads", views.MusicUploadViewSet, "uploads")
music_router.register(r"artists", views.MusicArtistViewSet, "artists")
music_router.register(r"artistcredit", views.MusicArtistCreditViewSet, "artistcredit")
music_router.register(r"albums", views.MusicAlbumViewSet, "albums")
music_router.register(r"tracks", views.MusicTrackViewSet, "tracks")

music_router.register(r"likes", views.TrackFavoriteViewSet, "likes")
music_router.register(r"listenings", views.ListeningsViewSet, "listenings")
music_router.register(r"playlists", views.PlaylistViewSet, "playlists")

index_router.register(r"index", views.IndexViewSet, "index")

urlpatterns = router.urls + [
url("federation/music/", include((music_router.urls, "music"), namespace="music")),
url("federation/", include((index_router.urls, "index"), namespace="index")),
re_path(
"federation/music/", include((music_router.urls, "music"), namespace="music")
),
re_path("federation/", include((index_router.urls, "index"), namespace="index")),
]
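This URLconf change (repeated in the other `urls.py` diffs below) follows from `django.conf.urls.url()` being deprecated in Django 3.1 and removed in Django 4.0; `django.urls.re_path()` accepts the same regex pattern and is the drop-in replacement. A minimal before/after sketch (the included module path is illustrative, not taken from this diff):

```python
# Before (removed in Django 4.0):
#   from django.conf.urls import url
#   urlpatterns = [url(r"^federation/", include("myapp.federation_urls"))]

# After:
from django.urls import include, re_path

urlpatterns = [
    re_path(r"^federation/", include("myapp.federation_urls")),
]
```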
@@ -7,12 +7,16 @@ from django.urls import reverse
from rest_framework import exceptions, mixins, permissions, response, viewsets
from rest_framework.decorators import action

from funkwhale_api.common import permissions as common_permissions
from funkwhale_api.common import preferences
from funkwhale_api.common import utils as common_utils
from funkwhale_api.favorites import models as favorites_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.history import models as history_models
from funkwhale_api.moderation import models as moderation_models
from funkwhale_api.music import models as music_models
from funkwhale_api.music import utils as music_utils
from funkwhale_api.playlists import models as playlists_models

from . import (
activity,

@@ -161,7 +165,9 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
"actor": channel.actor,
"items": channel.library.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related("library__channel__actor", "track__artist"),
.prefetch_related(
"library__channel__actor", "track__artist_credit__artist"
),
"item_serializer": serializers.ChannelCreateUploadSerializer,
}
return get_collection_response(

@@ -170,17 +176,115 @@ class ActorViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericV
collection_serializer=serializers.ChannelOutboxSerializer(channel),
)

@action(methods=["get"], detail=True)
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def followers(self, request, *args, **kwargs):
self.get_object()
# XXX to implement
return response.Response({})
actor = self.get_object()
followers = list(actor.get_approved_followers())
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-followers",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followers,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response

@action(methods=["get"], detail=True)
@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def following(self, request, *args, **kwargs):
self.get_object()
# XXX to implement
return response.Response({})
actor = self.get_object()
followings = list(
actor.emitted_follows.filter(approved=True).values_list("target", flat=True)
)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-following",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": followings,
"item_serializer": serializers.ActorSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response

@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def listens(self, request, *args, **kwargs):
actor = self.get_object()
listenings = history_models.Listening.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-listens",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": listenings,
"item_serializer": serializers.ListeningSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response

@action(
methods=["get"],
detail=True,
permission_classes=[common_permissions.PrivacyLevelPermission],
)
def likes(self, request, *args, **kwargs):
actor = self.get_object()
likes = favorites_models.TrackFavorite.objects.filter(actor=actor)
conf = {
"id": federation_utils.full_url(
reverse(
"federation:actors-likes",
kwargs={"preferred_username": actor.preferred_username},
)
),
"items": likes,
"item_serializer": serializers.TrackFavoriteSerializer,
"page_size": 100,
"actor": None,
}
response = get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.IndexSerializer(conf),
)
return response
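`get_collection_response` and `IndexSerializer` are not shown in this diff, but given the `conf` keys used above (`id`, `items`, `item_serializer`, `page_size`), the new followers endpoint presumably answers with a standard paginated ActivityPub collection along these lines (shape and values assumed, not taken from the source):

```python
# Assumed response shape for GET /federation/actors/<username>/followers
collection = {
    "@context": "https://www.w3.org/ns/activitystreams",
    "type": "Collection",
    "id": "https://pod.example/federation/actors/alice/followers",
    "totalItems": 2,
    "first": "https://pod.example/federation/actors/alice/followers?page=1",
    "last": "https://pod.example/federation/actors/alice/followers?page=1",
}
```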
class EditViewSet(FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):

@@ -283,28 +387,27 @@ class MusicLibraryViewSet(
"id": lb.get_federation_id(),
"actor": lb.actor,
"name": lb.name,
"summary": lb.description,
"items": lb.uploads.for_federation()
.order_by("-creation_date")
.prefetch_related(
Prefetch(
"track",
queryset=music_models.Track.objects.select_related(
"album__artist__attributed_to",
"artist__attributed_to",
"artist__attachment_cover",
"attachment_cover",
"album__attributed_to",
"attributed_to",
"album__attachment_cover",
"album__artist__attachment_cover",
"description",
).prefetch_related(
"album__artist_credit__artist__attributed_to",
"artist_credit__artist__attributed_to",
"artist_credit__artist__attachment_cover",
"tagged_items__tag",
"album__tagged_items__tag",
"album__artist__tagged_items__tag",
"artist__tagged_items__tag",
"artist__description",
"album__artist_credit__artist__tagged_items__tag",
"album__artist_credit__artist__attachment_cover",
"artist_credit__artist__tagged_items__tag",
"artist_credit__artist__description",
"album__description",
),
)

@@ -331,15 +434,20 @@ class MusicUploadViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.Upload.objects.local().select_related(
"library__actor",
"track__artist",
"track__album__artist",
"track__description",
"track__album__attachment_cover",
"track__album__artist__attachment_cover",
"track__artist__attachment_cover",
"track__attachment_cover",
queryset = (
music_models.Upload.objects.local()
.select_related(
"library__actor",
"track__description",
"track__album__attachment_cover",
"track__attachment_cover",
)
.prefetch_related(
"track__artist_credit__artist",
"track__album__artist_credit__artist",
"track__album__artist_credit__artist__attachment_cover",
"track__artist_credit__artist__attachment_cover",
)
)
serializer_class = serializers.UploadSerializer
lookup_field = "uuid"

@@ -393,13 +501,35 @@ class MusicArtistViewSet(
return response.Response(serializer.data)


class MusicArtistCreditViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.ArtistCredit.objects.local().prefetch_related("artist")
serializer_class = serializers.ArtistCreditSerializer
lookup_field = "uuid"

def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
serializer = self.get_serializer(instance)
return response.Response(serializer.data)


class MusicAlbumViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.Album.objects.local().select_related(
"artist__description", "description", "artist__attachment_cover"
queryset = (
music_models.Album.objects.local()
.prefetch_related(
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
)
.select_related(
"description",
)
)
serializer_class = serializers.AlbumSerializer
lookup_field = "uuid"

@@ -418,16 +548,22 @@ class MusicTrackViewSet(
):
authentication_classes = [authentication.SignatureAuthentication]
renderer_classes = renderers.get_ap_renderers()
queryset = music_models.Track.objects.local().select_related(
"album__artist",
"album__description",
"artist__description",
"description",
"attachment_cover",
"album__artist__attachment_cover",
"album__attachment_cover",
"artist__attachment_cover",
queryset = (
music_models.Track.objects.local()
.select_related(
"album__description",
"description",
"attachment_cover",
"album__attachment_cover",
)
.prefetch_related(
"album__artist_credit__artist",
"artist_credit__artist__description",
"artist_credit__artist__attachment_cover",
"album__artist_credit__artist__attachment_cover",
)
)

serializer_class = serializers.TrackSerializer
lookup_field = "uuid"

@@ -527,3 +663,74 @@ class IndexViewSet(FederationMixin, viewsets.GenericViewSet):
)

return response.Response({}, status=200)


class TrackFavoriteViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = favorites_models.TrackFavorite.objects.local().select_related(
"track", "actor"
)
serializer_class = serializers.TrackFavoriteSerializer
lookup_field = "uuid"

def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())

serializer = self.get_serializer(instance)
return response.Response(serializer.data)


class ListeningsViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = history_models.Listening.objects.local().select_related("track", "actor")
serializer_class = serializers.ListeningSerializer
lookup_field = "uuid"

def retrieve(self, request, *args, **kwargs):
instance = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(instance.get_absolute_url())

serializer = self.get_serializer(instance)
return response.Response(serializer.data)


class PlaylistViewSet(
FederationMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet
):
authentication_classes = [authentication.SignatureAuthentication]
permission_classes = [common_permissions.PrivacyLevelPermission]
renderer_classes = renderers.get_ap_renderers()
queryset = playlists_models.Playlist.objects.local().select_related("actor")
serializer_class = serializers.PlaylistCollectionSerializer
lookup_field = "uuid"

def retrieve(self, request, *args, **kwargs):
playlist = self.get_object()
if utils.should_redirect_ap_to_html(request.headers.get("accept")):
return redirect_to_html(playlist.get_absolute_url())

conf = {
"id": playlist.fid,
"actor": playlist.actor,
"name": playlist.name,
"items": playlist.playlist_tracks.order_by("index").prefetch_related(
"track",
),
"item_serializer": serializers.PlaylistTrackSerializer,
}
return get_collection_response(
conf=conf,
querystring=request.GET,
collection_serializer=serializers.PlaylistCollectionSerializer(playlist),
)
@@ -8,7 +8,7 @@ record.registry.register_serializer(serializers.ListeningActivitySerializer)

@record.registry.register_consumer("history.Listening")
def broadcast_listening_to_instance_activity(data, obj):
if obj.user.privacy_level not in ["instance", "everyone"]:
if obj.actor.user.privacy_level not in ["instance", "everyone"]:
return

channels.group_send(
@@ -5,6 +5,6 @@ from . import models

@admin.register(models.Listening)
class ListeningAdmin(admin.ModelAdmin):
list_display = ["track", "creation_date", "user", "session_key"]
search_fields = ["track__name", "user__username"]
list_select_related = ["user", "track"]
list_display = ["track", "creation_date", "actor", "session_key"]
search_fields = ["track__name", "actor__user__username"]
list_select_related = ["actor", "track"]
@@ -1,14 +1,28 @@
import factory
from django.conf import settings

from funkwhale_api.factories import NoUpdateOnCreate, registry
from funkwhale_api.federation import models
from funkwhale_api.federation.factories import ActorFactory
from funkwhale_api.music import factories
from funkwhale_api.users.factories import UserFactory


@registry.register
class ListeningFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
user = factory.SubFactory(UserFactory)
actor = factory.SubFactory(ActorFactory)
track = factory.SubFactory(factories.TrackFactory)
fid = factory.Faker("federation_url")
uuid = factory.Faker("uuid4")

class Meta:
model = "history.Listening"

@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:
return
domain = models.Domain.objects.get_or_create(name=settings.FEDERATION_HOSTNAME)[
0
]
self.fid = f"https://{domain}/federation/music/favorite/{self.uuid}"
self.save(update_fields=["fid"])
@@ -7,9 +7,9 @@ from . import models


class ListeningFilter(moderation_filters.HiddenContentFilterSet):
username = django_filters.CharFilter("user__username")
domain = django_filters.CharFilter("user__actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="user__actor", distinct=True)
username = django_filters.CharFilter("actor__user__username")
domain = django_filters.CharFilter("actor__domain_id")
scope = common_filters.ActorScopeFilter(actor_field="actor", distinct=True)

class Meta:
model = models.Listening
@@ -0,0 +1,18 @@
# Generated by Django 3.2.20 on 2023-12-09 14:23

from django.db import migrations, models


class Migration(migrations.Migration):

dependencies = [
('history', '0002_auto_20180325_1433'),
]

operations = [
migrations.AddField(
model_name='listening',
name='source',
field=models.CharField(blank=True, max_length=100, null=True),
),
]
@@ -0,0 +1,107 @@
import uuid
from django.db import migrations, models
from django.urls import reverse

from funkwhale_api.federation import utils
import django.db.models.deletion


def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])


def gen_uuid(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
unique_uuid = uuid.uuid4()
while MyModel.objects.filter(uuid=unique_uuid).exists():
unique_uuid = uuid.uuid4()

fid = utils.full_url(
reverse("federation:music:listenings-detail", kwargs={"uuid": unique_uuid})
)
row.uuid = unique_uuid
row.fid = fid
row.save(update_fields=["uuid", "fid"])


def get_user_actor(apps, schema_editor):
MyModel = apps.get_model("history", "Listening")
for row in MyModel.objects.all():
actor = row.user.actor
row.actor = actor
row.save(update_fields=["actor"])


class Migration(migrations.Migration):
dependencies = [
("history", "0003_listening_source"),
("federation", "0028_auto_20221027_1141"),
]

operations = [
migrations.AddField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
migrations.AddField(
model_name="listening",
name="fid",
field=models.URLField(
max_length=500,
null=True,
),
),
migrations.AddField(
model_name="listening",
name="url",
field=models.URLField(blank=True, max_length=500, null=True),
),
migrations.AddField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, null=True),
),
migrations.RunPython(gen_uuid, reverse_code=migrations.RunPython.noop),
migrations.AlterField(
model_name="listening",
name="uuid",
field=models.UUIDField(default=uuid.uuid4, unique=True),
),
migrations.AlterField(
model_name="listening",
name="fid",
field=models.URLField(
unique=True,
db_index=True,
max_length=500,
),
),
migrations.RunPython(get_user_actor, reverse_code=migrations.RunPython.noop),
migrations.RemoveField(
model_name="listening",
name="user",
),
migrations.AlterField(
model_name="listening",
name="actor",
field=models.ForeignKey(
blank=False,
null=False,
on_delete=django.db.models.deletion.CASCADE,
related_name="listenings",
to="federation.actor",
),
),
]
@@ -1,25 +1,59 @@
import uuid

from django.db import models
from django.urls import reverse
from django.utils import timezone

from funkwhale_api.common import models as common_models
from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music.models import Track


class Listening(models.Model):
class ListeningQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
pass


class Listening(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True)
creation_date = models.DateTimeField(default=timezone.now, null=True, blank=True)
track = models.ForeignKey(
Track, related_name="listenings", on_delete=models.CASCADE
)
user = models.ForeignKey(
"users.User",
actor = models.ForeignKey(
"federation.Actor",
related_name="listenings",
null=True,
blank=True,
on_delete=models.CASCADE,
null=False,
blank=False,
)
session_key = models.CharField(max_length=100, null=True, blank=True)
source = models.CharField(max_length=100, null=True, blank=True)
federation_namespace = "listenings"
objects = ListeningQuerySet.as_manager()

class Meta:
ordering = ("-creation_date",)

def get_activity_url(self):
return f"{self.user.get_activity_url()}/listenings/tracks/{self.pk}"
return f"{self.actor.get_absolute_url()}/listenings/tracks/{self.pk}"

def get_absolute_url(self):
return f"/library/tracks/{self.track.pk}"

def get_federation_id(self):
if self.fid:
return self.fid

return federation_utils.full_url(
reverse(
f"federation:music:{self.federation_namespace}-detail",
kwargs={"uuid": self.uuid},
)
)

def save(self, **kwargs):
if not self.pk and not self.fid:
self.fid = self.get_federation_id()

return super().save(**kwargs)
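With this model change, listenings hang off a federation `Actor` instead of a `User`, and `fid` is filled automatically on `save()` via `get_federation_id()`. A minimal usage sketch, assuming `user` and `track` objects already exist (the model API is as shown in the hunk above, everything else is illustrative):

```python
# Hypothetical usage after the migration: record a listening for a local user.
from funkwhale_api.history.models import Listening

listening = Listening.objects.create(actor=user.actor, track=track)
assert listening.fid  # populated by save() through get_federation_id()
```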
@@ -1,10 +1,8 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers

from funkwhale_api.activity import serializers as activity_serializers
from funkwhale_api.federation import serializers as federation_serializers
from funkwhale_api.music.serializers import TrackActivitySerializer, TrackSerializer
from funkwhale_api.users.serializers import UserActivitySerializer, UserBasicSerializer

from . import models


@@ -12,47 +10,39 @@ from . import models
class ListeningActivitySerializer(activity_serializers.ModelSerializer):
type = serializers.SerializerMethodField()
object = TrackActivitySerializer(source="track")
actor = UserActivitySerializer(source="user")
actor = federation_serializers.APIActorSerializer()
published = serializers.DateTimeField(source="creation_date")

class Meta:
model = models.Listening
fields = ["id", "local_id", "object", "type", "actor", "published"]

def get_actor(self, obj):
return UserActivitySerializer(obj.user).data

def get_type(self, obj):
return "Listen"


class ListeningSerializer(serializers.ModelSerializer):
track = TrackSerializer(read_only=True)
user = UserBasicSerializer(read_only=True)
actor = serializers.SerializerMethodField()
actor = federation_serializers.APIActorSerializer(read_only=True)

class Meta:
model = models.Listening
fields = ("id", "user", "track", "creation_date", "actor")
fields = ("id", "actor", "track", "creation_date", "actor")

def create(self, validated_data):
validated_data["user"] = self.context["user"]
validated_data["actor"] = self.context["user"].actor

return super().create(validated_data)

@extend_schema_field(federation_serializers.APIActorSerializer)
def get_actor(self, obj):
actor = obj.user.actor
if actor:
return federation_serializers.APIActorSerializer(actor).data


class ListeningWriteSerializer(serializers.ModelSerializer):
actor = federation_serializers.APIActorSerializer(read_only=True, required=False)

class Meta:
model = models.Listening
fields = ("id", "user", "track", "creation_date")
fields = ("id", "actor", "track", "creation_date")

def create(self, validated_data):
validated_data["user"] = self.context["user"]
validated_data["actor"] = self.context["user"].actor

return super().create(validated_data)
@@ -4,6 +4,7 @@ from rest_framework import mixins, viewsets
from config import plugins
from funkwhale_api.activity import record
from funkwhale_api.common import fields, permissions
from funkwhale_api.federation import routes
from funkwhale_api.music import utils as music_utils
from funkwhale_api.music.models import Track
from funkwhale_api.users.oauth import permissions as oauth_permissions

@@ -18,9 +19,7 @@ class ListeningViewSet(
viewsets.GenericViewSet,
):
serializer_class = serializers.ListeningSerializer
queryset = models.Listening.objects.all().select_related(
"user__actor__attachment_icon"
)
queryset = models.Listening.objects.all().select_related("actor__attachment_icon")

permission_classes = [
oauth_permissions.ScopePermission,

@@ -29,6 +28,7 @@ class ListeningViewSet(
required_scope = "listenings"
anonymous_policy = "setting"
owner_checks = ["write"]
owner_field = "actor.user"
filterset_class = filters.ListeningFilter

def get_serializer_class(self):

@@ -38,23 +38,40 @@ class ListeningViewSet(

def perform_create(self, serializer):
r = super().perform_create(serializer)
instance = serializer.instance
plugins.trigger_hook(
plugins.LISTENING_CREATED,
listening=serializer.instance,
listening=instance,
confs=plugins.get_confs(self.request.user),
)
routes.outbox.dispatch(
{"type": "Listen", "object": {"type": "Track"}},
context={
"track": instance.track,
"actor": instance.actor,
"id": instance.fid,
},
)
record.send(serializer.instance)
return r

def get_queryset(self):
queryset = super().get_queryset()
queryset = queryset.filter(
fields.privacy_level_query(self.request.user, "user__privacy_level")
fields.privacy_level_query(
self.request.user, "actor__user__privacy_level", "actor__user"
)
)
tracks = Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
).select_related(
"artist", "album__artist", "attributed_to", "artist__attachment_cover"
tracks = (
Track.objects.with_playable_uploads(
music_utils.get_actor_from_request(self.request)
)
.prefetch_related(
"artist_credit",
"album__artist_credit__artist",
"artist_credit__artist__attachment_cover",
)
.select_related("attributed_to")
)
return queryset.prefetch_related(Prefetch("track", queryset=tracks))
@@ -37,7 +37,7 @@ def get_content():

def get_top_music_categories():
return (
models.Track.objects.filter(artist__content_category="music")
models.Track.objects.filter(artist_credit__artist__content_category="music")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))

@@ -47,7 +47,7 @@ def get_top_music_categories():

def get_top_podcast_categories():
return (
models.Track.objects.filter(artist__content_category="podcast")
models.Track.objects.filter(artist_credit__artist__content_category="podcast")
.exclude(tagged_items__tag_id=None)
.values(name=F("tagged_items__tag__name"))
.annotate(count=Count("name"))
@@ -1,4 +1,4 @@
from django.conf.urls import url
from django.urls import re_path

from funkwhale_api.common import routers

@@ -8,7 +8,7 @@ admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")

urlpatterns = [
url(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
url(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
url(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
re_path(r"^nodeinfo/2.0/?$", views.NodeInfo20.as_view(), name="nodeinfo-2.0"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
@@ -1,7 +1,14 @@
from django.conf.urls import url
from django.urls import re_path

from funkwhale_api.common import routers

from . import views

admin_router = routers.OptionalSlashRouter()
admin_router.register(r"admin/settings", views.AdminSettings, "admin-settings")

urlpatterns = [
url(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
]
re_path(r"^nodeinfo/2.1/?$", views.NodeInfo21.as_view(), name="nodeinfo-2.1"),
re_path(r"^settings/?$", views.InstanceSettings.as_view(), name="settings"),
re_path(r"^spa-manifest.json", views.SpaManifest.as_view(), name="spa-manifest"),
] + admin_router.urls
@@ -171,6 +171,9 @@ class NodeInfo21(NodeInfo20):
if pref.get("federation__enabled"):
data["features"].append("federation")

if pref.get("music__only_allow_musicbrainz_tagged_files"):
data["features"].append("onlyMbidTaggedContent")

serializer = self.serializer_class(data)
return Response(
serializer.data, status=200, content_type=NODEINFO_2_CONTENT_TYPE
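With this hunk, the NodeInfo 2.1 document advertises an `onlyMbidTaggedContent` flag when the `music__only_allow_musicbrainz_tagged_files` preference is enabled. A minimal sketch of the resulting feature list, with the preference checks stubbed out and the rest of the payload omitted (shape assumed from the code above):

```python
# Assumed state of the feature flags after both preference checks pass.
data = {"features": []}
if True:  # stand-in for pref.get("federation__enabled")
    data["features"].append("federation")
if True:  # stand-in for pref.get("music__only_allow_musicbrainz_tagged_files")
    data["features"].append("onlyMbidTaggedContent")
assert data["features"] == ["federation", "onlyMbidTaggedContent"]
```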
@@ -1,6 +1,7 @@
import django_filters
from django import forms
from django.db.models import Q
from django.db.models.functions import Collate
from django_filters import rest_framework as filters

from funkwhale_api.audio import models as audio_models

@@ -96,12 +97,15 @@ class ManageAlbumFilterSet(filters.FilterSet):
search_fields={
"title": {"to": "title"},
"fid": {"to": "fid"},
"artist": {"to": "artist__name"},
"artist": {"to": "artist_credit__artist__name"},
"mbid": {"to": "mbid"},
},
filter_fields={
"uuid": {"to": "uuid"},
"artist_id": {"to": "artist_id", "field": forms.IntegerField()},
"artist_id": {
"to": "artist_credit__artist_id",
"field": forms.IntegerField(),
},
"domain": {
"handler": lambda v: federation_utils.get_domain_query_from_url(v)
},

@@ -117,7 +121,7 @@ class ManageAlbumFilterSet(filters.FilterSet):

class Meta:
model = music_models.Album
fields = ["title", "mbid", "fid", "artist"]
fields = ["title", "mbid", "fid", "artist_credit"]


class ManageTrackFilterSet(filters.FilterSet):

@@ -127,9 +131,9 @@ class ManageTrackFilterSet(filters.FilterSet):
"title": {"to": "title"},
"fid": {"to": "fid"},
"mbid": {"to": "mbid"},
"artist": {"to": "artist__name"},
"artist": {"to": "artist_credit__artist__name"},
"album": {"to": "album__title"},
"album_artist": {"to": "album__artist__name"},
"album_artist": {"to": "album__artist_credit__artist__name"},
"copyright": {"to": "copyright"},
},
filter_fields={

@@ -156,7 +160,7 @@ class ManageTrackFilterSet(filters.FilterSet):

class Meta:
model = music_models.Track
fields = ["title", "mbid", "fid", "artist", "album", "license"]
fields = ["title", "mbid", "fid", "artist_credit", "album", "license"]


class ManageLibraryFilterSet(filters.FilterSet):

@@ -370,6 +374,13 @@ class ManageTagFilterSet(filters.FilterSet):
model = tags_models.Tag
fields = []

def get_queryset(self, request):
return (
super()
.get_queryset(request)
.annotate(tag_deterministic=Collate("name", "und-x-icu"))
)


class ManageReportFilterSet(filters.FilterSet):
q = fields.SmartSearchFilter(
@@ -67,8 +67,8 @@ class ManageUserSerializer(serializers.ModelSerializer):
"date_joined",
"last_activity",
"permissions",
"privacy_level",
"upload_quota",
"privacy_level",
"full_username",
)
read_only_fields = [

@@ -451,17 +451,25 @@ class ManageNestedArtistSerializer(ManageBaseArtistSerializer):
pass


class ManageNestedArtistCreditSerializer(ManageBaseArtistSerializer):
artist = ManageNestedArtistSerializer()

class Meta:
model = music_models.ArtistCredit
fields = ["artist"]


class ManageAlbumSerializer(
music_serializers.OptionalDescriptionMixin, ManageBaseAlbumSerializer
):
attributed_to = ManageBaseActorSerializer()
artist = ManageNestedArtistSerializer()
artist_credit = ManageNestedArtistCreditSerializer(many=True)
tags = serializers.SerializerMethodField()

class Meta:
model = music_models.Album
fields = ManageBaseAlbumSerializer.Meta.fields + [
"artist",
"artist_credit",
"attributed_to",
"tags",
"tracks_count",

@@ -477,17 +485,17 @@ class ManageAlbumSerializer(


class ManageTrackAlbumSerializer(ManageBaseAlbumSerializer):
artist = ManageNestedArtistSerializer()
artist_credit = ManageNestedArtistCreditSerializer(many=True)

class Meta:
model = music_models.Album
fields = ManageBaseAlbumSerializer.Meta.fields + ["artist"]
fields = ManageBaseAlbumSerializer.Meta.fields + ["artist_credit"]


class ManageTrackSerializer(
music_serializers.OptionalDescriptionMixin, ManageNestedTrackSerializer
):
artist = ManageNestedArtistSerializer()
artist_credit = ManageNestedArtistCreditSerializer(many=True)
album = ManageTrackAlbumSerializer(allow_null=True)
attributed_to = ManageBaseActorSerializer(allow_null=True)
uploads_count = serializers.SerializerMethodField()

@@ -497,7 +505,7 @@ class ManageTrackSerializer(
class Meta:
model = music_models.Track
fields = ManageNestedTrackSerializer.Meta.fields + [
"artist",
"artist_credit",
"album",
"attributed_to",
"uploads_count",

@@ -564,7 +572,6 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name")
actor = ManageBaseActorSerializer()
uploads_count = serializers.SerializerMethodField()
followers_count = serializers.SerializerMethodField()

class Meta:
model = music_models.Library

@@ -574,14 +581,11 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
"fid",
"url",
"name",
"description",
"domain",
"is_local",
"creation_date",
"privacy_level",
"uploads_count",
"followers_count",
"followers_url",
"actor",
]
read_only_fields = [

@@ -597,10 +601,6 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
def get_uploads_count(self, obj) -> int:
return getattr(obj, "_uploads_count", int(obj.uploads_count))

@extend_schema_field(OpenApiTypes.INT)
def get_followers_count(self, obj):
return getattr(obj, "followers_count", None)


class ManageNestedLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name")

@@ -614,12 +614,10 @@ class ManageNestedLibrarySerializer(serializers.ModelSerializer):
"fid",
"url",
"name",
"description",
"domain",
"is_local",
"creation_date",
"privacy_level",
"followers_url",
"actor",
]
@@ -1,4 +1,5 @@
from django.conf.urls import include, url
from django.conf.urls import include
from django.urls import re_path

from funkwhale_api.common import routers

@@ -32,14 +33,16 @@ other_router.register(r"channels", views.ManageChannelViewSet, "channels")
other_router.register(r"tags", views.ManageTagViewSet, "tags")

urlpatterns = [
url(
re_path(
r"^federation/",
include((federation_router.urls, "federation"), namespace="federation"),
),
url(r"^library/", include((library_router.urls, "instance"), namespace="library")),
url(
re_path(
r"^library/", include((library_router.urls, "instance"), namespace="library")
),
re_path(
r"^moderation/",
include((moderation_router.urls, "moderation"), namespace="moderation"),
),
url(r"^users/", include((users_router.urls, "instance"), namespace="users")),
re_path(r"^users/", include((users_router.urls, "instance"), namespace="users")),
] + other_router.urls