Chore: changes from develop, linter + docs

This commit is contained in:
jon r 2025-04-17 12:25:38 +02:00
parent 9fd8bbd5b6
commit e96692adc0
132 changed files with 31097 additions and 4833 deletions

View File

@ -16,7 +16,7 @@
"ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"], "ignoreDeps": ["$CI_REGISTRY/funkwhale/backend-test-docker"],
"packageRules": [ "packageRules": [
{ {
"matchPaths": ["api/*", "front/*", "docs/*"], "matchFileNames": ["api/*", "front/*", "docs/*"],
"additionalBranchPrefix": "{{parentDir}}-", "additionalBranchPrefix": "{{parentDir}}-",
"semanticCommitScope": "{{parentDir}}" "semanticCommitScope": "{{parentDir}}"
}, },
@ -54,20 +54,20 @@
"addLabels": ["Area::Backend"] "addLabels": ["Area::Backend"]
}, },
{ {
"matchPackagePatterns": ["^@vueuse/.*"], "groupName": "vueuse",
"groupName": "vueuse" "matchDepNames": ["/^@vueuse/.*/"]
}, },
{ {
"matchPackageNames": ["channels", "channels-redis", "daphne"], "matchDepNames": ["channels", "channels-redis", "daphne"],
"groupName": "channels" "groupName": "channels"
}, },
{ {
"matchPackageNames": ["node"], "matchDepNames": ["node"],
"allowedVersions": "/\\d+[02468]$/" "allowedVersions": "/\\d+[02468]$/"
}, },
{ {
"matchFiles": ["deploy/docker-compose.yml"], "matchFileNames": ["deploy/docker-compose.yml"],
"matchPackageNames": ["postgres"], "matchDepNames": ["postgres"],
"postUpgradeTasks": { "postUpgradeTasks": {
"commands": [ "commands": [
"echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update" "echo 'Upgrade Postgres to version {{ newVersion }}. [Make sure to migrate!](https://docs.funkwhale.audio/administrator_documentation/upgrade_docs/docker.html#upgrade-the-postgres-container)' > changes/changelog.d/postgres.update"
@ -76,7 +76,7 @@
} }
}, },
{ {
"matchPackageNames": ["python"], "matchDepNames": ["python"],
"rangeStrategy": "widen" "rangeStrategy": "widen"
} }
] ]

View File

@ -6,7 +6,11 @@ repos:
rev: v4.4.0 rev: v4.4.0
hooks: hooks:
- id: check-added-large-files - id: check-added-large-files
-exclude: 'api/funkwhale_api/common/schema.yml'
+exclude: |
+  (?x)(
+    ^api/funkwhale_api/common/schema.yml|
+    ^api/tests/music/test_coverart.ogg
+  )
- id: check-case-conflict - id: check-case-conflict
- id: check-executables-have-shebangs - id: check-executables-have-shebangs

View File

@ -51,7 +51,7 @@ package:
zip -q 'funkwhale-front.zip' -r front && \ zip -q 'funkwhale-front.zip' -r front && \
rm -Rf front rm -Rf front
cd '$(BUILD_DIR)' && \ # cd '$(BUILD_DIR)' && \
cp ../front/tauri/target/release/bundle/appimage/funkwhale_*.AppImage FunkwhaleDesktop.AppImage # cp ../front/tauri/target/release/bundle/appimage/funkwhale_*.AppImage FunkwhaleDesktop.AppImage
cd '$(BUILD_DIR)' && sha256sum * > SHA256SUMS cd '$(BUILD_DIR)' && sha256sum * > SHA256SUMS

View File

@ -1,4 +1,4 @@
FROM alpine:3.19 AS requirements FROM alpine:3.21 AS requirements
RUN set -eux; \ RUN set -eux; \
apk add --no-cache \ apk add --no-cache \
@ -12,7 +12,7 @@ RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \ poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt; poetry export --without-hashes --with dev > dev-requirements.txt;
FROM alpine:3.19 AS builder FROM alpine:3.21 AS builder
ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1 ENV PYTHONUNBUFFERED=1
@ -37,12 +37,17 @@ RUN set -eux; \
openssl-dev \ openssl-dev \
postgresql-dev \ postgresql-dev \
zlib-dev \ zlib-dev \
py3-cryptography=41.0.7-r0 \ py3-cryptography \
py3-lxml=4.9.3-r1 \ py3-lxml \
py3-pillow=10.3.0-r0 \ py3-pillow \
py3-psycopg2=2.9.9-r0 \ py3-psycopg2 \
py3-watchfiles=0.19.0-r1 \ py3-watchfiles \
python3-dev python3-dev \
gfortran \
libgfortran \
openblas-dev \
py3-scipy \
py3-scikit-learn;
# Create virtual env # Create virtual env
RUN python3 -m venv --system-site-packages /venv RUN python3 -m venv --system-site-packages /venv
@ -53,19 +58,28 @@ COPY --from=requirements /dev-requirements.txt /dev-requirements.txt
 RUN --mount=type=cache,target=~/.cache/pip; \
     set -eux; \
-    pip3 install --upgrade pip; \
-    pip3 install setuptools wheel; \
+    pip3 install --upgrade pip;
+RUN --mount=type=cache,target=~/.cache/pip; \
+    set -eux; \
+    pip3 install setuptools wheel;
+RUN --mount=type=cache,target=~/.cache/pip; \
+    set -eux; \
     # Currently we are unable to relieably build rust-based packages on armv7. This
     # is why we need to use the packages shipped by Alpine Linux.
     # Since poetry does not allow in-place dependency pinning, we need
     # to install the deps using pip.
-    grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
+    grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles|scipy|scikit-learn' /requirements.txt \
     | pip3 install -r /dev/stdin \
-    cryptography==41.0.7 \
-    lxml==4.9.3 \
-    pillow==10.2.0 \
-    psycopg2==2.9.9 \
-    watchfiles==0.19.0
+    cryptography \
+    lxml \
+    pillow \
+    psycopg2 \
+    watchfiles \
+    scipy \
+    scikit-learn;
ARG install_dev_deps=0 ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \ RUN --mount=type=cache,target=~/.cache/pip; \
@ -73,14 +87,14 @@ RUN --mount=type=cache,target=~/.cache/pip; \
if [ "$install_dev_deps" = "1" ] ; then \ if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \ grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \ | pip3 install -r /dev/stdin \
cryptography==41.0.7 \ cryptography \
lxml==4.9.3 \ lxml \
pillow==10.2.0 \ pillow \
psycopg2==2.9.9 \ psycopg2 \
watchfiles==0.19.0; \ watchfiles; \
fi fi
FROM alpine:3.19 AS production FROM alpine:3.21 AS production
ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1 ENV PYTHONUNBUFFERED=1
@ -97,11 +111,13 @@ RUN set -eux; \
libpq \ libpq \
libxml2 \ libxml2 \
libxslt \ libxslt \
py3-cryptography=41.0.7-r0 \ py3-cryptography \
py3-lxml=4.9.3-r1 \ py3-lxml \
py3-pillow=10.3.0-r0 \ py3-pillow \
py3-psycopg2=2.9.9-r0 \ py3-psycopg2 \
py3-watchfiles=0.19.0-r1 \ py3-watchfiles \
py3-scipy \
py3-scikit-learn \
python3 \ python3 \
tzdata tzdata
@ -111,6 +127,7 @@ ENV PATH="/venv/bin:$PATH"
COPY . /app COPY . /app
WORKDIR /app WORKDIR /app
RUN apk add --no-cache gfortran
RUN --mount=type=cache,target=~/.cache/pip; \ RUN --mount=type=cache,target=~/.cache/pip; \
set -eux; \ set -eux; \
pip3 install --no-deps --editable . pip3 install --no-deps --editable .

View File

@ -1,4 +1,4 @@
FROM python:3.12-slim AS builder FROM python:3.13-slim AS builder
ARG POETRY_VERSION=1.8 ARG POETRY_VERSION=1.8
@ -39,7 +39,7 @@ RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bi
RUN --mount=type=cache,target=/opt/.cache \ RUN --mount=type=cache,target=/opt/.cache \
poetry install --no-root --extras typesense poetry install --no-root --extras typesense
FROM python:3.12-slim AS runtime FROM python:3.13-slim AS runtime
ARG POETRY_VERSION=1.8 ARG POETRY_VERSION=1.8

View File

@ -299,6 +299,10 @@ def background_task(name):
# HOOKS # HOOKS
TRIGGER_THIRD_PARTY_UPLOAD = "third_party_upload"
"""
Called when a track is being listened
"""
LISTENING_CREATED = "listening_created" LISTENING_CREATED = "listening_created"
""" """
Called when a track is being listened Called when a track is being listened
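For orientation, a plugin opts into this new hook through config.plugins.register_hook, exactly as the archivedl plugin added later in this commit does. A minimal sketch, with an illustrative plugin name and hook body that are not part of the commit:

from config import plugins

PLUGIN = plugins.get_plugin_config(
    name="examplefetcher",  # illustrative plugin name
    label="Example fetcher",
    description="",
    version="0.1",
    user=False,
    conf=[],
)

@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def fetch_missing_upload(track, conf={}):
    # Receives the Track that has no playable upload; a real plugin queues a task here.
    print("no playable upload for track", track.pk)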

View File

@ -114,6 +114,7 @@ else:
logger.info("Loaded env file at %s/.env", path) logger.info("Loaded env file at %s/.env", path)
break break
FUNKWHALE_PLUGINS = env("FUNKWHALE_PLUGINS", default="")
FUNKWHALE_PLUGINS_PATH = env( FUNKWHALE_PLUGINS_PATH = env(
"FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/" "FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/"
) )
@ -314,6 +315,7 @@ MIDDLEWARE = (
tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True)) tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True))
+ tuple(ADDITIONAL_MIDDLEWARES_BEFORE) + tuple(ADDITIONAL_MIDDLEWARES_BEFORE)
+ ( + (
"allauth.account.middleware.AccountMiddleware",
"django.middleware.security.SecurityMiddleware", "django.middleware.security.SecurityMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware", "django.middleware.clickjacking.XFrameOptionsMiddleware",
"corsheaders.middleware.CorsMiddleware", "corsheaders.middleware.CorsMiddleware",
@ -1396,6 +1398,7 @@ VERSATILEIMAGEFIELD_RENDITION_KEY_SETS = {
], ],
"attachment_square": [ "attachment_square": [
("original", "url"), ("original", "url"),
("small_square_crop", "crop__50x50"),
("medium_square_crop", "crop__200x200"), ("medium_square_crop", "crop__200x200"),
("large_square_crop", "crop__600x600"), ("large_square_crop", "crop__600x600"),
], ],

View File

@ -152,3 +152,6 @@ REST_FRAMEWORK.update(
], ],
} }
) )
# allows makemigrations and superuser creation
FORCE = env("FORCE", default=1)

View File

@ -1,9 +0,0 @@
import os
os.environ.setdefault("FUNKWHALE_URL", "http://funkwhale.dev")
from .common import * # noqa
DEBUG = True
SECRET_KEY = "a_super_secret_key!"
TYPESENSE_API_KEY = "apikey"

View File

@ -37,7 +37,7 @@ class ChannelFilter(moderation_filters.HiddenContentFilterSet):
# tuple-mapping retains order # tuple-mapping retains order
fields=( fields=(
("creation_date", "creation_date"), ("creation_date", "creation_date"),
("artist_credit__artist__modification_date", "modification_date"), ("artist__modification_date", "modification_date"),
("?", "random"), ("?", "random"),
) )
) )

View File

@ -263,6 +263,7 @@ class ChannelSerializer(serializers.ModelSerializer):
attributed_to = federation_serializers.APIActorSerializer() attributed_to = federation_serializers.APIActorSerializer()
rss_url = serializers.CharField(source="get_rss_url") rss_url = serializers.CharField(source="get_rss_url")
url = serializers.SerializerMethodField() url = serializers.SerializerMethodField()
subscriptions_count = serializers.SerializerMethodField()
class Meta: class Meta:
model = models.Channel model = models.Channel
@ -276,6 +277,7 @@ class ChannelSerializer(serializers.ModelSerializer):
"rss_url", "rss_url",
"url", "url",
"downloads_count", "downloads_count",
"subscriptions_count",
] ]
def to_representation(self, obj): def to_representation(self, obj):
@ -284,6 +286,7 @@ class ChannelSerializer(serializers.ModelSerializer):
data["subscriptions_count"] = self.get_subscriptions_count(obj) data["subscriptions_count"] = self.get_subscriptions_count(obj)
return data return data
@extend_schema_field(OpenApiTypes.INT)
def get_subscriptions_count(self, obj) -> int: def get_subscriptions_count(self, obj) -> int:
return obj.actor.received_follows.exclude(approved=False).count() return obj.actor.received_follows.exclude(approved=False).count()

View File

@ -2,10 +2,12 @@ from django import http
from django.db import transaction from django.db import transaction
from django.db.models import Count, Prefetch, Q, Sum from django.db.models import Count, Prefetch, Q, Sum
from django.utils import timezone from django.utils import timezone
-from drf_spectacular.utils import extend_schema, extend_schema_view
+from drf_spectacular.utils import extend_schema, extend_schema_view, inline_serializer
 from rest_framework import decorators, exceptions, mixins
 from rest_framework import permissions as rest_permissions
-from rest_framework import response, viewsets
+from rest_framework import response
+from rest_framework import serializers as rest_serializers
+from rest_framework import viewsets
from funkwhale_api.common import locales, permissions, preferences from funkwhale_api.common import locales, permissions, preferences
from funkwhale_api.common import utils as common_utils from funkwhale_api.common import utils as common_utils
@ -210,6 +212,32 @@ class ChannelViewSet(
data = serializers.rss_serialize_channel_full(channel=object, uploads=uploads) data = serializers.rss_serialize_channel_full(channel=object, uploads=uploads)
return response.Response(data, status=200) return response.Response(data, status=200)
@extend_schema(
responses=inline_serializer(
name="MetedataChoicesSerializer",
fields={
"language": rest_serializers.ListField(
child=inline_serializer(
name="LanguageItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
},
)
),
"itunes_category": rest_serializers.ListField(
child=inline_serializer(
name="iTunesCategoryItem",
fields={
"value": rest_serializers.CharField(),
"label": rest_serializers.CharField(),
"children": rest_serializers.CharField(),
},
)
),
},
)
)
@decorators.action( @decorators.action(
methods=["get"], methods=["get"],
detail=False, detail=False,

View File

@ -49,6 +49,7 @@ def handler_create_user(
utils.logger.warn("Unknown permission %s", permission) utils.logger.warn("Unknown permission %s", permission)
utils.logger.debug("Creating actor…") utils.logger.debug("Creating actor…")
user.actor = models.create_actor(user) user.actor = models.create_actor(user)
models.create_user_libraries(user)
user.save() user.save()
return user return user

View File

@ -1,5 +1,4 @@
import os from django.conf import settings
from django.contrib.auth.management.commands.createsuperuser import ( from django.contrib.auth.management.commands.createsuperuser import (
Command as BaseCommand, Command as BaseCommand,
) )
@ -12,7 +11,8 @@ class Command(BaseCommand):
Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour. Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour.
We therefore prohibit the execution of the command. We therefore prohibit the execution of the command.
""" """
if not os.environ.get("FORCE") == "1": force = settings.FORCE
if not force == 1:
raise CommandError( raise CommandError(
"Running createsuperuser on your Funkwhale instance bypasses some of our checks " "Running createsuperuser on your Funkwhale instance bypasses some of our checks "
"which can lead to unexpected behavior of your instance. We therefore suggest to " "which can lead to unexpected behavior of your instance. We therefore suggest to "

View File

@ -1,5 +1,4 @@
import os from django.conf import settings
from django.core.management.base import CommandError from django.core.management.base import CommandError
from django.core.management.commands.makemigrations import Command as BaseCommand from django.core.management.commands.makemigrations import Command as BaseCommand
@ -11,8 +10,8 @@ class Command(BaseCommand):
We ensure the command is disabled, unless a specific env var is provided. We ensure the command is disabled, unless a specific env var is provided.
""" """
force = os.environ.get("FORCE") == "1" force = settings.FORCE
if not force: if not force == 1:
raise CommandError( raise CommandError(
"Running makemigrations on your Funkwhale instance can have desastrous" "Running makemigrations on your Funkwhale instance can have desastrous"
" consequences. This command is disabled, and should only be run in " " consequences. This command is disabled, and should only be run in "

View File

@ -257,6 +257,13 @@ class Attachment(models.Model):
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid}) proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=original") return federation_utils.full_url(proxy_url + "?next=original")
@property
def download_url_small_square_crop(self):
if self.file:
return utils.media_url(self.file.crop["50x50"].url)
proxy_url = reverse("api:v1:attachments-proxy", kwargs={"uuid": self.uuid})
return federation_utils.full_url(proxy_url + "?next=small_square_crop")
@property @property
def download_url_medium_square_crop(self): def download_url_medium_square_crop(self):
if self.file: if self.file:

File diff suppressed because it is too large

View File

@ -293,11 +293,22 @@ class AttachmentSerializer(serializers.Serializer):
file = StripExifImageField(write_only=True) file = StripExifImageField(write_only=True)
urls = serializers.SerializerMethodField() urls = serializers.SerializerMethodField()
-@extend_schema_field(OpenApiTypes.OBJECT)
+@extend_schema_field(
+    {
+        "type": "object",
+        "properties": {
+            "original": {"type": "string"},
+            "small_square_crop": {"type": "string"},
+            "medium_square_crop": {"type": "string"},
+            "large_square_crop": {"type": "string"},
+        },
+    }
+)
def get_urls(self, o): def get_urls(self, o):
urls = {} urls = {}
urls["source"] = o.url urls["source"] = o.url
urls["original"] = o.download_url_original urls["original"] = o.download_url_original
urls["small_square_crop"] = o.download_url_small_square_crop
urls["medium_square_crop"] = o.download_url_medium_square_crop urls["medium_square_crop"] = o.download_url_medium_square_crop
urls["large_square_crop"] = o.download_url_large_square_crop urls["large_square_crop"] = o.download_url_large_square_crop
return urls return urls
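For reference, the urls payload produced by get_urls above now carries four rendition keys plus source; a sketch of the shape, with placeholder values (only the keys come from the code):

{
    "source": "https://example.org/remote/cover.png",
    "original": "https://example.org/media/attachments/original.png",
    "small_square_crop": "https://example.org/media/attachments/small.png",
    "medium_square_crop": "https://example.org/media/attachments/medium.png",
    "large_square_crop": "https://example.org/media/attachments/large.png",
}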

View File

@ -176,7 +176,12 @@ class AttachmentViewSet(
return r return r
size = request.GET.get("next", "original").lower() size = request.GET.get("next", "original").lower()
if size not in ["original", "medium_square_crop", "large_square_crop"]: if size not in [
"original",
"small_square_crop",
"medium_square_crop",
"large_square_crop",
]:
size = "original" size = "original"
try: try:

View File

@ -0,0 +1,13 @@
import logging
from config import plugins
from funkwhale_api.contrib.archivedl import tasks
from .funkwhale_startup import PLUGIN
logger = logging.getLogger(__name__)
@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def lauch_download(track, conf={}):
tasks.archive_download.delay(track_id=track.pk, conf=conf)

View File

@ -0,0 +1,10 @@
from config import plugins
PLUGIN = plugins.get_plugin_config(
name="archivedl",
label="Archive-dl",
description="",
version="0.1",
user=False,
conf=[],
)

View File

@ -0,0 +1,148 @@
import asyncio
import hashlib
import logging
import os
import tempfile
import urllib.parse
import requests
from django.core.files import File
from django.utils import timezone
from funkwhale_api.federation import actors
from funkwhale_api.music import models, utils
from funkwhale_api.taskapp import celery
logger = logging.getLogger(__name__)
def create_upload(url, track, files_data):
mimetype = f"audio/{files_data.get('format', 'unknown')}"
duration = files_data.get("mtime", 0)
filesize = files_data.get("size", 0)
bitrate = files_data.get("bitrate", 0)
service_library = models.Library.objects.create(
privacy_level="everyone",
actor=actors.get_service_actor(),
)
return models.Upload.objects.create(
mimetype=mimetype,
source=url,
third_party_provider="archive-dl",
creation_date=timezone.now(),
track=track,
duration=duration,
size=filesize,
bitrate=bitrate,
library=service_library,
from_activity=None,
import_status="finished",
)
@celery.app.task(name="archivedl.archive_download")
@celery.require_instance(models.Track.objects.select_related(), "track")
def archive_download(track, conf):
artist_name = utils.get_artist_credit_string(track)
query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
with requests.Session() as session:
url = get_search_url(query, page_size=1, page=1)
page_data = fetch_json(url, session)
for obj in page_data["response"]["docs"]:
logger.info(f"launching download item for {str(obj)}")
download_item(
item_data=obj,
session=session,
allowed_extensions=utils.SUPPORTED_EXTENSIONS,
track=track,
)
def fetch_json(url, session):
logger.info(f"Fetching {url}...")
with session.get(url) as response:
return response.json()
def download_item(
item_data,
session,
allowed_extensions,
track,
):
files_data = get_files_data(item_data["identifier"], session)
to_download = list(
filter_files(
files_data["result"],
allowed_extensions=allowed_extensions,
)
)
url = f"https://archive.org/download/{item_data['identifier']}/{to_download[0]['name']}"
upload = create_upload(url, track, to_download[0])
try:
with tempfile.TemporaryDirectory() as temp_dir:
path = os.path.join(temp_dir, to_download[0]["name"])
download_file(
path,
url=url,
session=session,
checksum=to_download[0]["sha1"],
upload=upload,
to_download=to_download,
)
logger.info(f"Finished to download item {item_data['identifier']}...")
except Exception as e:
upload.delete()
raise e
def check_integrity(path, expected_checksum):
with open(path, mode="rb") as f:
hash = hashlib.sha1()
hash.update(f.read())
return expected_checksum == hash.hexdigest()
def get_files_data(identifier, session):
url = f"https://archive.org/metadata/{identifier}/files"
logger.info(f"Fetching files data at {url}...")
with session.get(url) as response:
return response.json()
def download_file(path, url, session, checksum, upload, to_download):
if os.path.exists(path) and check_integrity(path, checksum):
logger.info(f"Skipping already downloaded file at {path}")
return
logger.info(f"Downloading file {url}...")
with open(path, mode="wb") as f:
try:
with session.get(url) as response:
f.write(response.content)
except asyncio.TimeoutError as e:
logger.error(f"Timeout error while downloading {url}: {e}")
with open(path, "rb") as f:
upload.audio_file.save(f"{to_download['name']}", File(f))
upload.import_status = "finished"
upload.url = url
upload.save()
return upload
def filter_files(files, allowed_extensions):
for f in files:
if allowed_extensions:
extension = os.path.splitext(f["name"])[-1][1:]
if extension not in allowed_extensions:
continue
yield f
def get_search_url(query, page_size, page):
q = urllib.parse.urlencode({"q": query})
return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}&page={page}&output=json"

View File

@ -57,7 +57,7 @@ def import_listenbrainz_listenings(user, user_name, since):
new_ts = max( new_ts = max(
listens, listens,
key=lambda obj: datetime.datetime.fromtimestamp( key=lambda obj: datetime.datetime.fromtimestamp(
obj.listened_at, timezone.utc obj.listened_at, datetime.timezone.utc
), ),
) )
response = client.get_listens(username=user_name, min_ts=new_ts, count=100) response = client.get_listens(username=user_name, min_ts=new_ts, count=100)
@ -74,7 +74,7 @@ def add_lb_listenings_to_db(listens, user):
== "Funkwhale ListenBrainz plugin" == "Funkwhale ListenBrainz plugin"
and history_models.Listening.objects.filter( and history_models.Listening.objects.filter(
creation_date=datetime.datetime.fromtimestamp( creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, timezone.utc listen.listened_at, datetime.timezone.utc
) )
).exists() ).exists()
): ):
@ -103,7 +103,7 @@ def add_lb_listenings_to_db(listens, user):
user = user user = user
fw_listen = history_models.Listening( fw_listen = history_models.Listening(
creation_date=datetime.datetime.fromtimestamp( creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, timezone.utc listen.listened_at, datetime.timezone.utc
), ),
track=track, track=track,
actor=user.actor, actor=user.actor,
@ -125,7 +125,7 @@ def import_listenbrainz_favorites(user, user_name, since):
last_sync = min( last_sync = min(
response["feedback"], response["feedback"],
key=lambda obj: datetime.datetime.fromtimestamp( key=lambda obj: datetime.datetime.fromtimestamp(
obj["created"], timezone.utc obj["created"], datetime.timezone.utc
), ),
)["created"] )["created"]
add_lb_feedback_to_db(response["feedback"], user) add_lb_feedback_to_db(response["feedback"], user)
@ -149,7 +149,7 @@ def add_lb_feedback_to_db(feedbacks, user):
favorites_models.TrackFavorite.objects.get_or_create( favorites_models.TrackFavorite.objects.get_or_create(
actor=user.actor, actor=user.actor,
creation_date=datetime.datetime.fromtimestamp( creation_date=datetime.datetime.fromtimestamp(
feedback["created"], timezone.utc feedback["created"], datetime.timezone.utc
), ),
track=track, track=track,
source="Listenbrainz", source="Listenbrainz",

View File

@ -627,12 +627,6 @@ def get_actors_from_audience(urls):
final_query, Q(pk__in=actor_follows.values_list("actor", flat=True)) final_query, Q(pk__in=actor_follows.values_list("actor", flat=True))
) )
-library_follows = models.LibraryFollow.objects.filter(
-    queries["followed"], approved=True
-)
-final_query = funkwhale_utils.join_queries_or(
-    final_query, Q(pk__in=library_follows.values_list("actor", flat=True))
-)
if not final_query: if not final_query:
return models.Actor.objects.none() return models.Actor.objects.none()
return models.Actor.objects.filter(final_query) return models.Actor.objects.filter(final_query)

View File

@ -56,7 +56,6 @@ class LibrarySerializer(serializers.ModelSerializer):
"uuid", "uuid",
"actor", "actor",
"name", "name",
"description",
"creation_date", "creation_date",
"uploads_count", "uploads_count",
"privacy_level", "privacy_level",

View File

@ -81,11 +81,12 @@ class SignatureAuthentication(authentication.BaseAuthentication):
fetch_delay = 24 * 3600 fetch_delay = 24 * 3600
now = timezone.now() now = timezone.now()
last_fetch = actor.domain.nodeinfo_fetch_date last_fetch = actor.domain.nodeinfo_fetch_date
-if not last_fetch or (
-    last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
-):
-    tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
-    actor.domain.refresh_from_db()
+if not actor.domain.is_local:
+    if not last_fetch or (
+        last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
+    ):
+        tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
+        actor.domain.refresh_from_db()
return actor return actor
def authenticate(self, request): def authenticate(self, request):

View File

@@ -128,11 +128,6 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
 class Meta:
     model = models.Actor
-class Params:
-    with_real_keys = factory.Trait(
-        keys=factory.LazyFunction(keys.get_key_pair),
-    )
@factory.post_generation @factory.post_generation
def local(self, create, extracted, **kwargs): def local(self, create, extracted, **kwargs):
if not extracted and not kwargs: if not extracted and not kwargs:
@ -153,6 +148,26 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
extracted.actor = self extracted.actor = self
extracted.save(update_fields=["user"]) extracted.save(update_fields=["user"])
else: else:
user = UserFactory(actor=self, **kwargs)
user.actor = self
user.save()
@factory.post_generation
def user(self, create, extracted, **kwargs):
"""
Handle the creation or assignment of the related user instance.
If `actor__user` is passed, it will be linked; otherwise, no user is created.
"""
if not create:
return
if extracted: # If a User instance is provided
extracted.actor = self
extracted.save(update_fields=["actor"])
elif kwargs:
from funkwhale_api.users.factories import UserFactory
# Create a User linked to this Actor
self.user = UserFactory(actor=self, **kwargs) self.user = UserFactory(actor=self, **kwargs)
@ -173,13 +188,9 @@ class MusicLibraryFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
uuid = factory.Faker("uuid4") uuid = factory.Faker("uuid4")
actor = factory.SubFactory(ActorFactory) actor = factory.SubFactory(ActorFactory)
privacy_level = "me" privacy_level = "me"
name = factory.Faker("sentence") name = privacy_level
description = factory.Faker("sentence")
uploads_count = 0 uploads_count = 0
fid = factory.Faker("federation_url") fid = factory.Faker("federation_url")
followers_url = factory.LazyAttribute(
lambda o: o.fid + "/followers" if o.fid else None
)
class Meta: class Meta:
model = "music.Library" model = "music.Library"

View File

@ -9,7 +9,7 @@ MODELS = [
(music_models.Album, ["fid"]), (music_models.Album, ["fid"]),
(music_models.Track, ["fid"]), (music_models.Track, ["fid"]),
(music_models.Upload, ["fid"]), (music_models.Upload, ["fid"]),
(music_models.Library, ["fid", "followers_url"]), (music_models.Library, ["fid"]),
( (
federation_models.Actor, federation_models.Actor,
[ [

View File

@ -166,7 +166,7 @@ def outbox_follow(context):
def outbox_create_audio(context): def outbox_create_audio(context):
upload = context["upload"] upload = context["upload"]
channel = upload.library.get_channel() channel = upload.library.get_channel()
followers_target = channel.actor if channel else upload.library followers_target = channel.actor if channel else upload.library.actor
actor = channel.actor if channel else upload.library.actor actor = channel.actor if channel else upload.library.actor
if channel: if channel:
serializer = serializers.ChannelCreateUploadSerializer(upload) serializer = serializers.ChannelCreateUploadSerializer(upload)
@@ -310,8 +310,8 @@ def outbox_delete_audio(context):
 uploads = context["uploads"]
 library = uploads[0].library
 channel = library.get_channel()
-followers_target = channel.actor if channel else library
 actor = channel.actor if channel else library.actor
+followers_target = channel.actor if channel else actor
serializer = serializers.ActivitySerializer( serializer = serializers.ActivitySerializer(
{ {
"type": "Delete", "type": "Delete",
@ -679,6 +679,9 @@ def inbox_delete_favorite(payload, context):
favorite.delete() favorite.delete()
# to do : test listening routes and broadcast
@outbox.register({"type": "Listen", "object.type": "Track"}) @outbox.register({"type": "Listen", "object.type": "Track"})
def outbox_create_listening(context): def outbox_create_listening(context):
track = context["track"] track = context["track"]

View File

@ -1,5 +1,6 @@
import logging import logging
import os import os
import re
import urllib.parse import urllib.parse
import uuid import uuid
@ -995,8 +996,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
actor = serializers.URLField(max_length=500, required=False) actor = serializers.URLField(max_length=500, required=False)
attributedTo = serializers.URLField(max_length=500, required=False) attributedTo = serializers.URLField(max_length=500, required=False)
name = serializers.CharField() name = serializers.CharField()
summary = serializers.CharField(allow_blank=True, allow_null=True, required=False)
followers = serializers.URLField(max_length=500)
audience = serializers.ChoiceField( audience = serializers.ChoiceField(
choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"], choices=["", "./", None, "https://www.w3.org/ns/activitystreams#Public"],
required=False, required=False,
@ -1013,9 +1012,7 @@ class LibrarySerializer(PaginatedCollectionSerializer):
PAGINATED_COLLECTION_JSONLD_MAPPING, PAGINATED_COLLECTION_JSONLD_MAPPING,
{ {
"name": jsonld.first_val(contexts.AS.name), "name": jsonld.first_val(contexts.AS.name),
"summary": jsonld.first_val(contexts.AS.summary),
"audience": jsonld.first_id(contexts.AS.audience), "audience": jsonld.first_id(contexts.AS.audience),
"followers": jsonld.first_id(contexts.AS.followers),
"actor": jsonld.first_id(contexts.AS.actor), "actor": jsonld.first_id(contexts.AS.actor),
"attributedTo": jsonld.first_id(contexts.AS.attributedTo), "attributedTo": jsonld.first_id(contexts.AS.attributedTo),
}, },
@ -1037,7 +1034,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
conf = { conf = {
"id": library.fid, "id": library.fid,
"name": library.name, "name": library.name,
"summary": library.description,
"page_size": 100, "page_size": 100,
"attributedTo": library.actor, "attributedTo": library.actor,
"actor": library.actor, "actor": library.actor,
@ -1048,7 +1044,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
r["audience"] = ( r["audience"] = (
contexts.AS.Public if library.privacy_level == "everyone" else "" contexts.AS.Public if library.privacy_level == "everyone" else ""
) )
r["followers"] = library.followers_url
return r return r
def create(self, validated_data): def create(self, validated_data):
@ -1068,8 +1063,6 @@ class LibrarySerializer(PaginatedCollectionSerializer):
defaults={ defaults={
"uploads_count": validated_data["totalItems"], "uploads_count": validated_data["totalItems"],
"name": validated_data["name"], "name": validated_data["name"],
"description": validated_data.get("summary"),
"followers_url": validated_data["followers"],
"privacy_level": privacy[validated_data["audience"]], "privacy_level": privacy[validated_data["audience"]],
}, },
) )
@ -1588,6 +1581,50 @@ class TrackSerializer(MusicEntitySerializer):
return super().update(obj, validated_data) return super().update(obj, validated_data)
def duration_int_to_xml(duration):
if not duration:
return None
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
ret = "P"
days, seconds = divmod(int(duration), multipliers["D"])
ret += f"{days:d}DT" if days > 0 else "T"
hours, seconds = divmod(seconds, multipliers["H"])
ret += f"{hours:d}H" if hours > 0 else ""
minutes, seconds = divmod(seconds, multipliers["M"])
ret += f"{minutes:d}M" if minutes > 0 else ""
ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
return ret
class DayTimeDurationSerializer(serializers.DurationField):
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
def to_internal_value(self, value):
if isinstance(value, float):
return value
parsed = re.match(
r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
)
if parsed is not None:
return int(
sum(
[
self.multipliers[s[-1]] * float("0" + s[:-1])
for s in parsed.groups()
if s is not None
]
)
)
self.fail(
"invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
)
def to_representation(self, value):
duration_int_to_xml(value)
class UploadSerializer(jsonld.JsonLdSerializer): class UploadSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Audio]) type = serializers.ChoiceField(choices=[contexts.AS.Audio])
id = serializers.URLField(max_length=500) id = serializers.URLField(max_length=500)
@ -1597,7 +1634,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
updated = serializers.DateTimeField(required=False, allow_null=True) updated = serializers.DateTimeField(required=False, allow_null=True)
bitrate = serializers.IntegerField(min_value=0) bitrate = serializers.IntegerField(min_value=0)
size = serializers.IntegerField(min_value=0) size = serializers.IntegerField(min_value=0)
duration = serializers.IntegerField(min_value=0) duration = DayTimeDurationSerializer(min_value=0)
track = TrackSerializer(required=True) track = TrackSerializer(required=True)
@ -1709,7 +1746,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
"published": instance.creation_date.isoformat(), "published": instance.creation_date.isoformat(),
"bitrate": instance.bitrate, "bitrate": instance.bitrate,
"size": instance.size, "size": instance.size,
"duration": instance.duration, "duration": duration_int_to_xml(instance.duration),
"url": [ "url": [
{ {
"href": utils.full_url(instance.listen_url_no_download), "href": utils.full_url(instance.listen_url_no_download),
@ -1859,7 +1896,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1) url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
name = serializers.CharField() name = serializers.CharField()
published = serializers.DateTimeField(required=False) published = serializers.DateTimeField(required=False)
duration = serializers.IntegerField(min_value=0, required=False) duration = DayTimeDurationSerializer(required=False)
position = serializers.IntegerField(min_value=0, allow_null=True, required=False) position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False) disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
album = serializers.URLField(max_length=500, required=False) album = serializers.URLField(max_length=500, required=False)
@ -1968,7 +2005,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
if upload.track.local_license: if upload.track.local_license:
data["license"] = upload.track.local_license["identifiers"][0] data["license"] = upload.track.local_license["identifiers"][0]
include_if_not_none(data, upload.duration, "duration") include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
include_if_not_none(data, upload.track.position, "position") include_if_not_none(data, upload.track.position, "position")
include_if_not_none(data, upload.track.disc_number, "disc") include_if_not_none(data, upload.track.disc_number, "disc")
include_if_not_none(data, upload.track.copyright, "copyright") include_if_not_none(data, upload.track.copyright, "copyright")
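As a quick sanity check on the new duration handling, a round trip through the helpers defined above (module path assumed to be funkwhale_api.federation.serializers; the values follow from the code as written):

from funkwhale_api.federation.serializers import (
    DayTimeDurationSerializer,
    duration_int_to_xml,
)

assert duration_int_to_xml(3661) == "PT1H1M1S"        # 1 hour, 1 minute, 1 second
assert duration_int_to_xml(90061) == "P1DT1H1M1S"     # 1 day on top
assert DayTimeDurationSerializer().to_internal_value("P1DT1H1M1S") == 90061
assert duration_int_to_xml(0) is None                 # falsy durations serialize to None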

View File

@ -30,7 +30,7 @@ def verify_date(raw_date):
ts = parse_http_date(raw_date) ts = parse_http_date(raw_date)
except ValueError as e: except ValueError as e:
raise forms.ValidationError(str(e)) raise forms.ValidationError(str(e))
dt = datetime.datetime.utcfromtimestamp(ts) dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
dt = dt.replace(tzinfo=ZoneInfo("UTC")) dt = dt.replace(tzinfo=ZoneInfo("UTC"))
delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR) delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
now = timezone.now() now = timezone.now()

View File

@ -387,7 +387,6 @@ class MusicLibraryViewSet(
"id": lb.get_federation_id(), "id": lb.get_federation_id(),
"actor": lb.actor, "actor": lb.actor,
"name": lb.name, "name": lb.name,
"summary": lb.description,
"items": lb.uploads.for_federation() "items": lb.uploads.for_federation()
.order_by("-creation_date") .order_by("-creation_date")
.prefetch_related( .prefetch_related(

View File

@ -126,7 +126,7 @@ class NodeInfo21(NodeInfo20):
serializer_class = serializers.NodeInfo21Serializer serializer_class = serializers.NodeInfo21Serializer
@extend_schema( @extend_schema(
responses=serializers.NodeInfo20Serializer, operation_id="getNodeInfo20" responses=serializers.NodeInfo21Serializer, operation_id="getNodeInfo21"
) )
def get(self, request): def get(self, request):
pref = preferences.all() pref = preferences.all()

View File

@ -572,7 +572,6 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name") domain = serializers.CharField(source="domain_name")
actor = ManageBaseActorSerializer() actor = ManageBaseActorSerializer()
uploads_count = serializers.SerializerMethodField() uploads_count = serializers.SerializerMethodField()
followers_count = serializers.SerializerMethodField()
class Meta: class Meta:
model = music_models.Library model = music_models.Library
@ -582,14 +581,11 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
"fid", "fid",
"url", "url",
"name", "name",
"description",
"domain", "domain",
"is_local", "is_local",
"creation_date", "creation_date",
"privacy_level", "privacy_level",
"uploads_count", "uploads_count",
"followers_count",
"followers_url",
"actor", "actor",
] ]
read_only_fields = [ read_only_fields = [
@ -605,10 +601,6 @@ class ManageLibrarySerializer(serializers.ModelSerializer):
def get_uploads_count(self, obj) -> int: def get_uploads_count(self, obj) -> int:
return getattr(obj, "_uploads_count", int(obj.uploads_count)) return getattr(obj, "_uploads_count", int(obj.uploads_count))
@extend_schema_field(OpenApiTypes.INT)
def get_followers_count(self, obj):
return getattr(obj, "followers_count", None)
class ManageNestedLibrarySerializer(serializers.ModelSerializer): class ManageNestedLibrarySerializer(serializers.ModelSerializer):
domain = serializers.CharField(source="domain_name") domain = serializers.CharField(source="domain_name")
@ -622,12 +614,10 @@ class ManageNestedLibrarySerializer(serializers.ModelSerializer):
"fid", "fid",
"url", "url",
"name", "name",
"description",
"domain", "domain",
"is_local", "is_local",
"creation_date", "creation_date",
"privacy_level", "privacy_level",
"followers_url",
"actor", "actor",
] ]

View File

@ -164,7 +164,6 @@ class LibraryStateSerializer(serializers.ModelSerializer):
"uuid", "uuid",
"fid", "fid",
"name", "name",
"description",
"creation_date", "creation_date",
"privacy_level", "privacy_level",
] ]

View File

@ -116,7 +116,7 @@ launch_scan.short_description = "Launch scan"
class LibraryAdmin(admin.ModelAdmin): class LibraryAdmin(admin.ModelAdmin):
list_display = ["id", "name", "actor", "uuid", "privacy_level", "creation_date"] list_display = ["id", "name", "actor", "uuid", "privacy_level", "creation_date"]
list_select_related = True list_select_related = True
search_fields = ["actor__username", "name", "description"] search_fields = ["uuid", "name", "actor__preferred_username"]
list_filter = ["privacy_level"] list_filter = ["privacy_level"]
actions = [launch_scan] actions = [launch_scan]

View File

@ -129,7 +129,7 @@ class Format(types.MultipleChoicePreference):
("aac", "aac"), ("aac", "aac"),
("mp3", "mp3"), ("mp3", "mp3"),
] ]
help_text = "Witch audio format to allow" help_text = "Which audio format to allow"
@global_preferences_registry.register @global_preferences_registry.register

View File

@@ -5,17 +5,19 @@ Populates the database with fake data
 import logging
 import random
+from funkwhale_api.audio import factories as audio_factories
 from funkwhale_api.cli import users
+from funkwhale_api.favorites import factories as favorites_factories
 from funkwhale_api.federation import factories as federation_factories
 from funkwhale_api.history import factories as history_factories
 from funkwhale_api.music import factories as music_factories
 from funkwhale_api.playlists import factories as playlist_factories
-from funkwhale_api.users import serializers
+from funkwhale_api.users import models, serializers
 logger = logging.getLogger(__name__)
-def create_data(count=2, super_user_name=None):
+def create_data(super_user_name=None):
     super_user = None
     if super_user_name:
         try:
@@ -35,39 +37,111 @@ def create_data(count=2, super_user_name=None):
                 in errors[0]
             ):
                 print(
-                    f"Superuser {super_user_name} already in db. Skipping fake-data creation"
+                    f"Superuser {super_user_name} already in db. Skipping superuser creation"
                 )
+                super_user = models.User.objects.get(username=super_user_name)
                 continue
             else:
                 raise e
     print(f"Superuser with username {super_user_name} and password `funkwhale`")
-    library = federation_factories.MusicLibraryFactory(
-        actor=(
-            super_user.actor if super_user else federation_factories.ActorFactory()
-        ),
-        local=True,
-    )
-    uploads = music_factories.UploadFactory.create_batch(
-        size=random.randint(3, 18),
-        playable=True,
-        library=library,
-        local=True,
-    )
-    for upload in uploads:
-        history_factories.ListeningFactory(
-            track=upload.track, actor=upload.library.actor
-        )
-        print("Created fid", upload.track.fid)
-    playlist = playlist_factories.PlaylistFactory(
-        name="playlist test public",
-        privacy_level="everyone",
-        actor=(
-            super_user.actor if super_user else federation_factories.ActorFactory()
-        ),
+    library = federation_factories.MusicLibraryFactory(
+        actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
+        local=True,
+    )
+    uploads = music_factories.UploadFactory.create_batch(
+        size=random.randint(3, 18),
+        playable=True,
+        library=library,
+        local=True,
+    )
+    for upload in uploads[:2]:
+        history_factories.ListeningFactory(
+            track=upload.track, actor=upload.library.actor
+        )
+        favorites_factories.TrackFavorite(
+            track=upload.track, actor=upload.library.actor
+        )
+        print("Created fid", upload.track.fid)
+    playlist = playlist_factories.PlaylistFactory(
+        name="playlist test public",
+        privacy_level="everyone",
+        actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
     )
     playlist_factories.PlaylistTrackFactory(playlist=playlist, track=upload.track)
federation_factories.LibraryFollowFactory.create_batch(
size=random.randint(3, 18), actor=super_user.actor
)
# my podcast
my_podcast_library = federation_factories.MusicLibraryFactory(
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
local=True,
)
my_podcast_channel = audio_factories.ChannelFactory(
library=my_podcast_library,
attributed_to=super_user.actor,
artist__content_category="podcast",
)
my_podcast_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=my_podcast_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=my_podcast_channel.artist,
album=my_podcast_channel_serie,
)
# podcast
podcast_channel = audio_factories.ChannelFactory(artist__content_category="podcast")
podcast_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=podcast_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=podcast_channel.artist,
album=podcast_channel_serie,
)
audio_factories.SubscriptionFactory(
approved=True, target=podcast_channel.actor, actor=super_user.actor
)
# my artist channel
my_artist_library = federation_factories.MusicLibraryFactory(
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
local=True,
)
my_artist_channel = audio_factories.ChannelFactory(
library=my_artist_library,
attributed_to=super_user.actor,
artist__content_category="music",
)
my_artist_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=my_artist_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=my_artist_channel.artist,
album=my_artist_channel_serie,
)
# artist channel
artist_channel = audio_factories.ChannelFactory(artist__content_category="artist")
artist_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=artist_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=artist_channel.artist,
album=artist_channel_serie,
)
audio_factories.SubscriptionFactory(
approved=True, target=artist_channel.actor, actor=super_user.actor
)
if __name__ == "__main__": if __name__ == "__main__":

View File

@ -16,7 +16,7 @@ from . import models, utils
def filter_tags(queryset, name, value): def filter_tags(queryset, name, value):
non_empty_tags = [v.lower() for v in value if v] non_empty_tags = [v.lower() for v in value if v]
for tag in non_empty_tags: for tag in non_empty_tags:
queryset = queryset.filter(tagged_items__tag__name=tag).distinct() queryset = queryset.filter(tagged_items__tag__name__iexact=tag).distinct()
return queryset return queryset
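The switch to __iexact makes the lookup case-insensitive on the stored tag as well, since the incoming values are already lower-cased above. A minimal sketch against hypothetical data (model choice is illustrative):

from funkwhale_api.music.models import Artist

# Suppose an artist was tagged "Rock" (capitalised) at import time.
Artist.objects.filter(tagged_items__tag__name="rock")          # exact lookup: no match
Artist.objects.filter(tagged_items__tag__name__iexact="rock")  # new lookup: matches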

View File

@ -630,7 +630,7 @@ def process_load_queue(stdout, **kwargs):
for path, event in batched_events.copy().items(): for path, event in batched_events.copy().items():
if time.time() - event["time"] <= flush_delay: if time.time() - event["time"] <= flush_delay:
continue continue
now = datetime.datetime.utcnow() now = datetime.datetime.now(datetime.timezone.utc)
stdout.write( stdout.write(
"{} -- Processing {}:{}...\n".format( "{} -- Processing {}:{}...\n".format(
now.strftime("%Y/%m/%d %H:%M:%S"), event["type"], event["path"] now.strftime("%Y/%m/%d %H:%M:%S"), event["type"], event["path"]

View File

@ -4,6 +4,7 @@ import logging
from collections.abc import Mapping from collections.abc import Mapping
import arrow import arrow
import magic
import mutagen._util import mutagen._util
import mutagen.flac import mutagen.flac
import mutagen.oggtheora import mutagen.oggtheora
@ -131,6 +132,28 @@ def clean_flac_pictures(apic):
return pictures return pictures
def clean_ogg_coverart(metadata_block_picture):
pictures = []
for b64_data in [metadata_block_picture]:
try:
data = base64.b64decode(b64_data)
except (TypeError, ValueError):
continue
mime = magic.Magic(mime=True)
mime.from_buffer(data)
pictures.append(
{
"mimetype": mime.from_buffer(data),
"content": data,
"description": "",
"type": mutagen.id3.PictureType.COVER_FRONT,
}
)
return pictures
def clean_ogg_pictures(metadata_block_picture): def clean_ogg_pictures(metadata_block_picture):
pictures = [] pictures = []
for b64_data in [metadata_block_picture]: for b64_data in [metadata_block_picture]:
@ -196,10 +219,16 @@ CONF = {
"license": {}, "license": {},
"copyright": {}, "copyright": {},
"genre": {}, "genre": {},
"pictures": { "pictures": [
"field": "metadata_block_picture", {
"to_application": clean_ogg_pictures, "field": "metadata_block_picture",
}, "to_application": clean_ogg_pictures,
},
{
"field": "coverart",
"to_application": clean_ogg_coverart,
},
],
"comment": {"field": "comment"}, "comment": {"field": "comment"},
}, },
}, },
@ -221,10 +250,16 @@ CONF = {
"license": {}, "license": {},
"copyright": {}, "copyright": {},
"genre": {}, "genre": {},
"pictures": { "pictures": [
"field": "metadata_block_picture", {
"to_application": clean_ogg_pictures, "field": "metadata_block_picture",
}, "to_application": clean_ogg_pictures,
},
{
"field": "coverart",
"to_application": clean_ogg_coverart,
},
],
"comment": {"field": "comment"}, "comment": {"field": "comment"},
}, },
}, },
@@ -415,25 +450,30 @@ class Metadata(Mapping):
 def _get_from_self(self, key, default=NODEFAULT):
     try:
-        field_conf = self._conf["fields"][key]
+        field_confs = self._conf["fields"][key]
     except KeyError:
         raise UnsupportedTag(f"{key} is not supported for this file format")
-    real_key = field_conf.get("field", key)
-    try:
-        getter = field_conf.get("getter", self._conf["getter"])
-        v = getter(self._file, real_key)
-    except KeyError:
-        if default == NODEFAULT:
-            raise TagNotFound(real_key)
-        return default
-    converter = field_conf.get("to_application")
-    if converter:
-        v = converter(v)
-    field = VALIDATION.get(key)
-    if field:
-        v = field.to_python(v)
-    return v
+    if not isinstance(field_confs, list):
+        field_confs = [field_confs]
+    for field_conf in field_confs:
+        real_key = field_conf.get("field", key)
+        try:
+            getter = field_conf.get("getter", self._conf["getter"])
+            v = getter(self._file, real_key)
+        except KeyError:
+            continue
+        converter = field_conf.get("to_application")
+        if converter:
+            v = converter(v)
+        field = VALIDATION.get(key)
+        if field:
+            v = field.to_python(v)
+        return v
+    if default == NODEFAULT:
+        raise TagNotFound(real_key)
+    return default

 def get_picture(self, *picture_types):
     if not picture_types:
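With the list form of the "pictures" entry above, _get_from_self now walks the configured fields in order and only falls back to the default (or TagNotFound) after every candidate has failed. A schematic sketch of the effect (file path is illustrative):

from funkwhale_api.music import metadata

data = metadata.Metadata("api/tests/music/test_coverart.ogg")  # illustrative path
pictures = data._get_from_self("pictures", default=[])
# 1. tries the "metadata_block_picture" field -> clean_ogg_pictures
# 2. falls back to the legacy "coverart" field -> clean_ogg_coverart
# 3. returns the default ([]) if neither tag is present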

View File

@ -0,0 +1,10 @@
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("music", "0059_remove_album_artist_remove_track_artist_artistcredit_and_more"),
("playlists", "0007_alter_playlist_actor_alter_playlisttrack_uuid_and_more"),
]
operations = []

View File

@ -0,0 +1,110 @@
# Generated by Django 4.2.9 on 2025-01-03 16:12
from django.db import migrations, models
from django.db import IntegrityError
from funkwhale_api.federation import utils as federation_utils
from django.urls import reverse
import uuid
def insert_tracks_to_playlist(apps, playlist, uploads):
PlaylistTrack = apps.get_model("playlists", "PlaylistTrack")
plts = [
PlaylistTrack(
creation_date=playlist.creation_date,
playlist=playlist,
track=upload.track,
index=0 + i,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
for i, upload in enumerate(uploads)
if upload.track
]
return PlaylistTrack.objects.bulk_create(plts)
def migrate_libraries_to_playlist(apps, schema_editor):
Playlist = apps.get_model("playlists", "Playlist")
Library = apps.get_model("music", "Library")
LibraryFollow = apps.get_model("federation", "LibraryFollow")
Follow = apps.get_model("federation", "Follow")
User = apps.get_model("users", "User")
Actor = apps.get_model("federation", "Actor")
# library to playlist
for library in Library.objects.all():
playlist = Playlist.objects.create(
name=library.name,
actor=library.actor,
creation_date=library.creation_date,
privacy_level=library.privacy_level,
description=library.description,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
playlist.save()
if library.uploads.all().exists():
insert_tracks_to_playlist(apps, playlist, library.uploads.all())
# library follows to user follow
for lib_follow in LibraryFollow.objects.filter(target=library):
try:
Follow.objects.create(
uuid=lib_follow.uuid,
target=library.actor,
actor=lib_follow.actor,
approved=lib_follow.approved,
creation_date=lib_follow.creation_date,
modification_date=lib_follow.modification_date,
)
except IntegrityError:
pass
LibraryFollow.objects.all().delete()
# migrate uploads to new library
for actor in Actor.objects.all():
privacy_levels = ["me", "instance", "everyone"]
for privacy_level in privacy_levels:
build_in_lib = Library.objects.create(
actor=actor,
privacy_level=privacy_level,
name=privacy_level,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:libraries-detail",
kwargs={"uuid": new_uuid},
)
),
)
for library in actor.libraries.filter(privacy_level=privacy_level):
library.uploads.all().update(library=build_in_lib)
if library.pk is not build_in_lib.pk:
library.delete()
class Migration(migrations.Migration):
dependencies = [
("music", "0060_empty_for_test"),
("playlists", "0008_playlist_library_drop"),
]
operations = [
migrations.RunPython(
migrate_libraries_to_playlist, reverse_code=migrations.RunPython.noop
),
]

View File

@ -0,0 +1,17 @@
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("music", "0061_migrate_libraries_to_playlist"),
]
operations = [
migrations.RemoveField(
model_name="library",
name="description",
),
migrations.RemoveField(
model_name="library",
name="followers_url",
),
]

View File

@ -0,0 +1,42 @@
# Generated by Django 4.2.9 on 2024-12-21 20:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("music", "0062_del_lib_description"),
]
operations = [
migrations.AddField(
model_name="upload",
name="third_party_provider",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name="uploadversion",
name="mimetype",
field=models.CharField(
choices=[
("audio/mp3", "mp3"),
("audio/mpeg3", "mp3"),
("audio/x-mp3", "mp3"),
("audio/mpeg", "mp3"),
("video/ogg", "ogg"),
("audio/ogg", "ogg"),
("audio/opus", "opus"),
("audio/x-m4a", "aac"),
("audio/x-m4a", "m4a"),
("audio/m4a", "m4a"),
("audio/x-flac", "flac"),
("audio/flac", "flac"),
("audio/aiff", "aif"),
("audio/x-aiff", "aif"),
("audio/aiff", "aiff"),
("audio/x-aiff", "aiff"),
],
max_length=50,
),
),
]

View File

@ -7,7 +7,7 @@ import urllib.parse
import uuid import uuid
import arrow import arrow
import pydub import slugify
from django.conf import settings from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.indexes import GinIndex from django.contrib.postgres.indexes import GinIndex
@ -24,6 +24,7 @@ from django.dispatch import receiver
from django.urls import reverse from django.urls import reverse
from django.utils import timezone from django.utils import timezone
from config import plugins
from funkwhale_api import musicbrainz from funkwhale_api import musicbrainz
from funkwhale_api.common import fields from funkwhale_api.common import fields
from funkwhale_api.common import models as common_models from funkwhale_api.common import models as common_models
@ -522,10 +523,19 @@ class TrackQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):
 def with_playable_uploads(self, actor):
     uploads = Upload.objects.playable_by(actor)
-    return self.prefetch_related(
+    queryset = self.prefetch_related(
         models.Prefetch("uploads", queryset=uploads, to_attr="playable_uploads")
     )
+    if queryset and queryset[0].uploads.count() > 0:
+        return queryset
+    else:
+        plugins.trigger_hook(
+            plugins.TRIGGER_THIRD_PARTY_UPLOAD,
+            track=self.first(),
+        )
+        return queryset
def order_for_album(self): def order_for_album(self):
""" """
Order by disc number then position Order by disc number then position
@ -709,7 +719,7 @@ class Track(APIModelMixin):
@property @property
def listen_url(self) -> str: def listen_url(self) -> str:
# Not using reverse because this is slow # Not using reverse because this is slow
return f"/api/v1/listen/{self.uuid}/" return f"/api/v2/listen/{self.uuid}/"
@property @property
def local_license(self): def local_license(self):
@ -766,11 +776,16 @@ TRACK_FILE_IMPORT_STATUS_CHOICES = (
def get_file_path(instance, filename): def get_file_path(instance, filename):
# Convert unicode characters in name to ASCII characters.
filename = slugify.slugify(filename, ok=slugify.SLUG_OK + ".", only_ascii=True)
if isinstance(instance, UploadVersion): if isinstance(instance, UploadVersion):
return common_utils.ChunkedPath("transcoded")(instance, filename) return common_utils.ChunkedPath("transcoded")(instance, filename)
if instance.library.actor.get_user(): if instance.library.actor.get_user():
return common_utils.ChunkedPath("tracks")(instance, filename) return common_utils.ChunkedPath("tracks")(instance, filename)
elif instance.third_party_provider:
return common_utils.ChunkedPath("third_party_tracks")(instance, filename)
else: else:
# we cache remote tracks in a different directory # we cache remote tracks in a different directory
return common_utils.ChunkedPath("federation_cache/tracks")(instance, filename) return common_utils.ChunkedPath("federation_cache/tracks")(instance, filename)
@ -842,6 +857,9 @@ class Upload(models.Model):
checksum = models.CharField(max_length=100, db_index=True, null=True, blank=True) checksum = models.CharField(max_length=100, db_index=True, null=True, blank=True)
quality = models.IntegerField(choices=quality_choices, default=1) quality = models.IntegerField(choices=quality_choices, default=1)
third_party_provider = models.CharField(max_length=100, null=True, blank=True)
objects = UploadQuerySet.as_manager() objects = UploadQuerySet.as_manager()
@property @property
@ -924,6 +942,12 @@ class Upload(models.Model):
if self.source and self.source.startswith("file://"): if self.source and self.source.startswith("file://"):
return open(self.source.replace("file://", "", 1), "rb") return open(self.source.replace("file://", "", 1), "rb")
def get_audio_file_path(self):
if self.audio_file:
return self.audio_file.path
if self.source and self.source.startswith("file://"):
return self.source.replace("file://", "", 1)
def get_audio_data(self): def get_audio_data(self):
audio_file = self.get_audio_file() audio_file = self.get_audio_file()
if not audio_file: if not audio_file:
@ -937,14 +961,6 @@ class Upload(models.Model):
"size": self.get_file_size(), "size": self.get_file_size(),
} }
def get_audio_segment(self):
input = self.get_audio_file()
if not input:
return
audio = pydub.AudioSegment.from_file(input)
return audio
def get_quality(self): def get_quality(self):
extension_to_mimetypes = utils.get_extension_to_mimetype_dict() extension_to_mimetypes = utils.get_extension_to_mimetype_dict()
@ -1068,8 +1084,8 @@ class Upload(models.Model):
) )
version.audio_file.save(new_name, f) version.audio_file.save(new_name, f)
utils.transcode_audio( utils.transcode_audio(
audio=self.get_audio_segment(), audio_file_path=self.get_audio_file_path(),
output=version.audio_file, output_path=version.audio_file.path,
output_format=utils.MIMETYPE_TO_EXTENSION[mimetype], output_format=utils.MIMETYPE_TO_EXTENSION[mimetype],
bitrate=str(bitrate), bitrate=str(bitrate),
) )
@ -1319,10 +1335,8 @@ class Library(federation_models.FederationMixin):
actor = models.ForeignKey( actor = models.ForeignKey(
"federation.Actor", related_name="libraries", on_delete=models.CASCADE "federation.Actor", related_name="libraries", on_delete=models.CASCADE
) )
followers_url = models.URLField(max_length=500)
creation_date = models.DateTimeField(default=timezone.now) creation_date = models.DateTimeField(default=timezone.now)
name = models.CharField(max_length=100) name = models.CharField(max_length=100)
description = models.TextField(max_length=5000, null=True, blank=True)
privacy_level = models.CharField( privacy_level = models.CharField(
choices=LIBRARY_PRIVACY_LEVEL_CHOICES, default="me", max_length=25 choices=LIBRARY_PRIVACY_LEVEL_CHOICES, default="me", max_length=25
) )
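The plugins.trigger_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, track=...) call added in with_playable_uploads opens track resolution to external providers. A hypothetical handler for that hook, assuming the existing registration helpers (plugins.get_plugin_config, plugins.register_hook) keep their current signatures; every name below is a placeholder:

from config import plugins

PLUGIN = plugins.get_plugin_config(
    name="my_provider",
    label="My provider",
    description="Resolve missing tracks through a third-party source",
)


@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def search_third_party(track, conf=None, **kwargs):
    # look the track up on the external service and, if found, create an
    # Upload with third_party_provider="my_provider" so streaming can fall
    # back to it
    ...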

View File

@ -140,10 +140,11 @@ class ArtistWithAlbumsSerializer(OptionalDescriptionMixin, serializers.Serialize
return getattr(o, "_tracks_count", 0) return getattr(o, "_tracks_count", 0)
class SimpleArtistSerializer(serializers.ModelSerializer): class ArtistSerializer(serializers.ModelSerializer):
attachment_cover = CoverField(allow_null=True, required=False) cover = CoverField(allow_null=True, required=False)
description = common_serializers.ContentSerializer(allow_null=True, required=False) description = common_serializers.ContentSerializer(allow_null=True, required=False)
channel = serializers.UUIDField(allow_null=True, required=False) channel = serializers.UUIDField(allow_null=True, required=False)
tags = serializers.SerializerMethodField()
class Meta: class Meta:
model = models.Artist model = models.Artist
@ -157,61 +158,24 @@ class SimpleArtistSerializer(serializers.ModelSerializer):
"is_local", "is_local",
"content_category", "content_category",
"description", "description",
"attachment_cover", "cover",
"channel", "channel",
"attributed_to", "attributed_to",
"tags",
) )
class ArtistCreditSerializer(serializers.ModelSerializer):
artist = SimpleArtistSerializer()
class Meta:
model = models.ArtistCredit
fields = ["artist", "credit", "joinphrase", "index"]
class AlbumSerializer(OptionalDescriptionMixin, serializers.Serializer):
artist_credit = ArtistCreditSerializer(many=True)
cover = CoverField(allow_null=True)
is_playable = serializers.SerializerMethodField()
tags = serializers.SerializerMethodField()
tracks_count = serializers.SerializerMethodField()
attributed_to = APIActorSerializer()
id = serializers.IntegerField()
fid = serializers.URLField()
mbid = serializers.UUIDField()
title = serializers.CharField()
release_date = serializers.DateField()
creation_date = serializers.DateTimeField()
is_local = serializers.BooleanField()
duration = serializers.SerializerMethodField(read_only=True)
def get_tracks_count(self, o) -> int:
return len(o.tracks.all())
def get_is_playable(self, obj) -> bool:
try:
return any(
[
bool(getattr(t, "is_playable_by_actor", None))
for t in obj.tracks.all()
]
)
except AttributeError:
return None
@extend_schema_field({"type": "array", "items": {"type": "string"}}) @extend_schema_field({"type": "array", "items": {"type": "string"}})
def get_tags(self, obj): def get_tags(self, obj):
tagged_items = getattr(obj, "_prefetched_tagged_items", []) tagged_items = getattr(obj, "_prefetched_tagged_items", [])
return [ti.tag.name for ti in tagged_items] return [ti.tag.name for ti in tagged_items]
def get_duration(self, obj) -> int:
try: class ArtistCreditSerializer(serializers.ModelSerializer):
return obj.duration artist = ArtistSerializer()
except AttributeError:
# no annotation? class Meta:
return 0 model = models.ArtistCredit
fields = ["artist", "credit", "joinphrase", "index"]
class TrackAlbumSerializer(serializers.ModelSerializer): class TrackAlbumSerializer(serializers.ModelSerializer):
@ -275,7 +239,7 @@ class TrackSerializer(OptionalDescriptionMixin, serializers.Serializer):
listen_url = serializers.SerializerMethodField() listen_url = serializers.SerializerMethodField()
tags = serializers.SerializerMethodField() tags = serializers.SerializerMethodField()
attributed_to = APIActorSerializer(allow_null=True) attributed_to = APIActorSerializer(allow_null=True)
description = common_serializers.ContentSerializer(allow_null=True, required=False)
id = serializers.IntegerField() id = serializers.IntegerField()
fid = serializers.URLField() fid = serializers.URLField()
mbid = serializers.UUIDField() mbid = serializers.UUIDField()
@ -317,6 +281,51 @@ class TrackSerializer(OptionalDescriptionMixin, serializers.Serializer):
return bool(getattr(obj, "playable_uploads", [])) return bool(getattr(obj, "playable_uploads", []))
class AlbumSerializer(OptionalDescriptionMixin, serializers.Serializer):
artist_credit = ArtistCreditSerializer(many=True)
cover = CoverField(allow_null=True)
is_playable = serializers.SerializerMethodField()
tags = serializers.SerializerMethodField()
tracks_count = serializers.SerializerMethodField()
attributed_to = APIActorSerializer()
id = serializers.IntegerField()
fid = serializers.URLField()
mbid = serializers.UUIDField()
title = serializers.CharField()
release_date = serializers.DateField()
creation_date = serializers.DateTimeField()
is_local = serializers.BooleanField()
duration = serializers.SerializerMethodField(read_only=True)
tracks = TrackSerializer(many=True, allow_null=True)
description = common_serializers.ContentSerializer(allow_null=True, required=False)
def get_tracks_count(self, o) -> int:
return len(o.tracks.all())
def get_is_playable(self, obj) -> bool:
try:
return any(
[
bool(getattr(t, "is_playable_by_actor", None))
for t in obj.tracks.all()
]
)
except AttributeError:
return None
@extend_schema_field({"type": "array", "items": {"type": "string"}})
def get_tags(self, obj):
tagged_items = getattr(obj, "_prefetched_tagged_items", [])
return [ti.tag.name for ti in tagged_items]
def get_duration(self, obj) -> int:
try:
return obj.duration
except AttributeError:
# no annotation?
return 0
@common_serializers.track_fields_for_update("name", "description", "privacy_level") @common_serializers.track_fields_for_update("name", "description", "privacy_level")
class LibraryForOwnerSerializer(serializers.ModelSerializer): class LibraryForOwnerSerializer(serializers.ModelSerializer):
uploads_count = serializers.SerializerMethodField() uploads_count = serializers.SerializerMethodField()
@ -329,7 +338,6 @@ class LibraryForOwnerSerializer(serializers.ModelSerializer):
"uuid", "uuid",
"fid", "fid",
"name", "name",
"description",
"privacy_level", "privacy_level",
"uploads_count", "uploads_count",
"size", "size",
@ -364,6 +372,9 @@ class UploadSerializer(serializers.ModelSerializer):
required=False, required=False,
filters=lambda context: {"actor": context["user"].actor}, filters=lambda context: {"actor": context["user"].actor},
) )
privacy_level = serializers.ChoiceField(
choices=models.LIBRARY_PRIVACY_LEVEL_CHOICES, required=False
)
channel = common_serializers.RelatedField( channel = common_serializers.RelatedField(
"uuid", "uuid",
ChannelSerializer(), ChannelSerializer(),
@ -387,6 +398,7 @@ class UploadSerializer(serializers.ModelSerializer):
"size", "size",
"import_date", "import_date",
"import_status", "import_status",
"privacy_level",
] ]
read_only_fields = [ read_only_fields = [
@ -487,6 +499,7 @@ class UploadForOwnerSerializer(UploadSerializer):
r = super().to_representation(obj) r = super().to_representation(obj)
if "audio_file" in r: if "audio_file" in r:
del r["audio_file"] del r["audio_file"]
r["privacy_level"] = obj.library.privacy_level
return r return r
def validate(self, validated_data): def validate(self, validated_data):
@ -528,6 +541,26 @@ class UploadForOwnerSerializer(UploadSerializer):
return f return f
class UploadBulkUpdateSerializer(serializers.Serializer):
uuid = serializers.UUIDField()
privacy_level = serializers.ChoiceField(
choices=models.LIBRARY_PRIVACY_LEVEL_CHOICES
)
def validate(self, data):
try:
upload = models.Upload.objects.get(uuid=data["uuid"])
except models.Upload.DoesNotExist:
raise serializers.ValidationError(
f"Upload with uuid {data['uuid']} does not exist"
)
upload.library = upload.library.actor.libraries.get(
privacy_level=data["privacy_level"]
)
return upload
class UploadActionSerializer(common_serializers.ActionSerializer): class UploadActionSerializer(common_serializers.ActionSerializer):
actions = [ actions = [
common_serializers.Action("delete", allow_all=True), common_serializers.Action("delete", allow_all=True),

View File

@ -340,7 +340,6 @@ def library_library(request, uuid, redirect_to_ap):
{"tag": "meta", "property": "og:url", "content": library_url}, {"tag": "meta", "property": "og:url", "content": library_url},
{"tag": "meta", "property": "og:type", "content": "website"}, {"tag": "meta", "property": "og:type", "content": "website"},
{"tag": "meta", "property": "og:title", "content": obj.name}, {"tag": "meta", "property": "og:title", "content": obj.name},
{"tag": "meta", "property": "og:description", "content": obj.description},
] ]
if preferences.get("federation__enabled"): if preferences.get("federation__enabled"):

View File

@ -176,7 +176,7 @@ def fail_import(upload, error_code, detail=None, **fields):
upload.import_metadata, "funkwhale", "config", "broadcast", default=True upload.import_metadata, "funkwhale", "config", "broadcast", default=True
) )
if broadcast: if broadcast:
signals.upload_import_status_updated.send( signals.upload_import_status_updated.send_robust(
old_status=old_status, old_status=old_status,
new_status=upload.import_status, new_status=upload.import_status,
upload=upload, upload=upload,
@ -297,7 +297,7 @@ def process_upload(upload, update_denormalization=True):
update_fields=["import_details", "import_status", "import_date", "track"] update_fields=["import_details", "import_status", "import_date", "track"]
) )
if broadcast: if broadcast:
signals.upload_import_status_updated.send( signals.upload_import_status_updated.send_robust(
old_status=old_status, old_status=old_status,
new_status=upload.import_status, new_status=upload.import_status,
upload=upload, upload=upload,
@ -341,7 +341,7 @@ def process_upload(upload, update_denormalization=True):
) )
if broadcast: if broadcast:
signals.upload_import_status_updated.send( signals.upload_import_status_updated.send_robust(
old_status=old_status, old_status=old_status,
new_status=upload.import_status, new_status=upload.import_status,
upload=upload, upload=upload,
@ -993,7 +993,7 @@ def albums_set_tags_from_tracks(ids=None, dry_run=False):
data = tags_tasks.get_tags_from_foreign_key( data = tags_tasks.get_tags_from_foreign_key(
ids=qs, ids=qs,
foreign_key_model=models.Track, foreign_key_model=models.Track,
foreign_key_attr="album", foreign_key_attr="albums",
) )
logger.info("Found automatic tags for %s albums…", len(data)) logger.info("Found automatic tags for %s albums…", len(data))
if dry_run: if dry_run:

View File

@ -4,10 +4,10 @@ import pathlib
import magic import magic
import mutagen import mutagen
import pydub
from django.conf import settings from django.conf import settings
from django.core.cache import cache from django.core.cache import cache
from django.db.models import F from django.db.models import F
from ffmpeg import FFmpeg
from funkwhale_api.common import throttling from funkwhale_api.common import throttling
from funkwhale_api.common.search import get_fts_query # noqa from funkwhale_api.common.search import get_fts_query # noqa
@ -56,6 +56,7 @@ AUDIO_EXTENSIONS_AND_MIMETYPE = [
("opus", "audio/opus"), ("opus", "audio/opus"),
("aac", "audio/x-m4a"), ("aac", "audio/x-m4a"),
("m4a", "audio/x-m4a"), ("m4a", "audio/x-m4a"),
("m4a", "audio/m4a"),
("flac", "audio/x-flac"), ("flac", "audio/x-flac"),
("flac", "audio/flac"), ("flac", "audio/flac"),
("aif", "audio/aiff"), ("aif", "audio/aiff"),
@ -113,15 +114,10 @@ def get_actor_from_request(request):
return actor return actor
def transcode_file(input, output, input_format=None, output_format="mp3", **kwargs): def transcode_audio(audio_file_path, output_path, output_format="mp3", **kwargs):
with input.open("rb"): FFmpeg().input(audio_file_path).output(
audio = pydub.AudioSegment.from_file(input, format=input_format) output_path, format=output_format, **kwargs
return transcode_audio(audio, output, output_format, **kwargs) ).option("y").execute()
def transcode_audio(audio, output, output_format, **kwargs):
with output.open("wb"):
return audio.export(output, format=output_format, **kwargs)
def increment_downloads_count(upload, user, wsgi_request): def increment_downloads_count(upload, user, wsgi_request):
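With pydub gone, transcoding goes through the single python-ffmpeg wrapper above. A minimal sketch of a call to the new helper, mirroring the invocation in Upload.create_transcoded_version; the paths and bitrate value are placeholders:

from funkwhale_api.music import utils

utils.transcode_audio(
    audio_file_path="/srv/funkwhale/data/music/track.flac",
    output_path="/srv/funkwhale/data/music/track.mp3",
    output_format="mp3",
    bitrate="192000",
)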

View File

@ -8,7 +8,7 @@ import requests.exceptions
from django.conf import settings from django.conf import settings
from django.core.cache import cache from django.core.cache import cache
from django.db import transaction from django.db import transaction
from django.db.models import Count, F, Prefetch, Q, Sum from django.db.models import BooleanField, Case, Count, F, Prefetch, Q, Sum, Value, When
from django.db.models.functions import Collate from django.db.models.functions import Collate
from django.utils import timezone from django.utils import timezone
from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view
@ -293,11 +293,8 @@ class AlbumViewSet(
class LibraryViewSet( class LibraryViewSet(
mixins.CreateModelMixin,
mixins.ListModelMixin, mixins.ListModelMixin,
mixins.RetrieveModelMixin, mixins.RetrieveModelMixin,
mixins.UpdateModelMixin,
mixins.DestroyModelMixin,
viewsets.GenericViewSet, viewsets.GenericViewSet,
): ):
lookup_field = "uuid" lookup_field = "uuid"
@ -332,42 +329,6 @@ class LibraryViewSet(
return qs return qs
def perform_create(self, serializer):
serializer.save(actor=self.request.user.actor)
@transaction.atomic
def perform_destroy(self, instance):
routes.outbox.dispatch(
{"type": "Delete", "object": {"type": "Library"}},
context={"library": instance},
)
instance.delete()
@extend_schema(
responses=federation_api_serializers.LibraryFollowSerializer(many=True)
)
@action(
methods=["get"],
detail=True,
)
@transaction.non_atomic_requests
def follows(self, request, *args, **kwargs):
library = self.get_object()
queryset = (
library.received_follows.filter(target__actor=self.request.user.actor)
.prefetch_related("actor", "target__actor")
.order_by("-creation_date")
)
page = self.paginate_queryset(queryset)
if page is not None:
serializer = federation_api_serializers.LibraryFollowSerializer(
page, many=True, required=False
)
return self.get_paginated_response(serializer.data)
serializer = self.get_serializer(queryset, many=True, required=False)
return Response(serializer.data)
# TODO quickfix, basically specifying the response would be None # TODO quickfix, basically specifying the response would be None
@extend_schema(responses=None) @extend_schema(responses=None)
@action( @action(
@ -704,7 +665,15 @@ def handle_stream(track, request, download, explicit_file, format, max_bitrate):
if explicit_file: if explicit_file:
queryset = queryset.filter(uuid=explicit_file) queryset = queryset.filter(uuid=explicit_file)
queryset = queryset.playable_by(actor) queryset = queryset.playable_by(actor)
    queryset = queryset.order_by(F("audio_file").desc(nulls_last=True)) # third-party uploads are sorted after manual uploads, so they are only served when no manual upload is available
queryset = queryset.order_by(
Case(
When(third_party_provider__isnull=False, then=Value(1)),
default=Value(0),
output_field=BooleanField(),
),
F("audio_file").desc(nulls_last=True),
)
upload = queryset.first() upload = queryset.first()
if not upload: if not upload:
return Response(status=404) return Response(status=404)
@ -829,6 +798,28 @@ class UploadViewSet(
cover_data["content"] = base64.b64encode(cover_data["content"]) cover_data["content"] = base64.b64encode(cover_data["content"])
return Response(payload, status=200) return Response(payload, status=200)
@extend_schema(
request=serializers.UploadBulkUpdateSerializer(many=True),
)
@action(detail=False, methods=["patch"])
def bulk_update(self, request, *args, **kwargs):
"""
        Move uploads from one built-in library to another. Receives a list of upload uuids with their new privacy_level.
"""
serializer = serializers.UploadBulkUpdateSerializer(
data=request.data, many=True
)
serializer.is_valid(raise_exception=True)
models.Upload.objects.bulk_update(serializer.validated_data, ["library"])
return Response(
serializers.UploadForOwnerSerializer(
serializer.validated_data, many=True
).data,
status=200,
)
@action(methods=["post"], detail=False) @action(methods=["post"], detail=False)
def action(self, request, *args, **kwargs): def action(self, request, *args, **kwargs):
queryset = self.get_queryset() queryset = self.get_queryset()
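The new bulk_update action pairs with UploadBulkUpdateSerializer to move uploads between the built-in libraries by privacy level. A hypothetical client-side sketch, assuming the upload routes stay mounted under /api/v1/uploads/; host, uuids and token are placeholders:

import requests

requests.patch(
    "https://pod.example/api/v1/uploads/bulk_update/",
    json=[
        {"uuid": "11111111-1111-1111-1111-111111111111", "privacy_level": "everyone"},
        {"uuid": "22222222-2222-2222-2222-222222222222", "privacy_level": "me"},
    ],
    headers={"Authorization": "Bearer <access token>"},
)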

View File

@ -0,0 +1,22 @@
# Generated by Django 4.2.9 on 2025-01-03 16:12
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("playlists", "0007_alter_playlist_actor_alter_playlisttrack_uuid_and_more"),
]
operations = [
migrations.AlterField(
model_name="playlist",
name="name",
field=models.CharField(max_length=100),
),
migrations.AddField(
model_name="playlist",
name="description",
field=models.TextField(blank=True, max_length=5000, null=True),
),
]

View File

@ -78,14 +78,14 @@ class PlaylistQuerySet(models.QuerySet, common_models.LocalFromFidQuerySet):
class Playlist(federation_models.FederationMixin): class Playlist(federation_models.FederationMixin):
uuid = models.UUIDField(default=uuid.uuid4, unique=True) uuid = models.UUIDField(default=uuid.uuid4, unique=True)
name = models.CharField(max_length=50) name = models.CharField(max_length=100)
actor = models.ForeignKey( actor = models.ForeignKey(
"federation.Actor", related_name="playlists", on_delete=models.CASCADE "federation.Actor", related_name="playlists", on_delete=models.CASCADE
) )
creation_date = models.DateTimeField(default=timezone.now) creation_date = models.DateTimeField(default=timezone.now)
modification_date = models.DateTimeField(auto_now=True) modification_date = models.DateTimeField(auto_now=True)
privacy_level = fields.get_privacy_field() privacy_level = fields.get_privacy_field()
description = models.TextField(max_length=5000, null=True, blank=True)
objects = PlaylistQuerySet.as_manager() objects = PlaylistQuerySet.as_manager()
federation_namespace = "playlists" federation_namespace = "playlists"

View File

@ -49,6 +49,7 @@ class PlaylistSerializer(serializers.ModelSerializer):
"duration", "duration",
"is_playable", "is_playable",
"actor", "actor",
"description",
) )
read_only_fields = ["id", "modification_date", "creation_date"] read_only_fields = ["id", "modification_date", "creation_date"]

View File

@ -256,5 +256,5 @@ class PlaylistViewSet(
except models.PlaylistTrack.DoesNotExist: except models.PlaylistTrack.DoesNotExist:
return Response(status=404) return Response(status=404)
artists = music_models.Artist.objects.filter(pk__in=artists_pks) artists = music_models.Artist.objects.filter(pk__in=artists_pks)
serializer = music_serializers.SimpleArtistSerializer(artists, many=True) serializer = music_serializers.ArtistSerializer(artists, many=True)
return Response(serializer.data, status=200) return Response(serializer.data, status=200)

View File

@ -111,6 +111,9 @@ class GetArtistInfo2Serializer(serializers.Serializer):
if artist.mbid: if artist.mbid:
payload["musicBrainzId"] = TagValue(artist.mbid) payload["musicBrainzId"] = TagValue(artist.mbid)
if artist.attachment_cover: if artist.attachment_cover:
payload["smallImageUrl"] = TagValue(
artist.attachment_cover.download_url_small_square_crop
)
payload["mediumImageUrl"] = TagValue( payload["mediumImageUrl"] = TagValue(
artist.attachment_cover.download_url_medium_square_crop artist.attachment_cover.download_url_medium_square_crop
) )
@ -226,6 +229,28 @@ class GetSongSerializer(serializers.Serializer):
return get_track_data(track.album, track, uploads[0]) return get_track_data(track.album, track, uploads[0])
class GetTopSongsSerializer(serializers.Serializer):
def to_representation(self, artist):
top_tracks = (
history_models.Listening.objects.filter(track__artist_credit__artist=artist)
.values("track")
.annotate(listen_count=Count("id"))
.order_by("-listen_count")[: self.context["count"]]
)
if not len(top_tracks):
return {}
top_tracks_instances = []
for track in top_tracks:
track = music_models.Track.objects.get(id=track["track"])
top_tracks_instances.append(track)
return [
get_track_data(track.album, track, track.uploads.all()[0])
for track in top_tracks_instances
]
def get_starred_tracks_data(favorites): def get_starred_tracks_data(favorites):
by_track_id = {f.track_id: f for f in favorites} by_track_id = {f.track_id: f for f in favorites}
tracks = ( tracks = (
@ -335,15 +360,21 @@ def get_channel_data(channel, uploads):
"id": str(channel.uuid), "id": str(channel.uuid),
"url": channel.get_rss_url(), "url": channel.get_rss_url(),
"title": channel.artist.name, "title": channel.artist.name,
"description": channel.artist.description.as_plain_text "description": (
if channel.artist.description channel.artist.description.as_plain_text
else "", if channel.artist.description
"coverArt": f"at-{channel.artist.attachment_cover.uuid}" else ""
if channel.artist.attachment_cover ),
else "", "coverArt": (
"originalImageUrl": channel.artist.attachment_cover.url f"at-{channel.artist.attachment_cover.uuid}"
if channel.artist.attachment_cover if channel.artist.attachment_cover
else "", else ""
),
"originalImageUrl": (
channel.artist.attachment_cover.url
if channel.artist.attachment_cover
else ""
),
"status": "completed", "status": "completed",
} }
if uploads: if uploads:
@ -360,12 +391,14 @@ def get_channel_episode_data(upload, channel_id):
"channelId": str(channel_id), "channelId": str(channel_id),
"streamId": upload.track.id, "streamId": upload.track.id,
"title": upload.track.title, "title": upload.track.title,
"description": upload.track.description.as_plain_text "description": (
if upload.track.description upload.track.description.as_plain_text if upload.track.description else ""
else "", ),
"coverArt": f"at-{upload.track.attachment_cover.uuid}" "coverArt": (
if upload.track.attachment_cover f"at-{upload.track.attachment_cover.uuid}"
else "", if upload.track.attachment_cover
else ""
),
"isDir": "false", "isDir": "false",
"year": upload.track.creation_date.year, "year": upload.track.creation_date.year,
"publishDate": upload.track.creation_date.isoformat(), "publishDate": upload.track.creation_date.isoformat(),

View File

@ -1,6 +1,7 @@
""" """
Documentation of Subsonic API can be found at http://www.subsonic.org/pages/api.jsp Documentation of Subsonic API can be found at http://www.subsonic.org/pages/api.jsp
""" """
import datetime import datetime
import functools import functools
@ -90,6 +91,8 @@ def find_object(
} }
} }
) )
except qs.model.MultipleObjectsReturned:
obj = qs.filter(**{model_field: value})[0]
kwargs["obj"] = obj kwargs["obj"] = obj
return func(self, request, *args, **kwargs) return func(self, request, *args, **kwargs)
@ -260,6 +263,43 @@ class SubsonicViewSet(viewsets.GenericViewSet):
return response.Response(payload, status=200) return response.Response(payload, status=200)
    # This should return last.fm data, but we choose to return the pod's top songs instead
@action(
detail=False,
methods=["get", "post"],
url_name="get_top_songs",
url_path="getTopSongs",
)
@find_object(
music_models.Artist.objects.all(),
model_field="artist_credit__artist__name",
field="artist",
filter_playable=True,
cast=str,
)
def get_top_songs(self, request, *args, **kwargs):
artist = kwargs.pop("obj")
data = request.GET or request.POST
try:
count = int(data["count"])
except KeyError:
return response.Response(
{
"error": {
"code": 10,
"message": "required parameter 'count' not present",
}
}
)
        # pass many=True so the serializer accepts the returned list
data = serializers.GetTopSongsSerializer(
[artist], context={"count": count}, many=True
).data
payload = {"topSongs": data[0]}
return response.Response(payload, status=200)
@action( @action(
detail=False, detail=False,
methods=["get", "post"], methods=["get", "post"],
@ -289,6 +329,44 @@ class SubsonicViewSet(viewsets.GenericViewSet):
payload = {"album": data} payload = {"album": data}
return response.Response(payload, status=200) return response.Response(payload, status=200)
# A clone of get_album (this should return last.fm data but we prefer to send our own metadata)
@action(
detail=False,
methods=["get", "post"],
url_name="get_album_info_2",
url_path="getAlbumInfo2",
)
@find_object(
music_models.Album.objects.with_duration().prefetch_related(
"artist_credit__artist"
),
filter_playable=True,
)
def get_album_info_2(self, request, *args, **kwargs):
album = kwargs.pop("obj")
data = serializers.GetAlbumSerializer(album).data
payload = {"albumInfo": data}
return response.Response(payload, status=200)
# A clone of get_album (this should return last.fm data but we prefer to send our own metadata)
@action(
detail=False,
methods=["get", "post"],
url_name="get_album_info",
url_path="getAlbumInfo",
)
@find_object(
music_models.Album.objects.with_duration().prefetch_related(
"artist_credit__artist"
),
filter_playable=True,
)
def get_album_info(self, request, *args, **kwargs):
album = kwargs.pop("obj")
data = serializers.GetAlbumSerializer(album).data
payload = {"albumInfo": data}
return response.Response(payload, status=200)
@action(detail=False, methods=["get", "post"], url_name="stream", url_path="stream") @action(detail=False, methods=["get", "post"], url_name="stream", url_path="stream")
@find_object(music_models.Track.objects.all(), filter_playable=True) @find_object(music_models.Track.objects.all(), filter_playable=True)
def stream(self, request, *args, **kwargs): def stream(self, request, *args, **kwargs):
@ -419,12 +497,12 @@ class SubsonicViewSet(viewsets.GenericViewSet):
queryset = ( queryset = (
queryset.playable_by(actor) queryset.playable_by(actor)
.filter( .filter(
Q(tagged_items__tag__name=genre) Q(tagged_items__tag__name__iexact=genre)
| Q(artist_credit__artist__tagged_items__tag__name=genre) | Q(artist_credit__artist__tagged_items__tag__name__iexact=genre)
| Q( | Q(
artist_credit__albums__artist_credit__artist__tagged_items__tag__name=genre artist_credit__albums__artist_credit__artist__tagged_items__tag__name__iexact=genre
) )
| Q(artist_credit__albums__tagged_items__tag__name=genre) | Q(artist_credit__albums__tagged_items__tag__name__iexact=genre)
) )
.prefetch_related("uploads") .prefetch_related("uploads")
.distinct() .distinct()
@ -485,8 +563,8 @@ class SubsonicViewSet(viewsets.GenericViewSet):
elif type == "byGenre" and data.get("genre"): elif type == "byGenre" and data.get("genre"):
genre = data.get("genre") genre = data.get("genre")
queryset = queryset.filter( queryset = queryset.filter(
Q(tagged_items__tag__name=genre) Q(tagged_items__tag__name__iexact=genre)
| Q(artist_credit__artist__tagged_items__tag__name=genre) | Q(artist_credit__artist__tagged_items__tag__name__iexact=genre)
) )
elif type == "byYear": elif type == "byYear":
try: try:
@ -815,7 +893,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
.select_related("attachment_cover") .select_related("attachment_cover")
.get(pk=artist_id) .get(pk=artist_id)
) )
except (TypeError, ValueError, music_models.Album.DoesNotExist): except (TypeError, ValueError, music_models.Artist.DoesNotExist):
return response.Response( return response.Response(
{"error": {"code": 70, "message": "cover art not found."}} {"error": {"code": 70, "message": "cover art not found."}}
) )
@ -824,7 +902,7 @@ class SubsonicViewSet(viewsets.GenericViewSet):
try: try:
attachment_id = id.replace("at-", "") attachment_id = id.replace("at-", "")
attachment = common_models.Attachment.objects.get(uuid=attachment_id) attachment = common_models.Attachment.objects.get(uuid=attachment_id)
except (TypeError, ValueError, music_models.Album.DoesNotExist): except (TypeError, ValueError, common_models.Attachment.DoesNotExist):
return response.Response( return response.Response(
{"error": {"code": 70, "message": "cover art not found."}} {"error": {"code": 70, "message": "cover art not found."}}
) )
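The Subsonic additions above (getTopSongs, getAlbumInfo, getAlbumInfo2) serve pod-local data rather than last.fm metadata. A hypothetical request sketch, assuming the Subsonic API stays mounted under /rest/; host, credentials and ids are placeholders:

import requests

auth = {"u": "demo", "p": "demo", "v": "1.16.0", "c": "example", "f": "json"}

# per-artist top songs, ranked by local listening counts
requests.get(
    "https://pod.example/rest/getTopSongs",
    params={**auth, "artist": "artist_name", "count": 10},
)

# album info, serialized with GetAlbumSerializer
requests.get("https://pod.example/rest/getAlbumInfo2", params={**auth, "id": 42})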

View File

@ -11,12 +11,6 @@ class Migration(migrations.Migration):
] ]
operations = [ operations = [
CreateCollation(
"case_insensitive",
provider="icu",
locale="und-u-ks-level2",
deterministic=False,
),
migrations.AddField( migrations.AddField(
model_name="tag", model_name="tag",
name="mbid", name="mbid",
@ -25,8 +19,6 @@ class Migration(migrations.Migration):
migrations.AlterField( migrations.AlterField(
model_name="tag", model_name="tag",
name="name", name="name",
field=models.CharField( field=models.CharField(max_length=100, unique=True),
db_collation="case_insensitive", max_length=100, unique=True
),
), ),
] ]

View File

@ -12,7 +12,8 @@ TAG_REGEX = re.compile(r"^((\w+)([\d_]*))$")
class Tag(models.Model): class Tag(models.Model):
name = models.CharField( name = models.CharField(
max_length=100, unique=True, db_collation="case_insensitive" max_length=100,
unique=True,
) )
mbid = models.UUIDField(null=True, db_index=True, blank=True, unique=True) mbid = models.UUIDField(null=True, db_index=True, blank=True, unique=True)
creation_date = models.DateTimeField(default=timezone.now) creation_date = models.DateTimeField(default=timezone.now)

View File

@ -24,10 +24,10 @@ def get_tags_from_foreign_key(
objs = foreign_key_model.objects.filter( objs = foreign_key_model.objects.filter(
**{f"artist_credit__{foreign_key_attr}__pk__in": ids} **{f"artist_credit__{foreign_key_attr}__pk__in": ids}
).order_by("-id") ).order_by("-id")
objs = objs.only("id", f"artist_credit__{foreign_key_attr}_id").prefetch_related( objs = objs.only("id", f"artist_credit__{foreign_key_attr}__id").prefetch_related(
tagged_items_attr tagged_items_attr
) )
for obj in objs.iterator(): for obj in objs.iterator(chunk_size=1000):
for ac in obj.artist_credit.all(): for ac in obj.artist_credit.all():
# loop on all objects, store the objs tags + counter on the corresponding foreign key # loop on all objects, store the objs tags + counter on the corresponding foreign key
row_data = data.setdefault( row_data = data.setdefault(

View File

@ -1,27 +1,30 @@
from troi import Artist, Element, Playlist, Recording from troi import Artist, ArtistCredit, Element, Playlist, Recording
from troi.patch import Patch from troi.patch import Patch
recording_list = [ recording_list = [
Recording( Recording(
name="I Want It That Way", name="I Want It That Way",
mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
artist=Artist(name="artist_name"), artist_credit=ArtistCredit(artists=[Artist(name="artist_name")]),
),
Recording(
name="Untouchable",
artist_credit=ArtistCredit(artists=[Artist(name="Another lol")]),
), ),
Recording(name="Untouchable", artist=Artist(name="Another lol")),
Recording( Recording(
name="The Perfect Kiss", name="The Perfect Kiss",
mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
artist=Artist(name="artist_name2"), artist_credit=ArtistCredit(artists=[Artist(name="artist_name2")]),
), ),
Recording( Recording(
name="Love Your Voice", name="Love Your Voice",
mbid="93726547-f8c0-4efd-8e16-d2dee76500f6", mbid="93726547-f8c0-4efd-8e16-d2dee76500f6",
artist=Artist(name="artist_name"), artist_credit=ArtistCredit(artists=[Artist(name="artist_name")]),
), ),
Recording( Recording(
name="Hall of Fame", name="Hall of Fame",
mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09", mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09",
artist=Artist(name="artist_name_3"), artist_credit=ArtistCredit(artists=[Artist(name="artist_name3")]),
), ),
] ]
@ -34,8 +37,19 @@ class DummyElement(Element):
return [Playlist] return [Playlist]
def read(self, sources): def read(self, sources):
recordings = recording_list recordings = [
Recording(
name="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
),
Recording(name="Untouchable"),
Recording(
name="The Perfect Kiss", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"
),
Recording(
name="Love Your Voice", mbid="93726547-f8c0-4efd-8e16-d2dee76500f6"
),
Recording(name="Hall of Fame", mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09"),
]
return [ return [
Playlist( Playlist(
name="Test Export Playlist", name="Test Export Playlist",

View File

@ -57,7 +57,7 @@ def resolve_recordings_to_fw_track(recordings):
for recording in recordings: for recording in recordings:
rec = mc.clean_recording(recording.name) rec = mc.clean_recording(recording.name)
artist = mc.clean_artist(recording.artist.name) artist = mc.clean_artist(recording.artist_credit.artists[0].name)
canonical_name_for_track = delete_non_alnum_characters(artist + rec) canonical_name_for_track = delete_non_alnum_characters(artist + rec)
logger.debug(f"Trying to resolve : {canonical_name_for_track}") logger.debug(f"Trying to resolve : {canonical_name_for_track}")

View File

@ -0,0 +1,40 @@
# Generated by Django 4.2.18 on 2025-01-15 13:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("users", "0024_alter_accesstoken_user_and_more"),
]
operations = [
migrations.AddField(
model_name="application",
name="allowed_origins",
field=models.TextField(
blank=True,
default="",
help_text="Allowed origins list to enable CORS, space separated",
),
),
migrations.AddField(
model_name="application",
name="hash_client_secret",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="application",
name="post_logout_redirect_uris",
field=models.TextField(
blank=True,
default="",
help_text="Allowed Post Logout URIs list, space separated",
),
),
migrations.AlterField(
model_name="accesstoken",
name="token",
field=models.CharField(db_index=True, max_length=255, unique=True),
),
]

View File

@ -0,0 +1,55 @@
# Generated by Django 5.1.5 on 2025-01-15 17:10
import oauth2_provider.models
from django.db import migrations, models
from oauth2_provider.settings import oauth2_settings
# see https://github.com/jazzband/django-oauth-toolkit/blob/master/oauth2_provider/migrations/0012_add_token_checksum.py
def forwards_func(apps, schema_editor):
"""
Forward migration touches every "old" accesstoken.token which will cause the checksum to be computed.
"""
AccessToken = apps.get_model(oauth2_settings.ACCESS_TOKEN_MODEL)
accesstokens = AccessToken._default_manager.iterator()
for accesstoken in accesstokens:
accesstoken.save(update_fields=["token_checksum"])
class Migration(migrations.Migration):
dependencies = [
("users", "0025_application_allowed_origins_and_more"),
]
operations = [
migrations.AddField(
model_name="accesstoken",
name="token_checksum",
field=oauth2_provider.models.TokenChecksumField(
blank=True, null=True, max_length=64
),
preserve_default=False,
),
migrations.AddField(
model_name="refreshtoken",
name="token_family",
field=models.UUIDField(blank=True, editable=False, null=True),
),
migrations.AlterField(
model_name="accesstoken",
name="token",
field=models.TextField(),
),
migrations.RunPython(forwards_func, migrations.RunPython.noop),
migrations.AlterField(
model_name="accesstoken",
name="token_checksum",
field=oauth2_provider.models.TokenChecksumField(
blank=False, max_length=64, db_index=True, unique=True
),
),
]

View File

@ -24,6 +24,7 @@ from funkwhale_api.common import validators as common_validators
from funkwhale_api.federation import keys from funkwhale_api.federation import keys
from funkwhale_api.federation import models as federation_models from funkwhale_api.federation import models as federation_models
from funkwhale_api.federation import utils as federation_utils from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music import models as music_models
def get_token(length=5): def get_token(length=5):
@ -373,14 +374,14 @@ class Application(oauth2_models.AbstractApplication):
OOB_SCHEMES = ["urn:ietf:wg:oauth:2.0:oob", "urn:ietf:wg:oauth:2.0:oob:auto"] OOB_SCHEMES = ["urn:ietf:wg:oauth:2.0:oob", "urn:ietf:wg:oauth:2.0:oob:auto"]
class CustomRedirectURIValidator(oauth2_validators.RedirectURIValidator): class CustomRedirectURIValidator(oauth2_validators.AllowedURIValidator):
def __call__(self, value): def __call__(self, value):
if value in OOB_SCHEMES: if value in OOB_SCHEMES:
return value return value
return super().__call__(value) return super().__call__(value)
oauth2_models.RedirectURIValidator = CustomRedirectURIValidator oauth2_models.AllowedURIValidator = CustomRedirectURIValidator
class Grant(oauth2_models.AbstractGrant): class Grant(oauth2_models.AbstractGrant):
@ -454,7 +455,27 @@ def create_actor(user, **kwargs):
args["private_key"] = private.decode("utf-8") args["private_key"] = private.decode("utf-8")
args["public_key"] = public.decode("utf-8") args["public_key"] = public.decode("utf-8")
return federation_models.Actor.objects.create(user=user, **args) actor = federation_models.Actor.objects.create(**args)
user.actor = actor
user.save()
return actor
def create_user_libraries(user):
    for privacy_level, _ in music_models.LIBRARY_PRIVACY_LEVEL_CHOICES:
music_models.Library.objects.create(
actor=user.actor,
privacy_level=privacy_level,
name=privacy_level,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
@receiver(ldap_populate_user) @receiver(ldap_populate_user)

View File

@ -114,7 +114,7 @@ class RegisterSerializer(RS):
user_request_id=user_request.pk, user_request_id=user_request.pk,
new_status=user_request.status, new_status=user_request.status,
) )
models.create_user_libraries(user)
return user return user

api/poetry.lock (generated, 4296 lines changed)

File diff suppressed because it is too large

View File

@ -25,104 +25,105 @@ exclude = ["tests"]
funkwhale-manage = 'funkwhale_api.main:main' funkwhale-manage = 'funkwhale_api.main:main'
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.8,<3.13" python = "^3.10,<3.14"
# Django # Django
dj-rest-auth = "5.0.2" dj-rest-auth = "7.0.1"
django = "4.2.9" django = "5.1.6"
django-allauth = "0.55.2" django-allauth = "65.4.1"
django-cache-memoize = "0.1.10" django-cache-memoize = "0.2.1"
django-cacheops = "==7.0.2" django-cacheops = "==7.1"
django-cleanup = "==8.1.0" django-cleanup = "==9.0.0"
django-cors-headers = "==4.3.1" django-cors-headers = "==4.7.0"
django-dynamic-preferences = "==1.14.0" django-dynamic-preferences = "==1.17.0"
django-environ = "==0.11.2" django-environ = "==0.12.0"
django-filter = "==23.5" django-filter = "==25.1"
django-oauth-toolkit = "2.2.0" django-oauth-toolkit = "3.0.1"
django-redis = "==5.2.0" django-redis = "==5.4.0"
django-storages = "==1.13.2" django-storages = "==1.14.5"
django-versatileimagefield = "==3.1" django-versatileimagefield = "==3.1"
djangorestframework = "==3.14.0" djangorestframework = "==3.15.2"
drf-spectacular = "==0.26.5" drf-spectacular = "==0.28.0"
markdown = "==3.4.4" markdown = "==3.7"
persisting-theory = "==1.0" persisting-theory = "==1.0"
psycopg2-binary = "==2.9.9" psycopg2-binary = "==2.9.10"
redis = "==5.0.1" redis = "==5.2.1"
# Django LDAP # Django LDAP
django-auth-ldap = "==4.1.0" django-auth-ldap = "==5.1.0"
python-ldap = "==3.4.4" python-ldap = "==3.4.4"
# Channels # Channels
channels = { extras = ["daphne"], version = "==4.0.0" } channels = { extras = ["daphne"], version = "==4.2.0" }
channels-redis = "==4.1.0" channels-redis = "==4.2.1"
# Celery # Celery
kombu = "5.3.4" kombu = "5.4.2"
celery = "5.3.6" celery = "5.4.0"
# Deployment # Deployment
gunicorn = "==21.2.0" gunicorn = "==23.0.0"
uvicorn = { version = "==0.20.0", extras = ["standard"] } uvicorn = { version = "==0.34.0", extras = ["standard"] }
# Libs # Libs
aiohttp = "3.9.1" aiohttp = "3.11.12"
arrow = "==1.2.3" arrow = "==1.3.0"
backports-zoneinfo = { version = "==0.2.1", python = "<3.9" } backports-zoneinfo = { version = "==0.2.1", python = "<3.9" }
bleach = "==6.1.0" bleach = "==6.2.0"
boto3 = "==1.26.161" boto3 = "==1.36.21"
click = "==8.1.7" click = "==8.1.8"
cryptography = "==41.0.7" cryptography = "==44.0.1"
feedparser = "==6.0.10" defusedxml = "0.7.1"
liblistenbrainz = "==0.5.5" feedparser = "==6.0.11"
python-ffmpeg = "==2.0.12"
liblistenbrainz = "==0.5.6"
musicbrainzngs = "==0.7.1" musicbrainzngs = "==0.7.1"
mutagen = "==1.46.0" mutagen = "==1.46.0"
pillow = "==10.2.0" pillow = "==11.1.0"
pydub = "==0.25.1" pyld = "==2.0.4"
pyld = "==2.0.3"
python-magic = "==0.4.27" python-magic = "==0.4.27"
requests = "==2.31.0" requests = "==2.32.3"
requests-http-message-signatures = "==0.3.1" requests-http-message-signatures = "==0.3.1"
sentry-sdk = "==1.19.1" sentry-sdk = "==2.22.0"
watchdog = "==4.0.0" watchdog = "==6.0.0"
troi = "==2024.1.26.0" troi = "==2025.1.29.0"
lb-matching-tools = "==2024.1.25.0rc1" lb-matching-tools = "==2024.1.30.1"
unidecode = "==1.3.7" unidecode = "==1.3.8"
pycountry = "23.12.11" pycountry = "24.6.1"
# Typesense # Typesense
typesense = { version = "==0.15.1", optional = true } typesense = { version = "==0.21.0", optional = true }
# Dependencies pinning # Dependencies pinning
ipython = "==8.12.3" ipython = "==8.32.0"
pluralizer = "==1.2.0" pluralizer = "==1.2.0"
service-identity = "==24.1.0" service-identity = "==24.2.0"
unicode-slugify = "==0.1.5" unicode-slugify = "==0.1.5"
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
aioresponses = "==0.7.6" aioresponses = "==0.7.8"
asynctest = "==0.13.0" asynctest = "==0.13.0"
black = "==24.1.1" black = "==25.1.0"
coverage = { version = "==7.4.1", extras = ["toml"] } coverage = { version = "==7.6.12", extras = ["toml"] }
debugpy = "==1.6.7.post1" debugpy = "==1.8.12"
django-coverage-plugin = "==3.0.0" django-coverage-plugin = "==3.1.0"
django-debug-toolbar = "==4.2.0" django-debug-toolbar = "==5.0.1"
factory-boy = "==3.2.1" factory-boy = "==3.3.3"
faker = "==23.2.1" faker = "==36.1.1"
flake8 = "==3.9.2" flake8 = "==7.1.2"
ipdb = "==0.13.13" ipdb = "==0.13.13"
pytest = "==8.0.0" pytest = "==8.3.4"
pytest-asyncio = "==0.21.0" pytest-asyncio = "==0.25.3"
prompt-toolkit = "==3.0.41" prompt-toolkit = "==3.0.50"
pytest-cov = "==4.0.0" pytest-cov = "==6.0.0"
pytest-django = "==4.5.2" pytest-django = "==4.10.0"
pytest-env = "==1.1.3" pytest-env = "==1.1.5"
pytest-mock = "==3.10.0" pytest-mock = "==3.14.0"
pytest-randomly = "==3.12.0" pytest-randomly = "==3.16.0"
pytest-sugar = "==1.0.0" pytest-sugar = "==1.0.0"
requests-mock = "==1.10.0" requests-mock = "==1.12.1"
pylint = "==3.0.3" pylint = "==3.3.4"
pylint-django = "==2.5.5" pylint-django = "==2.6.1"
django-extensions = "==3.2.3" django-extensions = "==3.2.3"
[tool.poetry.extras] [tool.poetry.extras]

View File

@ -230,6 +230,7 @@ def test_channel_serializer_representation(factories, to_api_date):
"rss_url": channel.get_rss_url(), "rss_url": channel.get_rss_url(),
"url": channel.actor.url, "url": channel.actor.url,
"downloads_count": 12, "downloads_count": 12,
"subscriptions_count": 0,
} }
expected["artist"]["description"] = common_serializers.ContentSerializer( expected["artist"]["description"] = common_serializers.ContentSerializer(
content content
@ -254,6 +255,7 @@ def test_channel_serializer_external_representation(factories, to_api_date):
"rss_url": channel.get_rss_url(), "rss_url": channel.get_rss_url(),
"url": channel.actor.url, "url": channel.actor.url,
"downloads_count": 0, "downloads_count": 0,
"subscriptions_count": 0,
} }
expected["artist"]["description"] = common_serializers.ContentSerializer( expected["artist"]["description"] = common_serializers.ContentSerializer(
content content

View File

@ -1,115 +1,115 @@
import pytest import pytest
from click.testing import CliRunner from click.testing import CliRunner
from funkwhale_api.cli import library, main from funkwhale_api.cli import library, main, users
@pytest.mark.parametrize( @pytest.mark.parametrize(
"cmd, args, handlers", "cmd, args, handlers",
[ [
# ( (
# ("users", "create"), ("users", "create"),
# ( (
# "--username", "--username",
# "testuser", "testuser",
# "--password", "--password",
# "testpassword", "testpassword",
# "--email", "--email",
# "test@hello.com", "test@hello.com",
# "--upload-quota", "--upload-quota",
# "35", "35",
# "--permission", "--permission",
# "library", "library",
# "--permission", "--permission",
# "moderation", "moderation",
# "--staff", "--staff",
# "--superuser", "--superuser",
# ), ),
# [ [
# ( (
# users, users,
# "handler_create_user", "handler_create_user",
# { {
# "username": "testuser", "username": "testuser",
# "password": "testpassword", "password": "testpassword",
# "email": "test@hello.com", "email": "test@hello.com",
# "upload_quota": 35, "upload_quota": 35,
# "permissions": ("library", "moderation"), "permissions": ("library", "moderation"),
# "is_staff": True, "is_staff": True,
# "is_superuser": True, "is_superuser": True,
# }, },
# ) )
# ], ],
# ), ),
# ( (
# ("users", "rm"), ("users", "rm"),
# ("testuser1", "testuser2", "--no-input"), ("testuser1", "testuser2", "--no-input"),
# [ [
# ( (
# users, users,
# "handler_delete_user", "handler_delete_user",
# {"usernames": ("testuser1", "testuser2"), "soft": True}, {"usernames": ("testuser1", "testuser2"), "soft": True},
# ) )
# ], ],
# ), ),
# ( (
# ("users", "rm"), ("users", "rm"),
# ( (
# "testuser1", "testuser1",
# "testuser2", "testuser2",
# "--no-input", "--no-input",
# "--hard", "--hard",
# ), ),
# [ [
# ( (
# users, users,
# "handler_delete_user", "handler_delete_user",
# {"usernames": ("testuser1", "testuser2"), "soft": False}, {"usernames": ("testuser1", "testuser2"), "soft": False},
# ) )
# ], ],
# ), ),
# ( (
# ("users", "set"), ("users", "set"),
# ( (
# "testuser1", "testuser1",
# "testuser2", "testuser2",
# "--no-input", "--no-input",
# "--inactive", "--inactive",
# "--upload-quota", "--upload-quota",
# "35", "35",
# "--no-staff", "--no-staff",
# "--superuser", "--superuser",
# "--permission-library", "--permission-library",
# "--no-permission-moderation", "--no-permission-moderation",
# "--no-permission-settings", "--no-permission-settings",
# "--password", "--password",
# "newpassword", "newpassword",
# ), ),
# [ [
# ( (
# users, users,
# "handler_update_user", "handler_update_user",
# { {
# "usernames": ("testuser1", "testuser2"), "usernames": ("testuser1", "testuser2"),
# "kwargs": { "kwargs": {
# "is_active": False, "is_active": False,
# "upload_quota": 35, "upload_quota": 35,
# "is_staff": False, "is_staff": False,
# "is_superuser": True, "is_superuser": True,
# "permission_library": True, "permission_library": True,
# "permission_moderation": False, "permission_moderation": False,
# "permission_settings": False, "permission_settings": False,
# "password": "newpassword", "password": "newpassword",
# }, },
# }, },
# ) )
# ], ],
# ), ),
# ( (
# ("albums", "add-tags-from-tracks"), ("albums", "add-tags-from-tracks"),
# tuple(), tuple(),
# [(library, "handler_add_tags_from_tracks", {"albums": True})], [(library, "handler_add_tags_from_tracks", {"albums": True})],
# ), ),
( (
("artists", "add-tags-from-tracks"), ("artists", "add-tags-from-tracks"),
tuple(), tuple(),

View File

@ -1,6 +1,5 @@
import os
import pytest import pytest
from django.conf import settings
from django.core.management import call_command from django.core.management import call_command
from django.core.management.base import CommandError from django.core.management.base import CommandError
@ -119,12 +118,13 @@ commands = ["createsuperuser", "makemigrations"]
@pytest.mark.parametrize("command", commands) @pytest.mark.parametrize("command", commands)
def test_blocked_commands(command): def test_blocked_commands(command):
with pytest.raises(CommandError): with pytest.raises(CommandError):
setattr(settings, "FORCE", 0)
call_command(command) call_command(command)
@pytest.mark.parametrize("command", commands) @pytest.mark.parametrize("command", commands)
def test_unblocked_commands(command, mocker): def test_unblocked_commands(command, mocker):
mocker.patch.dict(os.environ, {"FORCE": "1"}) setattr(settings, "FORCE", 1)
call_command(command) call_command(command)

View File

@ -195,6 +195,9 @@ def test_attachment_serializer_existing_file(factories, to_api_date):
"urls": { "urls": {
"source": attachment.url, "source": attachment.url,
"original": federation_utils.full_url(attachment.file.url), "original": federation_utils.full_url(attachment.file.url),
"small_square_crop": federation_utils.full_url(
attachment.file.crop["50x50"].url
),
"medium_square_crop": federation_utils.full_url( "medium_square_crop": federation_utils.full_url(
attachment.file.crop["200x200"].url attachment.file.crop["200x200"].url
), ),
@ -225,6 +228,9 @@ def test_attachment_serializer_remote_file(factories, to_api_date):
"urls": { "urls": {
"source": attachment.url, "source": attachment.url,
"original": federation_utils.full_url(proxy_url + "?next=original"), "original": federation_utils.full_url(proxy_url + "?next=original"),
"small_square_crop": federation_utils.full_url(
proxy_url + "?next=small_square_crop"
),
"medium_square_crop": federation_utils.full_url( "medium_square_crop": federation_utils.full_url(
proxy_url + "?next=medium_square_crop" proxy_url + "?next=medium_square_crop"
), ),

View File

@ -391,7 +391,7 @@ def migrator(transactional_db):
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
def rsa_small_key(settings): def rsa_small_key(settings):
# smaller size for faster generation, since it's CPU hungry # smaller size for faster generation, since it's CPU hungry
settings.RSA_KEY_SIZE = 512 settings.RSA_KEY_SIZE = 1024
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)

View File

@ -4,7 +4,6 @@ import logging
import liblistenbrainz import liblistenbrainz
import pytest import pytest
from django.urls import reverse from django.urls import reverse
from django.utils import timezone
from config import plugins from config import plugins
from funkwhale_api.contrib.listenbrainz import funkwhale_ready from funkwhale_api.contrib.listenbrainz import funkwhale_ready
@ -52,7 +51,8 @@ def test_sync_listenings_from_listenbrainz(factories, mocker, caplog):
factories["music.Track"](mbid="f89db7f8-4a1f-4228-a0a1-e7ba028b7476") factories["music.Track"](mbid="f89db7f8-4a1f-4228-a0a1-e7ba028b7476")
track = factories["music.Track"](mbid="54c60860-f43d-484e-b691-7ab7ec8de559") track = factories["music.Track"](mbid="54c60860-f43d-484e-b691-7ab7ec8de559")
factories["history.Listening"]( factories["history.Listening"](
creation_date=datetime.datetime.fromtimestamp(1871, timezone.utc), track=track creation_date=datetime.datetime.fromtimestamp(1871, datetime.timezone.utc),
track=track,
) )
conf = { conf = {

View File

@ -1,5 +1,3 @@
import uuid
import pytest import pytest
from django.db.models import Q from django.db.models import Q
from django.urls import reverse from django.urls import reverse
@ -246,9 +244,6 @@ def test_should_reject(factories, params, policy_kwargs, expected):
def test_get_actors_from_audience_urls(settings, db): def test_get_actors_from_audience_urls(settings, db):
settings.FEDERATION_HOSTNAME = "federation.hostname" settings.FEDERATION_HOSTNAME = "federation.hostname"
library_uuid1 = uuid.uuid4()
library_uuid2 = uuid.uuid4()
urls = [ urls = [
"https://wrong.url", "https://wrong.url",
"https://federation.hostname" "https://federation.hostname"
@ -257,21 +252,15 @@ def test_get_actors_from_audience_urls(settings, db):
+ reverse("federation:actors-detail", kwargs={"preferred_username": "alice"}), + reverse("federation:actors-detail", kwargs={"preferred_username": "alice"}),
"https://federation.hostname" "https://federation.hostname"
+ reverse("federation:actors-detail", kwargs={"preferred_username": "bob"}), + reverse("federation:actors-detail", kwargs={"preferred_username": "bob"}),
"https://federation.hostname" "https://federation.hostname",
+ reverse("federation:music:libraries-detail", kwargs={"uuid": library_uuid1}),
"https://federation.hostname"
+ reverse("federation:music:libraries-detail", kwargs={"uuid": library_uuid2}),
activity.PUBLIC_ADDRESS, activity.PUBLIC_ADDRESS,
] ]
followed_query = Q(target__followers_url=urls[0]) followed_query = Q(target__followers_url=urls[0])
for url in urls[1:-1]: for url in urls[1:-1]:
followed_query |= Q(target__followers_url=url) followed_query |= Q(target__followers_url=url)
actor_follows = models.Follow.objects.filter(followed_query, approved=True) actor_follows = models.Follow.objects.filter(followed_query, approved=True)
library_follows = models.LibraryFollow.objects.filter(followed_query, approved=True)
expected = models.Actor.objects.filter( expected = models.Actor.objects.filter(
Q(fid__in=urls[0:-1]) Q(fid__in=urls[0:-1]) | Q(pk__in=actor_follows.values_list("actor", flat=True))
| Q(pk__in=actor_follows.values_list("actor", flat=True))
| Q(pk__in=library_follows.values_list("actor", flat=True))
) )
assert str(activity.get_actors_from_audience(urls).query) == str(expected.query) assert str(activity.get_actors_from_audience(urls).query) == str(expected.query)
@ -478,17 +467,9 @@ def test_prepare_deliveries_and_inbox_items(factories, preferences):
) )
remote_actor3 = factories["federation.Actor"](shared_inbox_url=None) remote_actor3 = factories["federation.Actor"](shared_inbox_url=None)
remote_actor4 = factories["federation.Actor"]() remote_actor4 = factories["federation.Actor"]()
library = factories["music.Library"]()
library_follower_local = factories["federation.LibraryFollow"](
target=library, actor__local=True, approved=True
).actor
library_follower_remote = factories["federation.LibraryFollow"](
target=library, actor__local=False, approved=True
).actor
# follow not approved # follow not approved
factories["federation.LibraryFollow"]( factories["federation.Follow"](
target=library, actor__local=False, approved=False target=remote_actor3, actor__local=False, approved=False
) )
followed_actor = factories["federation.Actor"]() followed_actor = factories["federation.Actor"]()
@ -511,7 +492,6 @@ def test_prepare_deliveries_and_inbox_items(factories, preferences):
remote_actor2, remote_actor2,
remote_actor3, remote_actor3,
activity.PUBLIC_ADDRESS, activity.PUBLIC_ADDRESS,
{"type": "followers", "target": library},
{"type": "followers", "target": followed_actor}, {"type": "followers", "target": followed_actor},
{"type": "actor_inbox", "actor": remote_actor4}, {"type": "actor_inbox", "actor": remote_actor4},
] ]
@ -524,7 +504,6 @@ def test_prepare_deliveries_and_inbox_items(factories, preferences):
models.InboxItem(actor=local_actor1, type="to"), models.InboxItem(actor=local_actor1, type="to"),
models.InboxItem(actor=local_actor2, type="to"), models.InboxItem(actor=local_actor2, type="to"),
models.InboxItem(actor=local_actor3, type="to"), models.InboxItem(actor=local_actor3, type="to"),
models.InboxItem(actor=library_follower_local, type="to"),
models.InboxItem(actor=actor_follower_local, type="to"), models.InboxItem(actor=actor_follower_local, type="to"),
], ],
key=lambda v: v.actor.pk, key=lambda v: v.actor.pk,
@ -535,7 +514,6 @@ def test_prepare_deliveries_and_inbox_items(factories, preferences):
models.Delivery(inbox_url=remote_actor1.shared_inbox_url), models.Delivery(inbox_url=remote_actor1.shared_inbox_url),
models.Delivery(inbox_url=remote_actor3.inbox_url), models.Delivery(inbox_url=remote_actor3.inbox_url),
models.Delivery(inbox_url=remote_actor4.inbox_url), models.Delivery(inbox_url=remote_actor4.inbox_url),
models.Delivery(inbox_url=library_follower_remote.inbox_url),
models.Delivery(inbox_url=actor_follower_remote.inbox_url), models.Delivery(inbox_url=actor_follower_remote.inbox_url),
], ],
key=lambda v: v.inbox_url, key=lambda v: v.inbox_url,
@ -549,7 +527,6 @@ def test_prepare_deliveries_and_inbox_items(factories, preferences):
remote_actor2.fid, remote_actor2.fid,
remote_actor3.fid, remote_actor3.fid,
activity.PUBLIC_ADDRESS, activity.PUBLIC_ADDRESS,
library.followers_url,
followed_actor.followers_url, followed_actor.followers_url,
remote_actor4.fid, remote_actor4.fid,
] ]

View File

@ -12,7 +12,6 @@ def test_library_serializer(factories, to_api_date):
"uuid": str(library.uuid), "uuid": str(library.uuid),
"actor": serializers.APIActorSerializer(library.actor).data, "actor": serializers.APIActorSerializer(library.actor).data,
"name": library.name, "name": library.name,
"description": library.description,
"creation_date": to_api_date(library.creation_date), "creation_date": to_api_date(library.creation_date),
"uploads_count": library.uploads_count, "uploads_count": library.uploads_count,
"privacy_level": library.privacy_level, "privacy_level": library.privacy_level,

View File

@ -5,8 +5,10 @@ from funkwhale_api.federation import authentication, exceptions, jsonld, keys
def test_authenticate(factories, mocker, api_request): def test_authenticate(factories, mocker, api_request):
private, public = keys.get_key_pair() private, public = keys.get_key_pair()
factories["federation.Domain"](name="test.federation", nodeinfo_fetch_date=None) domain = factories["federation.Domain"](
actor_url = "https://test.federation/actor" name="test.federationnolocal", nodeinfo_fetch_date=None
)
actor_url = "https://test.federationnolocal/actor"
mocker.patch( mocker.patch(
"funkwhale_api.federation.actors.get_actor_data", "funkwhale_api.federation.actors.get_actor_data",
return_value={ return_value={
@ -42,11 +44,12 @@ def test_authenticate(factories, mocker, api_request):
authenticator = authentication.SignatureAuthentication() authenticator = authentication.SignatureAuthentication()
user, _ = authenticator.authenticate(django_request) user, _ = authenticator.authenticate(django_request)
actor = django_request.actor actor = django_request.actor
actor.domain = domain
actor.save()
assert user.is_anonymous is True assert user.is_anonymous is True
assert actor.public_key == public.decode("utf-8") assert actor.public_key == public.decode("utf-8")
assert actor.fid == actor_url assert actor.fid == actor_url
update_domain_nodeinfo.assert_called_once_with(domain_name="test.federation") update_domain_nodeinfo.assert_called_once_with(domain_name="test.federationnolocal")
def test_authenticate_skips_blocked_domain(factories, api_request): def test_authenticate_skips_blocked_domain(factories, api_request):

View File

@ -29,7 +29,7 @@ def test_fix_fids_no_dry_run(factories, mocker, queryset_equal_queries):
(music_models.Album, ["fid"]), (music_models.Album, ["fid"]),
(music_models.Track, ["fid"]), (music_models.Track, ["fid"]),
(music_models.Upload, ["fid"]), (music_models.Upload, ["fid"]),
(music_models.Library, ["fid", "followers_url"]), (music_models.Library, ["fid"]),
( (
federation_models.Actor, federation_models.Actor,
[ [

View File

@ -370,7 +370,7 @@ def test_outbox_create_audio(factories, mocker):
} }
) )
expected = serializer.data expected = serializer.data
expected["to"] = [{"type": "followers", "target": upload.library}] expected["to"] = [{"type": "followers", "target": upload.library.actor}]
assert dict(activity["payload"]) == dict(expected) assert dict(activity["payload"]) == dict(expected)
assert activity["actor"] == upload.library.actor assert activity["actor"] == upload.library.actor
@ -685,7 +685,7 @@ def test_outbox_delete_audio(factories):
{"type": "Delete", "object": {"type": "Audio", "id": [upload.fid]}} {"type": "Delete", "object": {"type": "Audio", "id": [upload.fid]}}
).data ).data
expected["to"] = [{"type": "followers", "target": upload.library}] expected["to"] = [{"type": "followers", "target": upload.library.actor}]
assert dict(activity["payload"]) == dict(expected) assert dict(activity["payload"]) == dict(expected)
assert activity["actor"] == upload.library.actor assert activity["actor"] == upload.library.actor

View File

@ -548,13 +548,11 @@ def test_music_library_serializer_to_ap(factories):
"type": "Library", "type": "Library",
"id": library.fid, "id": library.fid,
"name": library.name, "name": library.name,
"summary": library.description,
"attributedTo": library.actor.fid, "attributedTo": library.actor.fid,
"totalItems": 0, "totalItems": 0,
"current": library.fid + "?page=1", "current": library.fid + "?page=1",
"last": library.fid + "?page=1", "last": library.fid + "?page=1",
"first": library.fid + "?page=1", "first": library.fid + "?page=1",
"followers": library.followers_url,
} }
assert serializer.data == expected assert serializer.data == expected
@ -569,10 +567,8 @@ def test_music_library_serializer_from_public(factories, mocker):
"@context": jsonld.get_default_context(), "@context": jsonld.get_default_context(),
"audience": "https://www.w3.org/ns/activitystreams#Public", "audience": "https://www.w3.org/ns/activitystreams#Public",
"name": "Hello", "name": "Hello",
"summary": "World",
"type": "Library", "type": "Library",
"id": "https://library.id", "id": "https://library.id",
"followers": "https://library.id/followers",
"attributedTo": actor.fid, "attributedTo": actor.fid,
"totalItems": 12, "totalItems": 12,
"first": "https://library.id?page=1", "first": "https://library.id?page=1",
@ -589,8 +585,6 @@ def test_music_library_serializer_from_public(factories, mocker):
assert library.uploads_count == data["totalItems"] assert library.uploads_count == data["totalItems"]
assert library.privacy_level == "everyone" assert library.privacy_level == "everyone"
assert library.name == "Hello" assert library.name == "Hello"
assert library.description == "World"
assert library.followers_url == data["followers"]
retrieve.assert_called_once_with( retrieve.assert_called_once_with(
actor.fid, actor.fid,
@ -609,10 +603,8 @@ def test_music_library_serializer_from_private(factories, mocker):
"@context": jsonld.get_default_context(), "@context": jsonld.get_default_context(),
"audience": "", "audience": "",
"name": "Hello", "name": "Hello",
"summary": "World",
"type": "Library", "type": "Library",
"id": "https://library.id", "id": "https://library.id",
"followers": "https://library.id/followers",
"attributedTo": actor.fid, "attributedTo": actor.fid,
"totalItems": 12, "totalItems": 12,
"first": "https://library.id?page=1", "first": "https://library.id?page=1",
@ -629,8 +621,6 @@ def test_music_library_serializer_from_private(factories, mocker):
assert library.uploads_count == data["totalItems"] assert library.uploads_count == data["totalItems"]
assert library.privacy_level == "me" assert library.privacy_level == "me"
assert library.name == "Hello" assert library.name == "Hello"
assert library.description == "World"
assert library.followers_url == data["followers"]
retrieve.assert_called_once_with( retrieve.assert_called_once_with(
actor.fid, actor.fid,
actor=None, actor=None,
@ -647,10 +637,8 @@ def test_music_library_serializer_from_ap_update(factories, mocker):
"@context": jsonld.get_default_context(), "@context": jsonld.get_default_context(),
"audience": "https://www.w3.org/ns/activitystreams#Public", "audience": "https://www.w3.org/ns/activitystreams#Public",
"name": "Hello", "name": "Hello",
"summary": "World",
"type": "Library", "type": "Library",
"id": library.fid, "id": library.fid,
"followers": "https://library.id/followers",
"attributedTo": actor.fid, "attributedTo": actor.fid,
"totalItems": 12, "totalItems": 12,
"first": "https://library.id?page=1", "first": "https://library.id?page=1",
@ -666,8 +654,6 @@ def test_music_library_serializer_from_ap_update(factories, mocker):
assert library.uploads_count == data["totalItems"] assert library.uploads_count == data["totalItems"]
assert library.privacy_level == "everyone" assert library.privacy_level == "everyone"
assert library.name == "Hello" assert library.name == "Hello"
assert library.description == "World"
assert library.followers_url == data["followers"]
def test_activity_pub_artist_serializer_to_ap(factories): def test_activity_pub_artist_serializer_to_ap(factories):
@ -1282,7 +1268,7 @@ def test_activity_pub_upload_serializer_from_ap(factories, mocker, r_mock):
"name": "Ignored", "name": "Ignored",
"published": published.isoformat(), "published": published.isoformat(),
"updated": updated.isoformat(), "updated": updated.isoformat(),
"duration": 43, "duration": "PT43S",
"bitrate": 42, "bitrate": 42,
"size": 66, "size": 66,
"url": {"href": "https://audio.file", "type": "Link", "mediaType": "audio/mp3"}, "url": {"href": "https://audio.file", "type": "Link", "mediaType": "audio/mp3"},
@ -1351,7 +1337,7 @@ def test_activity_pub_upload_serializer_from_ap(factories, mocker, r_mock):
assert track_create.call_count == 1 assert track_create.call_count == 1
assert upload.fid == data["id"] assert upload.fid == data["id"]
assert upload.track.fid == data["track"]["id"] assert upload.track.fid == data["track"]["id"]
assert upload.duration == data["duration"] assert upload.duration == 43
assert upload.size == data["size"] assert upload.size == data["size"]
assert upload.bitrate == data["bitrate"] assert upload.bitrate == data["bitrate"]
assert upload.source == data["url"]["href"] assert upload.source == data["url"]["href"]
@ -1371,7 +1357,7 @@ def test_activity_pub_upload_serializer_from_ap_update(factories, mocker, now, r
"name": "Ignored", "name": "Ignored",
"published": now.isoformat(), "published": now.isoformat(),
"updated": now.isoformat(), "updated": now.isoformat(),
"duration": 42, "duration": "PT42S",
"bitrate": 42, "bitrate": 42,
"size": 66, "size": 66,
"url": { "url": {
@ -1390,7 +1376,7 @@ def test_activity_pub_upload_serializer_from_ap_update(factories, mocker, now, r
upload.refresh_from_db() upload.refresh_from_db()
assert upload.fid == data["id"] assert upload.fid == data["id"]
assert upload.duration == data["duration"] assert upload.duration == 42
assert upload.size == data["size"] assert upload.size == data["size"]
assert upload.bitrate == data["bitrate"] assert upload.bitrate == data["bitrate"]
assert upload.source == data["url"]["href"] assert upload.source == data["url"]["href"]
@ -1422,7 +1408,7 @@ def test_activity_pub_audio_serializer_to_ap(factories):
"name": upload.track.full_name, "name": upload.track.full_name,
"published": upload.creation_date.isoformat(), "published": upload.creation_date.isoformat(),
"updated": upload.modification_date.isoformat(), "updated": upload.modification_date.isoformat(),
"duration": upload.duration, "duration": "PT43S",
"bitrate": upload.bitrate, "bitrate": upload.bitrate,
"size": upload.size, "size": upload.size,
"to": contexts.AS.Public, "to": contexts.AS.Public,
@ -1791,7 +1777,7 @@ def test_channel_upload_serializer(factories):
"content": common_utils.render_html(content.text, content.content_type), "content": common_utils.render_html(content.text, content.content_type),
"to": "https://www.w3.org/ns/activitystreams#Public", "to": "https://www.w3.org/ns/activitystreams#Public",
"position": upload.track.position, "position": upload.track.position,
"duration": upload.duration, "duration": "PT54S",
"album": upload.track.album.fid, "album": upload.track.album.fid,
"disc": upload.track.disc_number, "disc": upload.track.disc_number,
"copyright": upload.track.copyright, "copyright": upload.track.copyright,
@ -1840,7 +1826,7 @@ def test_channel_upload_serializer_from_ap_create(factories, now, mocker):
"published": now.isoformat(), "published": now.isoformat(),
"mediaType": "text/html", "mediaType": "text/html",
"content": "<p>Hello</p>", "content": "<p>Hello</p>",
"duration": 543, "duration": "PT543S",
"position": 4, "position": 4,
"disc": 2, "disc": 2,
"album": album.fid, "album": album.fid,
@ -1889,7 +1875,7 @@ def test_channel_upload_serializer_from_ap_create(factories, now, mocker):
assert upload.mimetype == payload["url"][1]["mediaType"] assert upload.mimetype == payload["url"][1]["mediaType"]
assert upload.size == payload["url"][1]["size"] assert upload.size == payload["url"][1]["size"]
assert upload.bitrate == payload["url"][1]["bitrate"] assert upload.bitrate == payload["url"][1]["bitrate"]
assert upload.duration == payload["duration"] assert upload.duration == 543
assert upload.track.artist_credit.all()[0].artist == channel.artist assert upload.track.artist_credit.all()[0].artist == channel.artist
assert upload.track.position == payload["position"] assert upload.track.position == payload["position"]
assert upload.track.disc_number == payload["disc"] assert upload.track.disc_number == payload["disc"]
@ -1923,7 +1909,7 @@ def test_channel_upload_serializer_from_ap_update(factories, now, mocker):
"published": now.isoformat(), "published": now.isoformat(),
"mediaType": "text/html", "mediaType": "text/html",
"content": "<p>Hello</p>", "content": "<p>Hello</p>",
"duration": 543, "duration": "PT543S",
"position": 4, "position": 4,
"disc": 2, "disc": 2,
"album": album.fid, "album": album.fid,
@ -1973,7 +1959,7 @@ def test_channel_upload_serializer_from_ap_update(factories, now, mocker):
assert upload.mimetype == payload["url"][1]["mediaType"] assert upload.mimetype == payload["url"][1]["mediaType"]
assert upload.size == payload["url"][1]["size"] assert upload.size == payload["url"][1]["size"]
assert upload.bitrate == payload["url"][1]["bitrate"] assert upload.bitrate == payload["url"][1]["bitrate"]
assert upload.duration == payload["duration"] assert upload.duration == 543
assert upload.track.artist_credit.all()[0].artist == channel.artist assert upload.track.artist_credit.all()[0].artist == channel.artist
assert upload.track.position == payload["position"] assert upload.track.position == payload["position"]
assert upload.track.disc_number == payload["disc"] assert upload.track.disc_number == payload["disc"]

View File

@ -219,7 +219,6 @@ def test_music_library_retrieve_page_public(factories, api_client):
"actor": library.actor, "actor": library.actor,
"page": Paginator([upload], 1).page(1), "page": Paginator([upload], 1).page(1),
"name": library.name, "name": library.name,
"summary": library.description,
} }
).data ).data

View File

@ -447,16 +447,13 @@ def test_manage_library_serializer(factories, now, to_api_date):
"fid": library.fid, "fid": library.fid,
"url": library.url, "url": library.url,
"uuid": str(library.uuid), "uuid": str(library.uuid),
"followers_url": library.followers_url,
"domain": library.domain_name, "domain": library.domain_name,
"is_local": library.is_local, "is_local": library.is_local,
"name": library.name, "name": library.name,
"description": library.description,
"privacy_level": library.privacy_level, "privacy_level": library.privacy_level,
"creation_date": to_api_date(library.creation_date), "creation_date": to_api_date(library.creation_date),
"actor": serializers.ManageBaseActorSerializer(library.actor).data, "actor": serializers.ManageBaseActorSerializer(library.actor).data,
"uploads_count": 44, "uploads_count": 44,
"followers_count": 42,
} }
s = serializers.ManageLibrarySerializer(library) s = serializers.ManageLibrarySerializer(library)

View File

@ -19,7 +19,7 @@ def test_upload_import_status_updated_broadcast(factories, mocker):
upload = factories["music.Upload"]( upload = factories["music.Upload"](
import_status="finished", library__actor__user=user import_status="finished", library__actor__user=user
) )
signals.upload_import_status_updated.send( signals.upload_import_status_updated.send_robust(
sender=None, upload=upload, old_status="pending", new_status="finished" sender=None, upload=upload, old_status="pending", new_status="finished"
) )
group_send.assert_called_once_with( group_send.assert_called_once_with(

Binary file not shown.

View File

@ -187,6 +187,7 @@ def test_can_get_metadata_from_id3_aiff_file(field, value):
"with_cover.ogg", "with_cover.ogg",
"with_cover.opus", "with_cover.opus",
"test.m4a", "test.m4a",
"test_coverart.ogg",
], ],
) )
def test_can_get_pictures(name): def test_can_get_pictures(name):

View File

@ -1,3 +1,8 @@
from uuid import uuid4
import pytest
from django.utils.timezone import now
# this test is commented since it's very slow, but it can be useful for future development # this test is commented since it's very slow, but it can be useful for future development
# def test_pytest_plugin_initial(migrator): # def test_pytest_plugin_initial(migrator):
# mapping_list = [ # mapping_list = [
@ -72,3 +77,110 @@ def test_artist_credit_migration(migrator):
assert album_obj.artist_credit.all()[0].artist.pk == old_album.artist.pk assert album_obj.artist_credit.all()[0].artist.pk == old_album.artist.pk
assert album_obj.artist_credit.all()[0].joinphrase == "" assert album_obj.artist_credit.all()[0].joinphrase == ""
assert album_obj.artist_credit.all()[0].credit == old_album.artist.name assert album_obj.artist_credit.all()[0].credit == old_album.artist.name
@pytest.mark.django_db
def test_migrate_libraries_to_playlist(migrator):
music_initial_migration = (
"music",
"0060_empty_for_test",
)
music_final_migration = ("music", "0061_migrate_libraries_to_playlist")
# Apply migrations
migrator.migrate(
[
music_initial_migration,
]
)
music_apps = migrator.loader.project_state([music_initial_migration]).apps
Playlist = music_apps.get_model("playlists", "Playlist")
LibraryFollow = music_apps.get_model("federation", "LibraryFollow")
Actor = music_apps.get_model("federation", "Actor")
Domain = music_apps.get_model("federation", "Domain")
Track = music_apps.get_model("music", "Track")
Library = music_apps.get_model("music", "Library")
Upload = music_apps.get_model("music", "Upload")
# Create data
domain = Domain.objects.create()
domain2 = Domain.objects.create(pk=2)
actor = Actor.objects.create(name="Test Actor", domain=domain)
target_actor = Actor.objects.create(
name="Test Actor 2",
domain=domain2,
fid="http://test2.com/superduniquemanonmam",
)
library = Library.objects.create(
name="This should becane playlist name",
actor=target_actor,
creation_date=now(),
privacy_level="everyone",
uuid=uuid4(),
description="This is a description",
)
Track.objects.create()
Track.objects.create()
track = Track.objects.create()
track2 = Track.objects.create()
track3 = Track.objects.create()
uploads = [
Upload.objects.create(library=library, track=track),
Upload.objects.create(library=library, track=track2),
Upload.objects.create(library=library, track=track3),
]
library_follow = LibraryFollow.objects.create(
uuid=uuid4(),
target=library,
actor=actor,
approved=True,
creation_date=now(),
modification_date=now(),
)
# Perform migration
migrator.loader.build_graph()
migrator.migrate([music_final_migration])
new_apps = migrator.loader.project_state([music_final_migration]).apps
Playlist = new_apps.get_model("playlists", "Playlist")
PlaylistTrack = new_apps.get_model("playlists", "PlaylistTrack")
Follow = new_apps.get_model("federation", "Follow")
LibraryFollow = new_apps.get_model("federation", "LibraryFollow")
# Assertions
# Verify Playlist creation
playlist = Playlist.objects.get(name="This should become playlist name")
assert playlist.actor.pk == library.actor.pk
assert playlist.creation_date == library.creation_date
assert playlist.privacy_level == library.privacy_level
assert playlist.description == library.description
# Verify PlaylistTrack creation
playlist_tracks = PlaylistTrack.objects.filter(playlist=playlist).order_by("index")
assert playlist_tracks.count() == 3
for i, playlist_track in enumerate(playlist_tracks):
assert playlist_track.track.pk == uploads[i].track.pk
# Verify User Follow creation
follow = Follow.objects.get(target__pk=target_actor.pk)
assert follow.actor.pk == actor.pk
assert follow.approved == library_follow.approved
# Verify LibraryFollow deletion and library creation
assert LibraryFollow.objects.count() == 0
# This assertion fails in the test environment but passes on a real database;
# the cause is unclear: no libraries are found through the new app state.
# NewAppLibrary = new_apps.get_model("music", "Library")
# assert NewAppLibrary.objects.count() == 3

View File

@ -263,7 +263,7 @@ def test_library(factories):
now = timezone.now() now = timezone.now()
actor = factories["federation.Actor"]() actor = factories["federation.Actor"]()
library = factories["music.Library"]( library = factories["music.Library"](
name="Hello world", description="hello", actor=actor, privacy_level="instance" name="Hello world", actor=actor, privacy_level="instance"
) )
assert library.creation_date >= now assert library.creation_date >= now

View File

@ -169,6 +169,7 @@ def test_upload_owner_serializer(factories, to_api_date):
"import_details": {"hello": "world"}, "import_details": {"hello": "world"},
"source": "upload://test", "source": "upload://test",
"import_reference": "ref", "import_reference": "ref",
"privacy_level": upload.library.privacy_level,
} }
serializer = serializers.UploadForOwnerSerializer(upload) serializer = serializers.UploadForOwnerSerializer(upload)
assert serializer.data == expected assert serializer.data == expected
@ -198,6 +199,10 @@ def test_album_serializer(factories, to_api_date):
"is_local": album.is_local, "is_local": album.is_local,
"tags": [], "tags": [],
"attributed_to": federation_serializers.APIActorSerializer(actor).data, "attributed_to": federation_serializers.APIActorSerializer(actor).data,
"description": None,
"tracks": [
serializers.TrackSerializer(track).data for track in album.tracks.all()
],
} }
serializer = serializers.AlbumSerializer( serializer = serializers.AlbumSerializer(
album.__class__.objects.with_tracks_count().get(pk=album.pk) album.__class__.objects.with_tracks_count().get(pk=album.pk)
@ -230,6 +235,10 @@ def test_track_album_serializer(factories, to_api_date):
"is_local": album.is_local, "is_local": album.is_local,
"tags": [], "tags": [],
"attributed_to": federation_serializers.APIActorSerializer(actor).data, "attributed_to": federation_serializers.APIActorSerializer(actor).data,
"description": None,
"tracks": [
serializers.TrackSerializer(track).data for track in album.tracks.all()
],
} }
serializer = serializers.AlbumSerializer( serializer = serializers.AlbumSerializer(
album.__class__.objects.with_tracks_count().get(pk=album.pk) album.__class__.objects.with_tracks_count().get(pk=album.pk)
@ -271,6 +280,7 @@ def test_track_serializer(factories, to_api_date):
"cover": common_serializers.AttachmentSerializer(track.attachment_cover).data, "cover": common_serializers.AttachmentSerializer(track.attachment_cover).data,
"downloads_count": track.downloads_count, "downloads_count": track.downloads_count,
"is_playable": bool(track.playable_uploads), "is_playable": bool(track.playable_uploads),
"description": None,
} }
serializer = serializers.TrackSerializer(track) serializer = serializers.TrackSerializer(track)
assert serializer.data == expected assert serializer.data == expected
@ -406,31 +416,6 @@ def test_track_upload_serializer(factories):
assert data == expected assert data == expected
@pytest.mark.parametrize(
"field,before,after",
[
("privacy_level", "me", "everyone"),
("name", "Before", "After"),
("description", "Before", "After"),
],
)
def test_update_library_privacy_level_broadcasts_to_followers(
factories, field, before, after, mocker
):
dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
library = factories["music.Library"](**{field: before})
serializer = serializers.LibraryForOwnerSerializer(
library, data={field: after}, partial=True
)
assert serializer.is_valid(raise_exception=True)
serializer.save()
dispatch.assert_called_once_with(
{"type": "Update", "object": {"type": "Library"}}, context={"library": library}
)
def test_upload_with_channel(factories, uploaded_audio_file): def test_upload_with_channel(factories, uploaded_audio_file):
channel = factories["audio.Channel"](attributed_to__local=True) channel = factories["audio.Channel"](attributed_to__local=True)
user = channel.attributed_to.user user = channel.attributed_to.user

View File

@ -311,7 +311,6 @@ def test_library_library(spa_html, no_api_auth, client, factories, settings):
}, },
{"tag": "meta", "property": "og:type", "content": "website"}, {"tag": "meta", "property": "og:type", "content": "website"},
{"tag": "meta", "property": "og:title", "content": library.name}, {"tag": "meta", "property": "og:title", "content": library.name},
{"tag": "meta", "property": "og:description", "content": library.description},
{ {
"tag": "link", "tag": "link",
"rel": "alternate", "rel": "alternate",

View File

@ -1460,7 +1460,7 @@ def test_tag_albums_from_tracks(queryset_equal_queries, factories, mocker):
get_tags_from_foreign_key.assert_called_once_with( get_tags_from_foreign_key.assert_called_once_with(
ids=expected_queryset.filter(pk__in=[1, 2]), ids=expected_queryset.filter(pk__in=[1, 2]),
foreign_key_model=models.Track, foreign_key_model=models.Track,
foreign_key_attr="album", foreign_key_attr="albums",
) )
add_tags_batch.assert_called_once_with( add_tags_batch.assert_called_once_with(

View File

@ -1,6 +1,5 @@
import os import os
import pathlib import pathlib
import tempfile
import pytest import pytest
@ -114,25 +113,6 @@ def test_get_dirs_and_files(path, expected, tmpdir):
assert utils.browse_dir(root_path, path) == expected assert utils.browse_dir(root_path, path) == expected
@pytest.mark.parametrize(
"name, expected",
[
("sample.flac", {"bitrate": 128000, "length": 0}),
("test.mp3", {"bitrate": 16000, "length": 268}),
("test.ogg", {"bitrate": 128000, "length": 1}),
("test.opus", {"bitrate": 128000, "length": 1}),
],
)
def test_transcode_file(name, expected):
path = pathlib.Path(os.path.join(DATA_DIR, name))
with tempfile.NamedTemporaryFile() as dest:
utils.transcode_file(path, pathlib.Path(dest.name))
with open(dest.name, "rb") as f:
result = {k: round(v) for k, v in utils.get_audio_file_data(f).items()}
assert result == expected
def test_custom_s3_domain(factories, settings): def test_custom_s3_domain(factories, settings):
"""See #2220""" """See #2220"""
settings.AWS_S3_CUSTOM_DOMAIN = "my.custom.domain.tld" settings.AWS_S3_CUSTOM_DOMAIN = "my.custom.domain.tld"

View File

@ -621,8 +621,6 @@ def test_listen_transcode_in_place(
source="file://" + os.path.join(DATA_DIR, "test.ogg"), source="file://" + os.path.join(DATA_DIR, "test.ogg"),
) )
assert upload.get_audio_segment()
url = reverse("api:v1:listen-detail", kwargs={"uuid": upload.track.uuid}) url = reverse("api:v1:listen-detail", kwargs={"uuid": upload.track.uuid})
handle_serve = mocker.spy(views, "handle_serve") handle_serve = mocker.spy(views, "handle_serve")
response = logged_in_api_client.get(url, {"to": "mp3"}) response = logged_in_api_client.get(url, {"to": "mp3"})
@ -640,25 +638,6 @@ def test_listen_transcode_in_place(
) )
def test_user_can_create_library(factories, logged_in_api_client):
actor = logged_in_api_client.user.create_actor()
url = reverse("api:v1:libraries-list")
response = logged_in_api_client.post(
url, {"name": "hello", "description": "world", "privacy_level": "me"}
)
library = actor.libraries.first()
assert response.status_code == 201
assert library.actor == actor
assert library.name == "hello"
assert library.description == "world"
assert library.privacy_level == "me"
assert library.fid == library.get_federation_id()
assert library.followers_url == library.fid + "/followers"
def test_user_can_list_their_library(factories, logged_in_api_client): def test_user_can_list_their_library(factories, logged_in_api_client):
actor = logged_in_api_client.user.create_actor() actor = logged_in_api_client.user.create_actor()
library = factories["music.Library"](actor=actor) library = factories["music.Library"](actor=actor)
@ -712,17 +691,7 @@ def test_user_cannot_delete_other_actors_library(factories, logged_in_api_client
url = reverse("api:v1:libraries-detail", kwargs={"uuid": library.uuid}) url = reverse("api:v1:libraries-detail", kwargs={"uuid": library.uuid})
response = logged_in_api_client.delete(url) response = logged_in_api_client.delete(url)
assert response.status_code == 404 assert response.status_code == 405
def test_library_delete_via_api_triggers_outbox(factories, mocker):
dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
library = factories["music.Library"]()
view = views.LibraryViewSet()
view.perform_destroy(library)
dispatch.assert_called_once_with(
{"type": "Delete", "object": {"type": "Library"}}, context={"library": library}
)
def test_user_cannot_get_other_not_playable_uploads(factories, logged_in_api_client): def test_user_cannot_get_other_not_playable_uploads(factories, logged_in_api_client):
@ -937,25 +906,6 @@ def test_user_can_patch_draft_upload_status_triggers_processing(
m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk) m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk)
def test_user_can_list_own_library_follows(factories, logged_in_api_client):
actor = logged_in_api_client.user.create_actor()
library = factories["music.Library"](actor=actor)
another_library = factories["music.Library"](actor=actor)
follow = factories["federation.LibraryFollow"](target=library)
factories["federation.LibraryFollow"](target=another_library)
url = reverse("api:v1:libraries-follows", kwargs={"uuid": library.uuid})
response = logged_in_api_client.get(url)
assert response.data == {
"count": 1,
"next": None,
"previous": None,
"results": [federation_api_serializers.LibraryFollowSerializer(follow).data],
}
@pytest.mark.parametrize("entity", ["artist", "album", "track"]) @pytest.mark.parametrize("entity", ["artist", "album", "track"])
def test_can_get_libraries_for_music_entities( def test_can_get_libraries_for_music_entities(
factories, api_client, entity, preferences factories, api_client, entity, preferences
@ -1615,3 +1565,25 @@ def test_album_create_artist_credit(factories, logged_in_api_client):
url, {"artist": artist.pk, "title": "super album"}, format="json" url, {"artist": artist.pk, "title": "super album"}, format="json"
) )
assert response.status_code == 204 assert response.status_code == 204
def test_can_patch_upload_list(factories, logged_in_api_client):
url = reverse("api:v1:uploads-bulk-update")
actor = logged_in_api_client.user.create_actor()
upload = factories["music.Upload"](library__actor=actor)
upload2 = factories["music.Upload"](library__actor=actor)
factories["music.Library"](actor=actor, privacy_level="everyone")
response = logged_in_api_client.patch(
url,
[
{"uuid": upload.uuid, "privacy_level": "everyone"},
{"uuid": upload2.uuid, "privacy_level": "everyone"},
],
format="json",
)
upload.refresh_from_db()
upload2.refresh_from_db()
assert response.status_code == 200
assert upload.library.privacy_level == "everyone"
assert upload2.library.privacy_level == "everyone"

View File

@ -85,7 +85,35 @@ def test_playlist_serializer(factories, to_api_date):
"duration": 0, "duration": 0,
"tracks_count": 0, "tracks_count": 0,
"album_covers": [], "album_covers": [],
"description": playlist.description,
} }
serializer = serializers.PlaylistSerializer(playlist) serializer = serializers.PlaylistSerializer(playlist)
assert serializer.data == expected assert serializer.data == expected
# TODO:
# @pytest.mark.parametrize(
# "field,before,after",
# [
# ("privacy_level", "me", "everyone"),
# ("name", "Before", "After"),
# ("description", "Before", "After"),
# ],
# )
# def test_update_playlist_privacy_level_broadcasts_to_followers(
# factories, field, before, after, mocker
# ):
# dispatch = mocker.patch("funkwhale_api.federation.routes.outbox.dispatch")
# playlist = factories["playlists.Playlist"](**{field: before})
# serializer = serializers.PlaylistSerializer(
# playlist, data={field: after}, partial=True
# )
# assert serializer.is_valid(raise_exception=True)
# serializer.save()
# dispatch.assert_called_once_with(
# {"type": "Update", "object": {"type": "Library"}}, context={"library": library}
# )

View File

@ -91,7 +91,7 @@ def test_build_radio_queryset_with_redis_and_without_fw_db(factories, mocker):
def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker): def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
mocker.patch.object( mocker.patch.object(
troi.core.Patch, troi.patch.Patch,
"generate_playlist", "generate_playlist",
side_effect=ConnectTimeout, side_effect=ConnectTimeout,
) )
@ -105,7 +105,7 @@ def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
def test_build_radio_queryset_catch_troi_no_candidates(mocker): def test_build_radio_queryset_catch_troi_no_candidates(mocker):
mocker.patch.object( mocker.patch.object(
troi.core.Patch, troi.patch.Patch,
"generate_playlist", "generate_playlist",
) )
qs = Track.objects.all() qs = Track.objects.all()

View File

@ -156,6 +156,9 @@ def test_get_artist_info_2_serializer(factories):
expected = { expected = {
"musicBrainzId": artist.mbid, "musicBrainzId": artist.mbid,
"smallImageUrl": renderers.TagValue(
artist.attachment_cover.download_url_small_square_crop
),
"mediumImageUrl": renderers.TagValue( "mediumImageUrl": renderers.TagValue(
artist.attachment_cover.download_url_medium_square_crop artist.attachment_cover.download_url_medium_square_crop
), ),

View File

@ -227,6 +227,62 @@ def test_get_album(
) )
@pytest.mark.parametrize("f", ["json"])
def test_get_album_info_2(
f, db, logged_in_api_client, factories, mocker, queryset_equal_queries
):
url = reverse("api:subsonic:subsonic-get_album_info_2")
assert url.endswith("getAlbumInfo2") is True
artist_credit = factories["music.ArtistCredit"]()
album = (
factories["music.Album"](artist_credit=artist_credit)
.__class__.objects.with_duration()
.first()
)
factories["music.Track"].create_batch(size=3, album=album, playable=True)
playable_by = mocker.spy(music_models.AlbumQuerySet, "playable_by")
expected = {"albumInfo": serializers.GetAlbumSerializer(album).data}
response = logged_in_api_client.get(url, {"f": f, "id": album.pk})
assert response.status_code == 200
assert response.data == expected
playable_by.assert_called_once_with(
music_models.Album.objects.with_duration().prefetch_related(
"artist_credit__artist"
),
None,
)
@pytest.mark.parametrize("f", ["json"])
def test_get_album_info(
f, db, logged_in_api_client, factories, mocker, queryset_equal_queries
):
url = reverse("api:subsonic:subsonic-get_album_info")
assert url.endswith("getAlbumInfo") is True
artist_credit = factories["music.ArtistCredit"]()
album = (
factories["music.Album"](artist_credit=artist_credit)
.__class__.objects.with_duration()
.first()
)
factories["music.Track"].create_batch(size=3, album=album, playable=True)
playable_by = mocker.spy(music_models.AlbumQuerySet, "playable_by")
expected = {"albumInfo": serializers.GetAlbumSerializer(album).data}
response = logged_in_api_client.get(url, {"f": f, "id": album.pk})
assert response.status_code == 200
assert response.data == expected
playable_by.assert_called_once_with(
music_models.Album.objects.with_duration().prefetch_related(
"artist_credit__artist"
),
None,
)
@pytest.mark.parametrize("f", ["json"]) @pytest.mark.parametrize("f", ["json"])
def test_get_song( def test_get_song(
f, db, logged_in_api_client, factories, mocker, queryset_equal_queries f, db, logged_in_api_client, factories, mocker, queryset_equal_queries
@ -247,6 +303,32 @@ def test_get_song(
playable_by.assert_called_once_with(music_models.Track.objects.all(), None) playable_by.assert_called_once_with(music_models.Track.objects.all(), None)
@pytest.mark.parametrize("f", ["json"])
def test_get_top_songs(
f, db, logged_in_api_client, factories, mocker, queryset_equal_queries
):
url = reverse("api:subsonic:subsonic-get_top_songs")
assert url.endswith("getTopSongs") is True
artist_credit = factories["music.ArtistCredit"]()
album = factories["music.Album"](artist_credit=artist_credit)
track = factories["music.Track"](album=album, playable=True)
tracks = factories["music.Track"].create_batch(20, album=album, playable=True)
factories["music.Upload"](track=track)
factories["history.Listening"].create_batch(20, track=track)
factories["history.Listening"].create_batch(2, track=tracks[2])
playable_by = mocker.spy(music_models.TrackQuerySet, "playable_by")
response = logged_in_api_client.get(
url, {"f": f, "artist": artist_credit.artist.name, "count": 2}
)
assert response.status_code == 200
assert response.data["topSongs"][0] == serializers.get_track_data(
track.album, track, track.uploads.all()[0]
)
playable_by.assert_called_once_with(music_models.Track.objects.all(), None)
@pytest.mark.parametrize("f", ["json"]) @pytest.mark.parametrize("f", ["json"])
def test_stream( def test_stream(
f, db, logged_in_api_client, factories, mocker, queryset_equal_queries, settings f, db, logged_in_api_client, factories, mocker, queryset_equal_queries, settings
@ -646,12 +728,11 @@ def test_search3(f, db, logged_in_api_client, factories):
@pytest.mark.parametrize("f", ["json"]) @pytest.mark.parametrize("f", ["json"])
def test_get_playlists(f, db, logged_in_api_client, factories): def test_get_playlists(f, db, logged_in_api_client, factories):
logged_in_api_client.user.create_actor()
url = reverse("api:subsonic:subsonic-get_playlists") url = reverse("api:subsonic:subsonic-get_playlists")
assert url.endswith("getPlaylists") is True assert url.endswith("getPlaylists") is True
playlist1 = factories["playlists.PlaylistTrack"]( playlist1 = factories["playlists.PlaylistTrack"](
playlist__actor__user=logged_in_api_client.user playlist__actor=logged_in_api_client.user.create_actor()
).playlist ).playlist
playlist2 = factories["playlists.PlaylistTrack"]( playlist2 = factories["playlists.PlaylistTrack"](
playlist__privacy_level="everyone" playlist__privacy_level="everyone"
@ -664,7 +745,6 @@ def test_get_playlists(f, db, logged_in_api_client, factories):
# no track # no track
playlist4 = factories["playlists.Playlist"](privacy_level="everyone") playlist4 = factories["playlists.Playlist"](privacy_level="everyone")
factories["users.User"](actor=playlist1.actor)
factories["users.User"](actor=playlist2.actor) factories["users.User"](actor=playlist2.actor)
factories["users.User"](actor=playlist3.actor) factories["users.User"](actor=playlist3.actor)
factories["users.User"](actor=playlist4.actor) factories["users.User"](actor=playlist4.actor)
@ -692,7 +772,6 @@ def test_get_playlist(f, db, logged_in_api_client, factories):
playlist = factories["playlists.PlaylistTrack"]( playlist = factories["playlists.PlaylistTrack"](
playlist__actor__user=logged_in_api_client.user playlist__actor__user=logged_in_api_client.user
).playlist ).playlist
factories["users.User"](actor=playlist.actor)
response = logged_in_api_client.get(url, {"f": f, "id": playlist.pk}) response = logged_in_api_client.get(url, {"f": f, "id": playlist.pk})

View File

@ -3,8 +3,8 @@ from funkwhale_api.tags import models, tasks
def test_get_tags_from_foreign_key(factories): def test_get_tags_from_foreign_key(factories):
rock_tag = factories["tags.Tag"](name="Rock") rock_tag = factories["tags.Tag"](name="rock")
rap_tag = factories["tags.Tag"](name="Rap") rap_tag = factories["tags.Tag"](name="rap")
artist = factories["music.Artist"]() artist = factories["music.Artist"]()
factories["music.Track"].create_batch( factories["music.Track"].create_batch(
3, artist_credit__artist=artist, set_tags=["rock", "rap"] 3, artist_credit__artist=artist, set_tags=["rock", "rap"]

View File

@ -4,6 +4,7 @@ import pytest
from django.urls import reverse from django.urls import reverse
from funkwhale_api.federation import utils as federation_utils from funkwhale_api.federation import utils as federation_utils
from funkwhale_api.music import models as music_models
from funkwhale_api.users import models from funkwhale_api.users import models
@ -182,6 +183,18 @@ def test_creating_actor_from_user(factories, settings):
) )
def test_creating_libraries_from_user(factories, settings):
user = factories["users.User"](username="Hello M. world", with_actor=True)
models.create_user_libraries(user)
for privacy_level, _ in music_models.LIBRARY_PRIVACY_LEVEL_CHOICES:
assert (
user.actor.libraries.filter(
name=privacy_level, privacy_level=privacy_level, actor=user.actor
).count()
== 1
)
def test_get_channels_groups(factories): def test_get_channels_groups(factories):
user = factories["users.User"](permission_library=True) user = factories["users.User"](permission_library=True)

View File

@ -38,7 +38,7 @@ def test_can_create_user_via_api_mail_verification_mandatory(
} }
preferences["users__registration_enabled"] = True preferences["users__registration_enabled"] = True
response = api_client.post(url, data) response = api_client.post(url, data)
assert response.status_code == 204 assert response.status_code == 201
u = User.objects.get(email="test1@test.com") u = User.objects.get(email="test1@test.com")
assert u.username == "test1" assert u.username == "test1"
@ -102,7 +102,7 @@ def test_can_signup_with_invitation(preferences, factories, api_client):
} }
preferences["users__registration_enabled"] = False preferences["users__registration_enabled"] = False
response = api_client.post(url, data) response = api_client.post(url, data)
assert response.status_code == 204 assert response.status_code == 201
u = User.objects.get(email="test1@test.com") u = User.objects.get(email="test1@test.com")
assert u.username == "test1" assert u.username == "test1"
assert u.invitation == invitation assert u.invitation == invitation
@ -322,7 +322,7 @@ def test_creating_user_creates_actor_as_well(
mocker.patch("funkwhale_api.users.models.create_actor", return_value=actor) mocker.patch("funkwhale_api.users.models.create_actor", return_value=actor)
response = api_client.post(url, data) response = api_client.post(url, data)
assert response.status_code == 204 assert response.status_code == 201
user = User.objects.get(username="test1") user = User.objects.get(username="test1")
@ -343,7 +343,7 @@ def test_creating_user_sends_confirmation_email(
preferences["instance__name"] = "Hello world" preferences["instance__name"] = "Hello world"
response = api_client.post(url, data) response = api_client.post(url, data)
assert response.status_code == 204 assert response.status_code == 201
confirmation_message = mailoutbox[-1] confirmation_message = mailoutbox[-1]
assert "Hello world" in confirmation_message.body assert "Hello world" in confirmation_message.body
@ -425,7 +425,7 @@ def test_signup_with_approval_enabled(
} }
on_commit = mocker.patch("funkwhale_api.common.utils.on_commit") on_commit = mocker.patch("funkwhale_api.common.utils.on_commit")
response = api_client.post(url, data, format="json") response = api_client.post(url, data, format="json")
assert response.status_code == 204 assert response.status_code == 201
u = User.objects.get(email="test1@test.com") u = User.objects.get(email="test1@test.com")
assert u.username == "test1" assert u.username == "test1"
assert u.is_active is False assert u.is_active is False

View File

@ -0,0 +1 @@
ActivityStreams compliance: duration (#1566)
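
For context on the test changes above that swap integer durations for strings like "PT43S": the Audio/upload payloads now carry ISO 8601 durations on the wire while the model keeps whole seconds. A minimal round-trip sketch, assuming plain-Python helpers (the names below are illustrative, not Funkwhale's actual serializer code, which may rely on a dedicated parsing library):

import re

def seconds_to_iso8601(seconds: int) -> str:
    # Render whole seconds as an ISO 8601 duration, e.g. 43 -> "PT43S".
    return f"PT{int(seconds)}S"

def iso8601_to_seconds(value: str) -> int:
    # Parse a simple "PT<h>H<m>M<s>S" duration back into whole seconds.
    match = re.fullmatch(r"PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?", value)
    if not match:
        raise ValueError(f"unsupported ISO 8601 duration: {value!r}")
    hours, minutes, seconds = (int(part or 0) for part in match.groups())
    return hours * 3600 + minutes * 60 + seconds

assert seconds_to_iso8601(43) == "PT43S"
assert iso8601_to_seconds("PT543S") == 543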
