fix(front): re-base on develop, discarding work done on !2666 https://dev.funkwhale.audio/funkwhale/funkwhale/-/tree/wvffle/new-upload-process?ref_type=heads
Parent: 3cd7548cf0
Commit: 82fc748196
@@ -1,4 +1,4 @@
FROM alpine:3.19 AS requirements
FROM alpine:3.21 AS requirements

RUN set -eux; \
apk add --no-cache \

@@ -12,7 +12,7 @@ RUN set -eux; \
poetry export --without-hashes --extras typesense > requirements.txt; \
poetry export --without-hashes --with dev > dev-requirements.txt;

FROM alpine:3.19 AS builder
FROM alpine:3.21 AS builder

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

@@ -37,11 +37,11 @@ RUN set -eux; \
openssl-dev \
postgresql-dev \
zlib-dev \
py3-cryptography=41.0.7-r0 \
py3-lxml=4.9.3-r1 \
py3-pillow=10.3.0-r0 \
py3-psycopg2=2.9.9-r0 \
py3-watchfiles=0.19.0-r1 \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
python3-dev

# Create virtual env

@@ -61,11 +61,11 @@ RUN --mount=type=cache,target=~/.cache/pip; \
# to install the deps using pip.
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==41.0.7 \
lxml==4.9.3 \
pillow==10.2.0 \
psycopg2==2.9.9 \
watchfiles==0.19.0
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles

ARG install_dev_deps=0
RUN --mount=type=cache,target=~/.cache/pip; \

@@ -73,14 +73,14 @@ RUN --mount=type=cache,target=~/.cache/pip; \
if [ "$install_dev_deps" = "1" ] ; then \
grep -Ev 'cryptography|lxml|pillow|psycopg2|watchfiles' /dev-requirements.txt \
| pip3 install -r /dev/stdin \
cryptography==41.0.7 \
lxml==4.9.3 \
pillow==10.2.0 \
psycopg2==2.9.9 \
watchfiles==0.19.0; \
cryptography \
lxml \
pillow \
psycopg2 \
watchfiles; \
fi

FROM alpine:3.19 AS production
FROM alpine:3.21 AS production

ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1

@@ -97,11 +97,11 @@ RUN set -eux; \
libpq \
libxml2 \
libxslt \
py3-cryptography=41.0.7-r0 \
py3-lxml=4.9.3-r1 \
py3-pillow=10.3.0-r0 \
py3-psycopg2=2.9.9-r0 \
py3-watchfiles=0.19.0-r1 \
py3-cryptography \
py3-lxml \
py3-pillow \
py3-psycopg2 \
py3-watchfiles \
python3 \
tzdata
@@ -1,4 +1,4 @@
FROM python:3.12-slim AS builder
FROM python:3.13-slim AS builder

ARG POETRY_VERSION=1.8

@@ -39,7 +39,7 @@ RUN python3 -m venv --system-site-packages ${VIRTUAL_ENV} && . ${VIRTUAL_ENV}/bi
RUN --mount=type=cache,target=/opt/.cache \
poetry install --no-root --extras typesense

FROM python:3.12-slim AS runtime
FROM python:3.13-slim AS runtime

ARG POETRY_VERSION=1.8
@@ -299,6 +299,10 @@ def background_task(name):

# HOOKS
TRIGGER_THIRD_PARTY_UPLOAD = "third_party_upload"
"""
Called when a track is being listened
"""
LISTENING_CREATED = "listening_created"
"""
Called when a track is being listened
@@ -114,6 +114,7 @@ else:
logger.info("Loaded env file at %s/.env", path)
break

FUNKWHALE_PLUGINS = env("FUNKWHALE_PLUGINS", default="")
FUNKWHALE_PLUGINS_PATH = env(
"FUNKWHALE_PLUGINS_PATH", default="/srv/funkwhale/plugins/"
)

@@ -314,6 +315,7 @@ MIDDLEWARE = (
tuple(plugins.trigger_filter(plugins.MIDDLEWARES_BEFORE, [], enabled=True))
+ tuple(ADDITIONAL_MIDDLEWARES_BEFORE)
+ (
"allauth.account.middleware.AccountMiddleware",
"django.middleware.security.SecurityMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"corsheaders.middleware.CorsMiddleware",
@@ -152,3 +152,6 @@ REST_FRAMEWORK.update(
],
}
)

# allows makemigrations and superuser creation
FORCE = env("FORCE", default=1)
@@ -2,12 +2,10 @@ from django.conf.urls import include
from django.urls import re_path

from funkwhale_api.common import routers as common_routers
from funkwhale_api.music.views import UploadGroupViewSet

from . import api

router = common_routers.OptionalSlashRouter()
router.register(r"upload-groups", UploadGroupViewSet, "upload-groups")
v2_patterns = router.urls

v2_patterns += [
@@ -1,5 +1,4 @@
import os

from django.conf import settings
from django.contrib.auth.management.commands.createsuperuser import (
Command as BaseCommand,
)

@@ -12,7 +11,8 @@ class Command(BaseCommand):
Creating Django Superusers would bypass some of our username checks, which can lead to unexpected behaviour.
We therefore prohibit the execution of the command.
"""
if not os.environ.get("FORCE") == "1":
force = settings.FORCE
if not force == 1:
raise CommandError(
"Running createsuperuser on your Funkwhale instance bypasses some of our checks "
"which can lead to unexpected behavior of your instance. We therefore suggest to "
@@ -1,5 +1,4 @@
import os

from django.conf import settings
from django.core.management.base import CommandError
from django.core.management.commands.makemigrations import Command as BaseCommand

@@ -11,8 +10,8 @@ class Command(BaseCommand):

We ensure the command is disabled, unless a specific env var is provided.
"""
force = os.environ.get("FORCE") == "1"
if not force:
force = settings.FORCE
if not force == 1:
raise CommandError(
"Running makemigrations on your Funkwhale instance can have desastrous"
" consequences. This command is disabled, and should only be run in "
@@ -0,0 +1,13 @@
import logging

from config import plugins
from funkwhale_api.contrib.archivedl import tasks

from .funkwhale_startup import PLUGIN

logger = logging.getLogger(__name__)


@plugins.register_hook(plugins.TRIGGER_THIRD_PARTY_UPLOAD, PLUGIN)
def lauch_download(track, conf={}):
tasks.archive_download.delay(track_id=track.pk, conf=conf)
@@ -0,0 +1,10 @@
from config import plugins

PLUGIN = plugins.get_plugin_config(
name="archivedl",
label="Archive-dl",
description="",
version="0.1",
user=False,
conf=[],
)
@@ -0,0 +1,148 @@
import asyncio
import hashlib
import logging
import os
import tempfile
import urllib.parse

import requests
from django.core.files import File
from django.utils import timezone

from funkwhale_api.federation import actors
from funkwhale_api.music import models, utils
from funkwhale_api.taskapp import celery

logger = logging.getLogger(__name__)


def create_upload(url, track, files_data):
mimetype = f"audio/{files_data.get('format', 'unknown')}"
duration = files_data.get("mtime", 0)
filesize = files_data.get("size", 0)
bitrate = files_data.get("bitrate", 0)
service_library = models.Library.objects.create(
privacy_level="everyone",
actor=actors.get_service_actor(),
)
return models.Upload.objects.create(
mimetype=mimetype,
source=url,
third_party_provider="archive-dl",
creation_date=timezone.now(),
track=track,
duration=duration,
size=filesize,
bitrate=bitrate,
library=service_library,
from_activity=None,
import_status="finished",
)


@celery.app.task(name="archivedl.archive_download")
@celery.require_instance(models.Track.objects.select_related(), "track")
def archive_download(track, conf):
artist_name = utils.get_artist_credit_string(track)
query = f"mediatype:audio AND title:{track.title} AND creator:{artist_name}"
with requests.Session() as session:
url = get_search_url(query, page_size=1, page=1)
page_data = fetch_json(url, session)
for obj in page_data["response"]["docs"]:
logger.info(f"launching download item for {str(obj)}")
download_item(
item_data=obj,
session=session,
allowed_extensions=utils.SUPPORTED_EXTENSIONS,
track=track,
)


def fetch_json(url, session):
logger.info(f"Fetching {url}...")
with session.get(url) as response:
return response.json()


def download_item(
item_data,
session,
allowed_extensions,
track,
):
files_data = get_files_data(item_data["identifier"], session)
to_download = list(
filter_files(
files_data["result"],
allowed_extensions=allowed_extensions,
)
)
url = f"https://archive.org/download/{item_data['identifier']}/{to_download[0]['name']}"
upload = create_upload(url, track, to_download[0])
try:
with tempfile.TemporaryDirectory() as temp_dir:
path = os.path.join(temp_dir, to_download[0]["name"])
download_file(
path,
url=url,
session=session,
checksum=to_download[0]["sha1"],
upload=upload,
to_download=to_download,
)
logger.info(f"Finished to download item {item_data['identifier']}...")
except Exception as e:
upload.delete()
raise e


def check_integrity(path, expected_checksum):
with open(path, mode="rb") as f:
hash = hashlib.sha1()
hash.update(f.read())
return expected_checksum == hash.hexdigest()


def get_files_data(identifier, session):
url = f"https://archive.org/metadata/{identifier}/files"
logger.info(f"Fetching files data at {url}...")
with session.get(url) as response:
return response.json()


def download_file(path, url, session, checksum, upload, to_download):
if os.path.exists(path) and check_integrity(path, checksum):
logger.info(f"Skipping already downloaded file at {path}")
return
logger.info(f"Downloading file {url}...")
with open(path, mode="wb") as f:
try:
with session.get(url) as response:
f.write(response.content)
except asyncio.TimeoutError as e:
logger.error(f"Timeout error while downloading {url}: {e}")

with open(path, "rb") as f:
upload.audio_file.save(f"{to_download['name']}", File(f))
upload.import_status = "finished"
upload.url = url
upload.save()
return upload


def filter_files(files, allowed_extensions):
for f in files:
if allowed_extensions:
extension = os.path.splitext(f["name"])[-1][1:]
if extension not in allowed_extensions:
continue
yield f


def get_search_url(query, page_size, page):
q = urllib.parse.urlencode({"q": query})
return f"https://archive.org/advancedsearch.php?{q}&sort[]=addeddate+desc&rows={page_size}&page={page}&output=json"
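Not part of the diff above: the plugin's download path verifies each file fetched from archive.org against the SHA-1 checksum reported by the metadata endpoint and skips the download when a cached copy already matches. A minimal sketch of that verification step (function and file names are illustrative, not taken from the commit):

import hashlib

def sha1_matches(path, expected_checksum):
    # Hash the file in chunks so large audio files never need to fit in memory.
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_checksum

# Usage: only re-download when the local copy is missing or corrupted.
# if not sha1_matches("/tmp/example.flac", file_entry["sha1"]): download_file(...)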
@@ -57,7 +57,7 @@ def import_listenbrainz_listenings(user, user_name, since):
new_ts = max(
listens,
key=lambda obj: datetime.datetime.fromtimestamp(
obj.listened_at, timezone.utc
obj.listened_at, datetime.timezone.utc
),
)
response = client.get_listens(username=user_name, min_ts=new_ts, count=100)

@@ -74,7 +74,7 @@ def add_lb_listenings_to_db(listens, user):
== "Funkwhale ListenBrainz plugin"
and history_models.Listening.objects.filter(
creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, timezone.utc
listen.listened_at, datetime.timezone.utc
)
).exists()
):

@@ -103,7 +103,7 @@ def add_lb_listenings_to_db(listens, user):
user = user
fw_listen = history_models.Listening(
creation_date=datetime.datetime.fromtimestamp(
listen.listened_at, timezone.utc
listen.listened_at, datetime.timezone.utc
),
track=track,
actor=user.actor,

@@ -125,7 +125,7 @@ def import_listenbrainz_favorites(user, user_name, since):
last_sync = min(
response["feedback"],
key=lambda obj: datetime.datetime.fromtimestamp(
obj["created"], timezone.utc
obj["created"], datetime.timezone.utc
),
)["created"]
add_lb_feedback_to_db(response["feedback"], user)

@@ -149,7 +149,7 @@ def add_lb_feedback_to_db(feedbacks, user):
favorites_models.TrackFavorite.objects.get_or_create(
actor=user.actor,
creation_date=datetime.datetime.fromtimestamp(
feedback["created"], timezone.utc
feedback["created"], datetime.timezone.utc
),
track=track,
source="Listenbrainz",
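Each of these hunks swaps the tzinfo argument from the bare timezone.utc (presumably the django.utils.timezone alias, which newer Django releases removed) to the standard library's datetime.timezone.utc when converting ListenBrainz Unix timestamps. A small self-contained sketch of the resulting conversion, with an illustrative timestamp:

import datetime

listened_at = 1735689600  # example Unix timestamp, not taken from the commit
dt = datetime.datetime.fromtimestamp(listened_at, datetime.timezone.utc)
# The result is timezone-aware and pinned to UTC regardless of the server's locale.
assert dt.tzinfo is datetime.timezone.utc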
@@ -81,11 +81,12 @@ class SignatureAuthentication(authentication.BaseAuthentication):
fetch_delay = 24 * 3600
now = timezone.now()
last_fetch = actor.domain.nodeinfo_fetch_date
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
if not actor.domain.is_local:
if not last_fetch or (
last_fetch < (now - datetime.timedelta(seconds=fetch_delay))
):
tasks.update_domain_nodeinfo(domain_name=actor.domain.name)
actor.domain.refresh_from_db()
return actor

def authenticate(self, request):
@@ -128,11 +128,6 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
class Meta:
model = models.Actor

class Params:
with_real_keys = factory.Trait(
keys=factory.LazyFunction(keys.get_key_pair),
)

@factory.post_generation
def local(self, create, extracted, **kwargs):
if not extracted and not kwargs:

@@ -153,6 +148,26 @@ class ActorFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
extracted.actor = self
extracted.save(update_fields=["user"])
else:
user = UserFactory(actor=self, **kwargs)
user.actor = self
user.save()

@factory.post_generation
def user(self, create, extracted, **kwargs):
"""
Handle the creation or assignment of the related user instance.
If `actor__user` is passed, it will be linked; otherwise, no user is created.
"""
if not create:
return

if extracted: # If a User instance is provided
extracted.actor = self
extracted.save(update_fields=["actor"])
elif kwargs:
from funkwhale_api.users.factories import UserFactory

# Create a User linked to this Actor
self.user = UserFactory(actor=self, **kwargs)
@@ -1,5 +1,6 @@
import logging
import os
import re
import urllib.parse
import uuid

@@ -1580,6 +1581,50 @@ class TrackSerializer(MusicEntitySerializer):
return super().update(obj, validated_data)


def duration_int_to_xml(duration):
if not duration:
return None

multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}
ret = "P"
days, seconds = divmod(int(duration), multipliers["D"])
ret += f"{days:d}DT" if days > 0 else "T"
hours, seconds = divmod(seconds, multipliers["H"])
ret += f"{hours:d}H" if hours > 0 else ""
minutes, seconds = divmod(seconds, multipliers["M"])
ret += f"{minutes:d}M" if minutes > 0 else ""
ret += f"{seconds:d}S" if seconds > 0 or ret == "PT" else ""
return ret


class DayTimeDurationSerializer(serializers.DurationField):
multipliers = {"S": 1, "M": 60, "H": 3600, "D": 86400}

def to_internal_value(self, value):
if isinstance(value, float):
return value

parsed = re.match(
r"P([0-9]+D)?T([0-9]+H)?([0-9]+M)?([0-9]+(?:\.[0-9]+)?S)?", str(value)
)
if parsed is not None:
return int(
sum(
[
self.multipliers[s[-1]] * float("0" + s[:-1])
for s in parsed.groups()
if s is not None
]
)
)
self.fail(
"invalid", format="https://www.w3.org/TR/xmlschema11-2/#dayTimeDuration"
)

def to_representation(self, value):
duration_int_to_xml(value)


class UploadSerializer(jsonld.JsonLdSerializer):
type = serializers.ChoiceField(choices=[contexts.AS.Audio])
id = serializers.URLField(max_length=500)

@@ -1589,7 +1634,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
updated = serializers.DateTimeField(required=False, allow_null=True)
bitrate = serializers.IntegerField(min_value=0)
size = serializers.IntegerField(min_value=0)
duration = serializers.IntegerField(min_value=0)
duration = DayTimeDurationSerializer(min_value=0)

track = TrackSerializer(required=True)

@@ -1701,7 +1746,7 @@ class UploadSerializer(jsonld.JsonLdSerializer):
"published": instance.creation_date.isoformat(),
"bitrate": instance.bitrate,
"size": instance.size,
"duration": instance.duration,
"duration": duration_int_to_xml(instance.duration),
"url": [
{
"href": utils.full_url(instance.listen_url_no_download),

@@ -1851,7 +1896,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
url = LinkListSerializer(keep_mediatype=["audio/*"], min_length=1)
name = serializers.CharField()
published = serializers.DateTimeField(required=False)
duration = serializers.IntegerField(min_value=0, required=False)
duration = DayTimeDurationSerializer(required=False)
position = serializers.IntegerField(min_value=0, allow_null=True, required=False)
disc = serializers.IntegerField(min_value=1, allow_null=True, required=False)
album = serializers.URLField(max_length=500, required=False)

@@ -1960,7 +2005,7 @@ class ChannelUploadSerializer(jsonld.JsonLdSerializer):
if upload.track.local_license:
data["license"] = upload.track.local_license["identifiers"][0]

include_if_not_none(data, upload.duration, "duration")
include_if_not_none(data, duration_int_to_xml(upload.duration), "duration")
include_if_not_none(data, upload.track.position, "position")
include_if_not_none(data, upload.track.disc_number, "disc")
include_if_not_none(data, upload.track.copyright, "copyright")
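For reference, a worked example of the new ISO 8601 dayTimeDuration handling introduced above (values are illustrative and assume both definitions are importable from this serializers module):

# 3725 seconds = 1 hour, 2 minutes, 5 seconds
assert duration_int_to_xml(3725) == "PT1H2M5S"
# 90061 seconds = 1 day, 1 hour, 1 minute, 1 second
assert duration_int_to_xml(90061) == "P1DT1H1M1S"
# Parsing goes the other way, back to an integer number of seconds.
assert DayTimeDurationSerializer().to_internal_value("PT1H2M5S") == 3725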
@@ -30,7 +30,7 @@ def verify_date(raw_date):
ts = parse_http_date(raw_date)
except ValueError as e:
raise forms.ValidationError(str(e))
dt = datetime.datetime.utcfromtimestamp(ts)
dt = datetime.datetime.fromtimestamp(ts, datetime.timezone.utc)
dt = dt.replace(tzinfo=ZoneInfo("UTC"))
delta = datetime.timedelta(seconds=DATE_HEADER_VALID_FOR)
now = timezone.now()
@@ -70,7 +70,6 @@ class UploadAdmin(admin.ModelAdmin):
"size",
"bitrate",
"import_status",
"upload_group",
]
list_select_related = ["track"]
search_fields = [

@@ -83,11 +82,6 @@ class UploadAdmin(admin.ModelAdmin):
list_filter = ["mimetype", "import_status", "library__privacy_level"]


@admin.register(models.UploadGroup)
class UploadGroupAdmin(admin.ModelAdmin):
pass


@admin.register(models.UploadVersion)
class UploadVersionAdmin(admin.ModelAdmin):
list_display = [
@@ -202,15 +202,6 @@ class TrackFactory(
license = factory.PostGeneration(_license_post_generation)


@registry.register
class UploadGroupFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
name = factory.Faker("name")
owner = factory.SubFactory(federation_factories.ActorFactory)

class Meta:
model = "music.UploadGroup"


@registry.register
class UploadFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
fid = factory.Faker("federation_url")

@@ -219,7 +210,6 @@ class UploadFactory(NoUpdateOnCreate, factory.django.DjangoModelFactory):
audio_file = factory.django.FileField(
from_path=os.path.join(SAMPLES_PATH, "test.ogg")
)
upload_group = factory.RelatedFactory(UploadGroupFactory)

bitrate = None
size = None
@@ -5,7 +5,9 @@ Populates the database with fake data
import logging
import random

from funkwhale_api.audio import factories as audio_factories
from funkwhale_api.cli import users
from funkwhale_api.favorites import factories as favorites_factories
from funkwhale_api.federation import factories as federation_factories
from funkwhale_api.history import factories as history_factories
from funkwhale_api.music import factories as music_factories

@@ -15,7 +17,7 @@ from funkwhale_api.users import models, serializers
logger = logging.getLogger(__name__)


def create_data(count=2, super_user_name=None):
def create_data(super_user_name=None):
super_user = None
if super_user_name:
try:

@@ -35,7 +37,7 @@ def create_data(count=2, super_user_name=None):
in errors[0]
):
print(
f"Superuser {super_user_name} already in db. Skipping fake-data creation"
f"Superuser {super_user_name} already in db. Skipping superuser creation"
)
super_user = models.User.objects.get(username=super_user_name)
continue

@@ -43,35 +45,103 @@ def create_data(count=2, super_user_name=None):
raise e
print(f"Superuser with username {super_user_name} and password `funkwhale`")

library = federation_factories.MusicLibraryFactory(
actor=(
super_user.actor if super_user else federation_factories.ActorFactory()
),
local=True,
library = federation_factories.MusicLibraryFactory(
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
local=True,
)
uploads = music_factories.UploadFactory.create_batch(
size=random.randint(3, 18),
playable=True,
library=library,
local=True,
)
for upload in uploads[:2]:
history_factories.ListeningFactory(
track=upload.track, actor=upload.library.actor
)
uploads = music_factories.UploadFactory.create_batch(
size=random.randint(3, 18),
playable=True,
library=library,
local=True,
favorites_factories.TrackFavorite(
track=upload.track, actor=upload.library.actor
)
for upload in uploads:
history_factories.ListeningFactory(
track=upload.track, actor=upload.library.actor
)
print("Created fid", upload.track.fid)

playlist = playlist_factories.PlaylistFactory(
name="playlist test public",
privacy_level="everyone",
actor=(
super_user.actor if super_user else federation_factories.ActorFactory()
),
)
playlist_factories.PlaylistTrackFactory(playlist=playlist, track=upload.track)
federation_factories.LibraryFollowFactory.create_batch(
size=random.randint(3, 18), actor=super_user.actor
)
print("Created fid", upload.track.fid)

playlist = playlist_factories.PlaylistFactory(
name="playlist test public",
privacy_level="everyone",
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
)
playlist_factories.PlaylistTrackFactory(playlist=playlist, track=upload.track)
federation_factories.LibraryFollowFactory.create_batch(
size=random.randint(3, 18), actor=super_user.actor
)

# my podcast
my_podcast_library = federation_factories.MusicLibraryFactory(
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
local=True,
)
my_podcast_channel = audio_factories.ChannelFactory(
library=my_podcast_library,
attributed_to=super_user.actor,
artist__content_category="podcast",
)
my_podcast_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=my_podcast_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=my_podcast_channel.artist,
album=my_podcast_channel_serie,
)

# podcast
podcast_channel = audio_factories.ChannelFactory(artist__content_category="podcast")
podcast_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=podcast_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=podcast_channel.artist,
album=podcast_channel_serie,
)

audio_factories.SubscriptionFactory(
approved=True, target=podcast_channel.actor, actor=super_user.actor
)

# my artist channel
my_artist_library = federation_factories.MusicLibraryFactory(
actor=(super_user.actor if super_user else federation_factories.ActorFactory()),
local=True,
)
my_artist_channel = audio_factories.ChannelFactory(
library=my_artist_library,
attributed_to=super_user.actor,
artist__content_category="music",
)
my_artist_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=my_artist_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=my_artist_channel.artist,
album=my_artist_channel_serie,
)

# artist channel
artist_channel = audio_factories.ChannelFactory(artist__content_category="artist")
artist_channel_serie = music_factories.AlbumFactory(
artist_credit__artist=artist_channel.artist
)
music_factories.TrackFactory.create_batch(
size=random.randint(3, 6),
artist_credit__artist=artist_channel.artist,
album=artist_channel_serie,
)

audio_factories.SubscriptionFactory(
approved=True, target=artist_channel.actor, actor=super_user.actor
)


if __name__ == "__main__":
@@ -16,7 +16,7 @@ from . import models, utils
def filter_tags(queryset, name, value):
non_empty_tags = [v.lower() for v in value if v]
for tag in non_empty_tags:
queryset = queryset.filter(tagged_items__tag__name=tag).distinct()
queryset = queryset.filter(tagged_items__tag__name__iexact=tag).distinct()
return queryset
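Tag filtering now uses Django's case-insensitive __iexact lookup instead of an exact name match (the Subsonic genre filters further down receive the same treatment). A minimal sketch, assuming a Track queryset inside a configured Django project:

# "Rock", "rock" and "ROCK" now resolve to the same tag.
tracks = tracks.filter(tagged_items__tag__name__iexact="rock").distinct()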
@@ -630,7 +630,7 @@ def process_load_queue(stdout, **kwargs):
for path, event in batched_events.copy().items():
if time.time() - event["time"] <= flush_delay:
continue
now = datetime.datetime.utcnow()
now = datetime.datetime.now(datetime.timezone.utc)
stdout.write(
"{} -- Processing {}:{}...\n".format(
now.strftime("%Y/%m/%d %H:%M:%S"), event["type"], event["path"]
@@ -4,6 +4,7 @@ import logging
from collections.abc import Mapping

import arrow
import magic
import mutagen._util
import mutagen.flac
import mutagen.oggtheora

@@ -131,6 +132,28 @@ def clean_flac_pictures(apic):
return pictures


def clean_ogg_coverart(metadata_block_picture):
pictures = []
for b64_data in [metadata_block_picture]:
try:
data = base64.b64decode(b64_data)
except (TypeError, ValueError):
continue

mime = magic.Magic(mime=True)
mime.from_buffer(data)

pictures.append(
{
"mimetype": mime.from_buffer(data),
"content": data,
"description": "",
"type": mutagen.id3.PictureType.COVER_FRONT,
}
)
return pictures


def clean_ogg_pictures(metadata_block_picture):
pictures = []
for b64_data in [metadata_block_picture]:

@@ -196,10 +219,16 @@ CONF = {
"license": {},
"copyright": {},
"genre": {},
"pictures": {
"field": "metadata_block_picture",
"to_application": clean_ogg_pictures,
},
"pictures": [
{
"field": "metadata_block_picture",
"to_application": clean_ogg_pictures,
},
{
"field": "coverart",
"to_application": clean_ogg_coverart,
},
],
"comment": {"field": "comment"},
},
},

@@ -221,10 +250,16 @@ CONF = {
"license": {},
"copyright": {},
"genre": {},
"pictures": {
"field": "metadata_block_picture",
"to_application": clean_ogg_pictures,
},
"pictures": [
{
"field": "metadata_block_picture",
"to_application": clean_ogg_pictures,
},
{
"field": "coverart",
"to_application": clean_ogg_coverart,
},
],
"comment": {"field": "comment"},
},
},

@@ -415,25 +450,30 @@ class Metadata(Mapping):

def _get_from_self(self, key, default=NODEFAULT):
try:
field_conf = self._conf["fields"][key]
field_confs = self._conf["fields"][key]
except KeyError:
raise UnsupportedTag(f"{key} is not supported for this file format")
real_key = field_conf.get("field", key)
try:
getter = field_conf.get("getter", self._conf["getter"])
v = getter(self._file, real_key)
except KeyError:
if default == NODEFAULT:
raise TagNotFound(real_key)
return default
if not isinstance(field_confs, list):
field_confs = [field_confs]

converter = field_conf.get("to_application")
if converter:
v = converter(v)
field = VALIDATION.get(key)
if field:
v = field.to_python(v)
return v
for field_conf in field_confs:
real_key = field_conf.get("field", key)
try:
getter = field_conf.get("getter", self._conf["getter"])
v = getter(self._file, real_key)
except KeyError:
continue

converter = field_conf.get("to_application")
if converter:
v = converter(v)
field = VALIDATION.get(key)
if field:
v = field.to_python(v)
return v
if default == NODEFAULT:
raise TagNotFound(real_key)
return default

def get_picture(self, *picture_types):
if not picture_types:
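The reworked _get_from_self shown above accepts either a single field configuration or a list of them and tries each entry until one yields a tag value, which is what lets Ogg cover art fall back from metadata_block_picture to the legacy coverart field. A simplified, self-contained sketch of that fallback pattern (names are illustrative, not the actual Funkwhale API):

def first_matching(field_confs, read_tag):
    # Accept a single configuration dict or a list of them, like the diff above.
    if not isinstance(field_confs, list):
        field_confs = [field_confs]
    for conf in field_confs:
        try:
            return read_tag(conf["field"])
        except KeyError:
            continue
    raise LookupError("no configured field produced a value")

# Example: prefers metadata_block_picture, falls back to coverart.
tags = {"coverart": "base64-encoded picture data"}
value = first_matching(
    [{"field": "metadata_block_picture"}, {"field": "coverart"}],
    lambda key: tags[key],
)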
@@ -0,0 +1,21 @@
# Generated by Django 4.2.9 on 2025-01-03 20:43

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("music", "0059_remove_album_artist_remove_track_artist_artistcredit_and_more"),
("playlists", "0007_alter_playlist_actor_alter_playlisttrack_uuid_and_more"),
]

operations = [
migrations.RemoveField(
model_name="library",
name="description",
),
migrations.RemoveField(
model_name="library",
name="followers_url",
),
]
@@ -0,0 +1,108 @@
# Generated by Django 4.2.9 on 2025-01-03 16:12

from django.db import migrations, models
from django.db import IntegrityError

from funkwhale_api.federation import utils as federation_utils
from django.urls import reverse
import uuid


def insert_tracks_to_playlist(apps, playlist, uploads):
PlaylistTrack = apps.get_model("playlists", "PlaylistTrack")
plts = [
PlaylistTrack(
creation_date=playlist.creation_date,
playlist=playlist,
track=upload.track,
index=0 + i,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
for i, upload in enumerate(uploads)
if upload.track
]

return PlaylistTrack.objects.bulk_create(plts)


def migrate_libraries_to_playlist(apps, schema_editor):
Playlist = apps.get_model("playlists", "Playlist")
Library = apps.get_model("music", "Library")
LibraryFollow = apps.get_model("federation", "LibraryFollow")
Follow = apps.get_model("federation", "Follow")
User = apps.get_model("users", "User")
Actor = apps.get_model("federation", "Actor")

# library to playlist
for library in Library.objects.all():
playlist = Playlist.objects.create(
name=library.name,
actor=library.actor,
creation_date=library.creation_date,
privacy_level=library.privacy_level,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
playlist.save()

if library.uploads.all().exists():
insert_tracks_to_playlist(apps, playlist, library.uploads.all())

# library follows to user follow
for lib_follow in LibraryFollow.objects.filter(target=library):
try:
Follow.objects.create(
uuid=lib_follow.uuid,
target=library.actor,
actor=lib_follow.actor,
approved=lib_follow.approved,
creation_date=lib_follow.creation_date,
modification_date=lib_follow.modification_date,
)
except IntegrityError:
pass

LibraryFollow.objects.all().delete()

# migrate uploads to new library
for actor in Actor.objects.all():
privacy_levels = ["me", "instance", "everyone"]
for privacy_level in privacy_levels:
build_in_lib = Library.objects.create(
actor=actor,
privacy_level=privacy_level,
name=privacy_level,
uuid=(new_uuid := uuid.uuid4()),
fid=federation_utils.full_url(
reverse(
f"federation:music:playlists-detail",
kwargs={"uuid": new_uuid},
)
),
)
for library in actor.libraries.filter(privacy_level=privacy_level):
library.uploads.all().update(library=build_in_lib)
if library.pk is not build_in_lib.pk:
library.delete()


class Migration(migrations.Migration):
dependencies = [
("music", "0060_remove_library_description_and_more"),
]
operations = [
migrations.RunPython(
migrate_libraries_to_playlist, reverse_code=migrations.RunPython.noop
),
]
@@ -0,0 +1,42 @@
# Generated by Django 4.2.9 on 2024-12-21 20:02

from django.db import migrations, models


class Migration(migrations.Migration):
dependencies = [
("music", "0061_migrate_libraries_to_playlist"),
]

operations = [
migrations.AddField(
model_name="upload",
name="third_party_provider",
field=models.CharField(blank=True, max_length=100, null=True),
),
migrations.AlterField(
model_name="uploadversion",
name="mimetype",
field=models.CharField(
choices=[
("audio/mp3", "mp3"),
("audio/mpeg3", "mp3"),
("audio/x-mp3", "mp3"),
("audio/mpeg", "mp3"),
("video/ogg", "ogg"),
("audio/ogg", "ogg"),
("audio/opus", "opus"),
("audio/x-m4a", "aac"),
("audio/x-m4a", "m4a"),
("audio/m4a", "m4a"),
("audio/x-flac", "flac"),
("audio/flac", "flac"),
("audio/aiff", "aif"),
("audio/x-aiff", "aif"),
("audio/aiff", "aiff"),
("audio/x-aiff", "aiff"),
],
max_length=50,
),
),
]
@@ -7,7 +7,7 @@ import urllib.parse
import uuid

import arrow
import pydub
import slugify
from django.conf import settings
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.indexes import GinIndex

@@ -24,6 +24,7 @@ from django.dispatch import receiver
from django.urls import reverse
from django.utils import timezone

from config import plugins
from funkwhale_api import musicbrainz
from funkwhale_api.common import fields
from funkwhale_api.common import models as common_models

@@ -522,10 +523,19 @@ class TrackQuerySet(common_models.LocalFromFidQuerySet, models.QuerySet):

def with_playable_uploads(self, actor):
uploads = Upload.objects.playable_by(actor)
return self.prefetch_related(
queryset = self.prefetch_related(
models.Prefetch("uploads", queryset=uploads, to_attr="playable_uploads")
)

if queryset and queryset[0].uploads.count() > 0:
return queryset
else:
plugins.trigger_hook(
plugins.TRIGGER_THIRD_PARTY_UPLOAD,
track=self.first(),
)
return queryset

def order_for_album(self):
"""
Order by disc number then position

@@ -709,7 +719,7 @@ class Track(APIModelMixin):
@property
def listen_url(self) -> str:
# Not using reverse because this is slow
return f"/api/v1/listen/{self.uuid}/"
return f"/api/v2/listen/{self.uuid}/"

@property
def local_license(self):

@@ -766,11 +776,16 @@ TRACK_FILE_IMPORT_STATUS_CHOICES = (


def get_file_path(instance, filename):
# Convert unicode characters in name to ASCII characters.
filename = slugify.slugify(filename, ok=slugify.SLUG_OK + ".", only_ascii=True)

if isinstance(instance, UploadVersion):
return common_utils.ChunkedPath("transcoded")(instance, filename)

if instance.library.actor.get_user():
return common_utils.ChunkedPath("tracks")(instance, filename)
elif instance.third_party_provider:
return common_utils.ChunkedPath("third_party_tracks")(instance, filename)
else:
# we cache remote tracks in a different directory
return common_utils.ChunkedPath("federation_cache/tracks")(instance, filename)

@@ -811,9 +826,6 @@ class Upload(models.Model):
related_name="uploads",
on_delete=models.CASCADE,
)
upload_group = models.ForeignKey(
"UploadGroup", related_name="uploads", on_delete=models.CASCADE, null=True
)

# metadata from federation
metadata = JSONField(

@@ -830,7 +842,6 @@ class Upload(models.Model):
)
# a short reference provided by the client to group multiple files
# in the same import
# TODO DEPRECATED This can be removed when APIv1 gets removed or fully replace by import_group.name
import_reference = models.CharField(max_length=50, default=get_import_reference)

# optional metadata about import results (error messages, etc.)

@@ -846,6 +857,9 @@ class Upload(models.Model):
checksum = models.CharField(max_length=100, db_index=True, null=True, blank=True)

quality = models.IntegerField(choices=quality_choices, default=1)

third_party_provider = models.CharField(max_length=100, null=True, blank=True)

objects = UploadQuerySet.as_manager()

@property

@@ -928,6 +942,12 @@ class Upload(models.Model):
if self.source and self.source.startswith("file://"):
return open(self.source.replace("file://", "", 1), "rb")

def get_audio_file_path(self):
if self.audio_file:
return self.audio_file.path
if self.source and self.source.startswith("file://"):
return self.source.replace("file://", "", 1)

def get_audio_data(self):
audio_file = self.get_audio_file()
if not audio_file:

@@ -941,14 +961,6 @@ class Upload(models.Model):
"size": self.get_file_size(),
}

def get_audio_segment(self):
input = self.get_audio_file()
if not input:
return

audio = pydub.AudioSegment.from_file(input)
return audio

def get_quality(self):
extension_to_mimetypes = utils.get_extension_to_mimetype_dict()

@@ -1072,8 +1084,8 @@ class Upload(models.Model):
)
version.audio_file.save(new_name, f)
utils.transcode_audio(
audio=self.get_audio_segment(),
output=version.audio_file,
audio_file_path=self.get_audio_file_path(),
output_path=version.audio_file.path,
output_format=utils.MIMETYPE_TO_EXTENSION[mimetype],
bitrate=str(bitrate),
)

@@ -1562,28 +1574,3 @@ def update_request_status(sender, instance, created, **kwargs):
# let's mark the request as imported since the import is over
instance.import_request.status = "imported"
return instance.import_request.save(update_fields=["status"])


class UploadGroup(models.Model):
"""
Upload groups are supposed to bundle uploads in order to make it easier to keep an overview

Attributes
----------
name A name that can be selected by the user
guid A globally unique identifier to reference the group
"""

name = models.CharField(max_length=255, default=datetime.datetime.now)
guid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
owner = models.ForeignKey(
"federation.Actor", on_delete=models.CASCADE, related_name="upload_groups"
)
created_at = models.DateTimeField(default=timezone.now)

def __str__(self):
return self.name

@property
def upload_url(self):
return f"{settings.FUNKWHALE_URL}/api/v2/upload-groups/{self.guid}/uploads"
@@ -907,107 +907,3 @@ class SearchResultSerializer(serializers.Serializer):
tracks = TrackSerializer(many=True)
albums = AlbumSerializer(many=True)
tags = tags_serializers.TagSerializer(many=True)


class UploadGroupSerializer(serializers.ModelSerializer):
class Meta:
model = models.UploadGroup
fields = ["guid", "name", "createdAt", "uploadUrl"]

name = serializers.CharField(required=False)
uploadUrl = serializers.URLField(read_only=True, source="upload_url")
createdAt = serializers.DateTimeField(read_only=True, source="created_at")


class UploadGroupUploadMetadataReleaseSerializer(serializers.Serializer):
title = serializers.CharField()
artist = serializers.CharField()
mbid = serializers.UUIDField(required=False)


class UploadGroupUploadMetadataArtistSerializer(serializers.Serializer):
name = serializers.CharField()
mbid = serializers.UUIDField(required=False)


class UploadGroupUploadMetadataSerializer(serializers.Serializer):
title = serializers.CharField()
mbid = serializers.UUIDField(required=False)
tags = serializers.ListField(child=serializers.CharField(), required=False)
position = serializers.IntegerField(required=False)
entryNumber = serializers.IntegerField(required=False)
releaseDate = serializers.DateField(required=False)
license = serializers.URLField(required=False)
release = UploadGroupUploadMetadataReleaseSerializer(required=False)
artist = UploadGroupUploadMetadataArtistSerializer(required=False)


class TargetSerializer(serializers.Serializer):
library = serializers.UUIDField(required=False)
collections = serializers.ListField(child=serializers.UUIDField(), required=False)
channels = serializers.ListField(child=serializers.UUIDField(), required=False)

def validate(self, data):
# At the moment we allow to set exactly one target, it can be either a library or a channel.
# The structure already allows setting multiple targets in the future, however this is disabled for now.
if "channels" in data and "library" in data:
raise serializers.ValidationError
if "channels" not in data and "library" not in data:
raise serializers.ValidationError
if "collections" in data:
raise serializers.ValidationError("Not yet implemented")
try:
if len(data.channels) > 1:
raise serializers.ValidationError
except AttributeError:
pass
return data


class UploadGroupUploadSerializer(serializers.ModelSerializer):
class Meta:
model = models.Upload
fields = [
"audioFile",
"target",
"metadata",
] # , "cover"] TODO we need to process the cover

metadata = serializers.JSONField(source="import_metadata")
target = serializers.JSONField()
audioFile = serializers.FileField(source="audio_file")
# cover = serializers.FileField(required=False)

def validate_target(self, value):
serializer = TargetSerializer(data=value)
if serializer.is_valid():
return serializer.validated_data
else:
print(serializer.errors)
raise serializers.ValidationError

def validate_metadata(self, value):
serializer = UploadGroupUploadMetadataSerializer(data=value)
if serializer.is_valid():
return serializer.validated_data
else:
print(serializer.errors)
raise serializers.ValidationError

def create(self, validated_data):
library = models.Library.objects.get(uuid=validated_data["target"]["library"])
del validated_data["target"]
return models.Upload.objects.create(
library=library, source="upload://test", **validated_data
)


class BaseUploadSerializer(serializers.ModelSerializer):
class Meta:
model = models.Upload
fields = ["guid", "createdDate", "uploadGroup", "status"]

guid = serializers.UUIDField(source="uuid")
createdDate = serializers.DateTimeField(source="creation_date")
uploadGroup = serializers.UUIDField(source="upload_group.guid")
status = serializers.CharField(source="import_status")
@@ -176,7 +176,7 @@ def fail_import(upload, error_code, detail=None, **fields):
upload.import_metadata, "funkwhale", "config", "broadcast", default=True
)
if broadcast:
signals.upload_import_status_updated.send(
signals.upload_import_status_updated.send_robust(
old_status=old_status,
new_status=upload.import_status,
upload=upload,

@@ -297,7 +297,7 @@ def process_upload(upload, update_denormalization=True):
update_fields=["import_details", "import_status", "import_date", "track"]
)
if broadcast:
signals.upload_import_status_updated.send(
signals.upload_import_status_updated.send_robust(
old_status=old_status,
new_status=upload.import_status,
upload=upload,

@@ -341,7 +341,7 @@ def process_upload(upload, update_denormalization=True):
)

if broadcast:
signals.upload_import_status_updated.send(
signals.upload_import_status_updated.send_robust(
old_status=old_status,
new_status=upload.import_status,
upload=upload,

@@ -993,7 +993,7 @@ def albums_set_tags_from_tracks(ids=None, dry_run=False):
data = tags_tasks.get_tags_from_foreign_key(
ids=qs,
foreign_key_model=models.Track,
foreign_key_attr="album",
foreign_key_attr="albums",
)
logger.info("Found automatic tags for %s albums…", len(data))
if dry_run:
@@ -4,10 +4,10 @@ import pathlib

import magic
import mutagen
import pydub
from django.conf import settings
from django.core.cache import cache
from django.db.models import F
from ffmpeg import FFmpeg

from funkwhale_api.common import throttling
from funkwhale_api.common.search import get_fts_query # noqa

@@ -56,6 +56,7 @@ AUDIO_EXTENSIONS_AND_MIMETYPE = [
("opus", "audio/opus"),
("aac", "audio/x-m4a"),
("m4a", "audio/x-m4a"),
("m4a", "audio/m4a"),
("flac", "audio/x-flac"),
("flac", "audio/flac"),
("aif", "audio/aiff"),

@@ -113,15 +114,10 @@ def get_actor_from_request(request):
return actor


def transcode_file(input, output, input_format=None, output_format="mp3", **kwargs):
with input.open("rb"):
audio = pydub.AudioSegment.from_file(input, format=input_format)
return transcode_audio(audio, output, output_format, **kwargs)


def transcode_audio(audio, output, output_format, **kwargs):
with output.open("wb"):
return audio.export(output, format=output_format, **kwargs)
def transcode_audio(audio_file_path, output_path, output_format="mp3", **kwargs):
FFmpeg().input(audio_file_path).output(
output_path, format=output_format, **kwargs
).option("y").execute()


def increment_downloads_count(upload, user, wsgi_request):
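Transcoding now goes through the fluent FFmpeg wrapper imported above instead of loading whole files into pydub AudioSegments. A usage sketch of the new helper, mirroring how models.py calls it in the hunk further up; the paths and bitrate value are illustrative and ffmpeg must be available on the host:

from funkwhale_api.music.utils import transcode_audio

transcode_audio(
    audio_file_path="/srv/funkwhale/media/tracks/example.flac",
    output_path="/tmp/example.mp3",
    output_format="mp3",
    bitrate="192",  # extra keyword arguments are forwarded to the ffmpeg output call
)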
@ -8,7 +8,7 @@ import requests.exceptions
|
|||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.db import transaction
|
||||
from django.db.models import Count, F, Prefetch, Q, Sum
|
||||
from django.db.models import BooleanField, Case, Count, F, Prefetch, Q, Sum, Value, When
|
||||
from django.db.models.functions import Collate
|
||||
from django.utils import timezone
|
||||
from drf_spectacular.utils import OpenApiParameter, extend_schema, extend_schema_view
|
||||
|
@ -16,7 +16,6 @@ from rest_framework import mixins, renderers
|
|||
from rest_framework import settings as rest_settings
|
||||
from rest_framework import views, viewsets
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.parsers import FormParser, MultiPartParser
|
||||
from rest_framework.response import Response
|
||||
|
||||
from funkwhale_api.common import decorators as common_decorators
|
||||
|
@ -666,7 +665,15 @@ def handle_stream(track, request, download, explicit_file, format, max_bitrate):
|
|||
if explicit_file:
|
||||
queryset = queryset.filter(uuid=explicit_file)
|
||||
queryset = queryset.playable_by(actor)
|
||||
queryset = queryset.order_by(F("audio_file").desc(nulls_last=True))
|
||||
# third_party uploads are displayed before manual upload only if no audio file is found in manual upload
|
||||
queryset = queryset.order_by(
|
||||
Case(
|
||||
When(third_party_provider__isnull=False, then=Value(1)),
|
||||
default=Value(0),
|
||||
output_field=BooleanField(),
|
||||
),
|
||||
F("audio_file").desc(nulls_last=True),
|
||||
)
|
||||
upload = queryset.first()
|
||||
if not upload:
|
||||
return Response(status=404)
|
||||
|
@ -791,43 +798,6 @@ class UploadViewSet(
|
|||
cover_data["content"] = base64.b64encode(cover_data["content"])
|
||||
return Response(payload, status=200)
|
||||
|
||||
@action(methods=["post"], detail=False)
|
||||
def perform_upload_action(self, request, *args, **kwargs):
|
||||
queryset = self.get_queryset()
|
||||
serializer = serializers.UploadActionSerializer(request.data, queryset=queryset)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
result = serializer.save()
|
||||
return Response(result, status=200)
|
||||
|
||||
def get_serializer_context(self):
|
||||
context = super().get_serializer_context()
|
||||
context["user"] = self.request.user
|
||||
return context
|
||||
|
||||
def perform_create(self, serializer):
|
||||
group_name = serializer.validated_data.get("import_reference") or str(
|
||||
datetime.datetime.date(datetime.datetime.now())
|
||||
)
|
||||
upload_group, _ = models.UploadGroup.objects.get_or_create(
|
||||
name=group_name, owner=self.request.user.actor
|
||||
)
|
||||
        upload = serializer.save(upload_group=upload_group)
        if upload.import_status == "pending":
            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)

    def perform_update(self, serializer):
        upload = serializer.save()
        if upload.import_status == "pending":
            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)

    @transaction.atomic
    def perform_destroy(self, instance):
        routes.outbox.dispatch(
            {"type": "Delete", "object": {"type": "Audio"}},
            context={"uploads": [instance]},
        )
        instance.delete()

    @action(detail=False, methods=["patch"])
    def bulk_update(self, request, *args, **kwargs):
        """
@@ -845,6 +815,37 @@ class UploadViewSet(
            status=200,
        )

    @action(methods=["post"], detail=False)
    def action(self, request, *args, **kwargs):
        queryset = self.get_queryset()
        serializer = serializers.UploadActionSerializer(request.data, queryset=queryset)
        serializer.is_valid(raise_exception=True)
        result = serializer.save()
        return Response(result, status=200)

    def get_serializer_context(self):
        context = super().get_serializer_context()
        context["user"] = self.request.user
        return context

    def perform_create(self, serializer):
        upload = serializer.save()
        if upload.import_status == "pending":
            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)

    def perform_update(self, serializer):
        upload = serializer.save()
        if upload.import_status == "pending":
            common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)

    @transaction.atomic
    def perform_destroy(self, instance):
        routes.outbox.dispatch(
            {"type": "Delete", "object": {"type": "Audio"}},
            context={"uploads": [instance]},
        )
        instance.delete()


class Search(views.APIView):
    max_results = 3
@@ -976,38 +977,3 @@ class OembedView(views.APIView):
        serializer.is_valid(raise_exception=True)
        embed_data = serializer.save()
        return Response(embed_data)


class UploadGroupViewSet(viewsets.ModelViewSet):
    permission_classes = [oauth_permissions.ScopePermission]
    required_scope = "libraries"
    serializer_class = serializers.UploadGroupSerializer

    def get_queryset(self):
        return models.UploadGroup.objects.filter(owner__user__id=self.request.user.id)

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user.actor)

    @action(
        detail=True,
        methods=["post"],
        parser_classes=(MultiPartParser, FormParser),
        serializer_class=serializers.UploadGroupUploadSerializer,
    )
    def uploads(self, request, pk=None):
        print(request.data)
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            upload_group = models.UploadGroup.objects.get(guid=pk)
            if upload_group.owner == request.user.actor:
                upload = serializer.save(upload_group=upload_group)
                common_utils.on_commit(tasks.process_upload.delay, upload_id=upload.pk)
                response = serializers.BaseUploadSerializer(upload).data
                return Response(response, status=200)
            else:
                return Response("You don't own this Upload Group", status=403)
        else:
            print(serializer.errors)

            return Response("Fehler", status=202)
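
The removed UploadGroupViewSet exposed a nested "uploads" action that accepted multipart uploads scoped to an upload group. A minimal client-side sketch of how that endpoint appears to have been meant to be used, based on the v2 tests further down in this diff (the "audioFile", "metadata" and "target" field names and the /api/v2/upload-groups/ path come from those tests; the instance URL and token below are placeholders):

import json
import requests

API = "https://funkwhale.example.org"          # placeholder instance URL
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder OAuth token with the "libraries" scope

# 1. Create an upload group (name is optional; a date-based one is generated otherwise).
group = requests.post(
    f"{API}/api/v2/upload-groups/", json={"name": "My import"}, headers=HEADERS
).json()

# 2. Post one audio file into the group, targeting a library owned by the same actor.
with open("track.ogg", "rb") as f:
    response = requests.post(
        group["uploadUrl"],  # e.g. https://.../api/v2/upload-groups/<guid>/uploads/
        headers=HEADERS,
        files={"audioFile": f},
        data={
            "metadata": json.dumps({"title": "foo"}),
            "target": json.dumps({"library": "<library-uuid>"}),
        },
    )
response.raise_for_status()
print(response.json())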
@@ -244,7 +244,7 @@ class PlaylistViewSet(
        serializer = music_serializers.AlbumSerializer(releases, many=True)
        return Response(serializer.data, status=200)

    @extend_schema(operation_id="get_playlist_artists")
    @extend_schema(operation_id="get_playlist_artits")
    @action(methods=["get"], detail=True)
    @transaction.atomic
    def artists(self, request, *args, **kwargs):
@@ -419,12 +419,12 @@ class SubsonicViewSet(viewsets.GenericViewSet):
            queryset = (
                queryset.playable_by(actor)
                .filter(
                    Q(tagged_items__tag__name=genre)
                    | Q(artist_credit__artist__tagged_items__tag__name=genre)
                    Q(tagged_items__tag__name__iexact=genre)
                    | Q(artist_credit__artist__tagged_items__tag__name__iexact=genre)
                    | Q(
                        artist_credit__albums__artist_credit__artist__tagged_items__tag__name=genre
                        artist_credit__albums__artist_credit__artist__tagged_items__tag__name__iexact=genre
                    )
                    | Q(artist_credit__albums__tagged_items__tag__name=genre)
                    | Q(artist_credit__albums__tagged_items__tag__name__iexact=genre)
                )
                .prefetch_related("uploads")
                .distinct()
@@ -485,8 +485,8 @@ class SubsonicViewSet(viewsets.GenericViewSet):
        elif type == "byGenre" and data.get("genre"):
            genre = data.get("genre")
            queryset = queryset.filter(
                Q(tagged_items__tag__name=genre)
                | Q(artist_credit__artist__tagged_items__tag__name=genre)
                Q(tagged_items__tag__name__iexact=genre)
                | Q(artist_credit__artist__tagged_items__tag__name__iexact=genre)
            )
        elif type == "byYear":
            try:
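
The Subsonic hunks above switch the genre lookups from exact name matches to __iexact, so that e.g. "rock" and "Rock" resolve to the same tag now that the database-level case-insensitive collation is dropped elsewhere in this commit. A stripped-down sketch of the same pattern (field paths simplified; it assumes a configured Django project with Funkwhale-like models):

from django.db.models import Q


def tracks_for_genre(queryset, genre: str):
    """Filter a Track queryset on a tag name, ignoring case on the track, its artists and its albums."""
    return queryset.filter(
        Q(tagged_items__tag__name__iexact=genre)
        | Q(artist_credit__artist__tagged_items__tag__name__iexact=genre)
        | Q(artist_credit__albums__tagged_items__tag__name__iexact=genre)
    ).distinct()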
@@ -11,12 +11,6 @@ class Migration(migrations.Migration):
    ]

    operations = [
        CreateCollation(
            "case_insensitive",
            provider="icu",
            locale="und-u-ks-level2",
            deterministic=False,
        ),
        migrations.AddField(
            model_name="tag",
            name="mbid",
@@ -25,8 +19,6 @@ class Migration(migrations.Migration):
        migrations.AlterField(
            model_name="tag",
            name="name",
            field=models.CharField(
                db_collation="case_insensitive", max_length=100, unique=True
            ),
            field=models.CharField(max_length=100, unique=True),
        ),
    ]
@@ -12,7 +12,8 @@ TAG_REGEX = re.compile(r"^((\w+)([\d_]*))$")

class Tag(models.Model):
    name = models.CharField(
        max_length=100, unique=True, db_collation="case_insensitive"
        max_length=100,
        unique=True,
    )
    mbid = models.UUIDField(null=True, db_index=True, blank=True, unique=True)
    creation_date = models.DateTimeField(default=timezone.now)
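
With the case_insensitive collation dropped from Tag.name, case handling moves back to query time. A small illustrative helper (ours, not from the codebase) that fetches or creates a tag without relying on database collation:

def get_or_create_tag(name: str):
    """Look up a tag case-insensitively, creating it with the given spelling if missing (illustrative only)."""
    from funkwhale_api.tags.models import Tag  # assumes a configured Django project with the app installed

    tag = Tag.objects.filter(name__iexact=name).first()
    return tag if tag is not None else Tag.objects.create(name=name)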
@@ -24,10 +24,10 @@ def get_tags_from_foreign_key(
    objs = foreign_key_model.objects.filter(
        **{f"artist_credit__{foreign_key_attr}__pk__in": ids}
    ).order_by("-id")
    objs = objs.only("id", f"artist_credit__{foreign_key_attr}_id").prefetch_related(
    objs = objs.only("id", f"artist_credit__{foreign_key_attr}__id").prefetch_related(
        tagged_items_attr
    )
    for obj in objs.iterator():
    for obj in objs.iterator(chunk_size=1000):
        for ac in obj.artist_credit.all():
            # loop on all objects, store the objs tags + counter on the corresponding foreign key
            row_data = data.setdefault(
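
The switch to objs.iterator(chunk_size=1000) matters for the Django upgrade elsewhere in this commit: recent Django versions only honour prefetch_related() on iterator() when an explicit chunk_size is given, and Django 5 turns the missing argument into an error. A minimal sketch of the pattern (model and relation names are placeholders standing in for Funkwhale's):

# Assumes a configured Django project; Track/tagged_items are placeholders.
for track in (
    Track.objects.only("id")
    .prefetch_related("tagged_items")
    .iterator(chunk_size=1000)  # required so the prefetch actually takes effect
):
    for item in track.tagged_items.all():
        ...  # process prefetched rows in 1000-row chunks instead of loading everything at once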
@@ -1,27 +1,30 @@
from troi import Artist, Element, Playlist, Recording
from troi import Artist, ArtistCredit, Element, Playlist, Recording
from troi.patch import Patch

recording_list = [
    Recording(
        name="I Want It That Way",
        mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa",
        artist=Artist(name="artist_name"),
        artist_credit=ArtistCredit(artists=[Artist(name="artist_name")]),
    ),
    Recording(
        name="Untouchable",
        artist_credit=ArtistCredit(artists=[Artist(name="Another lol")]),
    ),
    Recording(name="Untouchable", artist=Artist(name="Another lol")),
    Recording(
        name="The Perfect Kiss",
        mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0",
        artist=Artist(name="artist_name2"),
        artist_credit=ArtistCredit(artists=[Artist(name="artist_name2")]),
    ),
    Recording(
        name="Love Your Voice",
        mbid="93726547-f8c0-4efd-8e16-d2dee76500f6",
        artist=Artist(name="artist_name"),
        artist_credit=ArtistCredit(artists=[Artist(name="artist_name")]),
    ),
    Recording(
        name="Hall of Fame",
        mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09",
        artist=Artist(name="artist_name_3"),
        artist_credit=ArtistCredit(artists=[Artist(name="artist_name3")]),
    ),
]

@@ -34,8 +37,19 @@ class DummyElement(Element):
        return [Playlist]

    def read(self, sources):
        recordings = recording_list

        recordings = [
            Recording(
                name="I Want It That Way", mbid="87dfa566-21c3-45ed-bc42-1d345b8563fa"
            ),
            Recording(name="Untouchable"),
            Recording(
                name="The Perfect Kiss", mbid="ec0da94e-fbfe-4eb0-968e-024d4c32d1d0"
            ),
            Recording(
                name="Love Your Voice", mbid="93726547-f8c0-4efd-8e16-d2dee76500f6"
            ),
            Recording(name="Hall of Fame", mbid="395bd5a1-79cc-4e04-8869-ca9eabc78d09"),
        ]
        return [
            Playlist(
                name="Test Export Playlist",
@@ -57,7 +57,7 @@ def resolve_recordings_to_fw_track(recordings):

    for recording in recordings:
        rec = mc.clean_recording(recording.name)
        artist = mc.clean_artist(recording.artist.name)
        artist = mc.clean_artist(recording.artist_credit.artists[0].name)
        canonical_name_for_track = delete_non_alnum_characters(artist + rec)

        logger.debug(f"Trying to resolve : {canonical_name_for_track}")
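
The resolver above now reads the artist name through recording.artist_credit.artists[0].name, matching the newer troi data model in which a Recording carries an ArtistCredit rather than a single Artist. A defensive variant of that lookup (helper name is ours, not Funkwhale's), assuming the troi package pinned in this commit:

from troi import ArtistCredit, Recording


def first_credited_artist(recording: Recording) -> str | None:
    """Return the first credited artist name, or None when the credit is missing or empty."""
    credit: ArtistCredit | None = getattr(recording, "artist_credit", None)
    if credit and credit.artists:
        return credit.artists[0].name
    return None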
@@ -0,0 +1,40 @@
# Generated by Django 4.2.18 on 2025-01-15 13:45

from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ("users", "0024_alter_accesstoken_user_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="application",
            name="allowed_origins",
            field=models.TextField(
                blank=True,
                default="",
                help_text="Allowed origins list to enable CORS, space separated",
            ),
        ),
        migrations.AddField(
            model_name="application",
            name="hash_client_secret",
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name="application",
            name="post_logout_redirect_uris",
            field=models.TextField(
                blank=True,
                default="",
                help_text="Allowed Post Logout URIs list, space separated",
            ),
        ),
        migrations.AlterField(
            model_name="accesstoken",
            name="token",
            field=models.CharField(db_index=True, max_length=255, unique=True),
        ),
    ]
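
The new allowed_origins column stores a space-separated list, as its help_text says. Checking a request origin against it is then a one-liner; a small illustrative helper, not taken from the codebase:

def origin_is_allowed(allowed_origins: str, origin: str) -> bool:
    """True when `origin` appears in the application's space-separated allow-list."""
    return origin in allowed_origins.split()


assert origin_is_allowed("https://app.example.org https://other.example", "https://app.example.org")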
@@ -0,0 +1,55 @@
# Generated by Django 5.1.5 on 2025-01-15 17:10

import oauth2_provider.models
from django.db import migrations, models

import oauth2_provider.models
from django.db import migrations, models
from oauth2_provider.settings import oauth2_settings

# see https://github.com/jazzband/django-oauth-toolkit/blob/master/oauth2_provider/migrations/0012_add_token_checksum.py


def forwards_func(apps, schema_editor):
    """
    Forward migration touches every "old" accesstoken.token which will cause the checksum to be computed.
    """
    AccessToken = apps.get_model(oauth2_settings.ACCESS_TOKEN_MODEL)
    accesstokens = AccessToken._default_manager.iterator()
    for accesstoken in accesstokens:
        accesstoken.save(update_fields=["token_checksum"])


class Migration(migrations.Migration):
    dependencies = [
        ("users", "0025_application_allowed_origins_and_more"),
    ]

    operations = [
        migrations.AddField(
            model_name="accesstoken",
            name="token_checksum",
            field=oauth2_provider.models.TokenChecksumField(
                blank=True, null=True, max_length=64
            ),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name="refreshtoken",
            name="token_family",
            field=models.UUIDField(blank=True, editable=False, null=True),
        ),
        migrations.AlterField(
            model_name="accesstoken",
            name="token",
            field=models.TextField(),
        ),
        migrations.RunPython(forwards_func, migrations.RunPython.noop),
        migrations.AlterField(
            model_name="accesstoken",
            name="token_checksum",
            field=oauth2_provider.models.TokenChecksumField(
                blank=False, max_length=64, db_index=True, unique=True
            ),
        ),
    ]
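
This migration mirrors django-oauth-toolkit's own 0012_add_token_checksum step (linked in the comment above): a token_checksum column is added, every existing access token is re-saved so the checksum gets populated, and the column then becomes unique and indexed. Conceptually the checksum is a hash of the plain token, roughly like the sketch below; the exact algorithm is django-oauth-toolkit's concern, so treat this as an approximation rather than the library's implementation:

import hashlib


def token_checksum(token: str) -> str:
    """Hex-encoded SHA-256 over the raw token; an approximation of what a TokenChecksumField stores."""
    return hashlib.sha256(token.encode("utf-8")).hexdigest()


print(token_checksum("example-access-token"))  # 64 hex characters, matching the max_length=64 column above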
@@ -374,14 +374,14 @@ class Application(oauth2_models.AbstractApplication):
OOB_SCHEMES = ["urn:ietf:wg:oauth:2.0:oob", "urn:ietf:wg:oauth:2.0:oob:auto"]


class CustomRedirectURIValidator(oauth2_validators.RedirectURIValidator):
class CustomRedirectURIValidator(oauth2_validators.AllowedURIValidator):
    def __call__(self, value):
        if value in OOB_SCHEMES:
            return value
        return super().__call__(value)


oauth2_models.RedirectURIValidator = CustomRedirectURIValidator
oauth2_models.AllowedURIValidator = CustomRedirectURIValidator


class Grant(oauth2_models.AbstractGrant):
@@ -455,7 +455,11 @@ def create_actor(user, **kwargs):
    args["private_key"] = private.decode("utf-8")
    args["public_key"] = public.decode("utf-8")

    return federation_models.Actor.objects.create(user=user, **args)
    actor = federation_models.Actor.objects.create(**args)
    user.actor = actor
    user.save()

    return actor


def create_user_libraries(user):
File diff suppressed because it is too large
@@ -25,104 +25,105 @@ exclude = ["tests"]
funkwhale-manage = 'funkwhale_api.main:main'

[tool.poetry.dependencies]
python = "^3.8,<3.13"
python = "^3.10,<3.14"

# Django
dj-rest-auth = "5.0.2"
django = "4.2.9"
django-allauth = "0.55.2"
django-cache-memoize = "0.1.10"
django-cacheops = "==7.0.2"
django-cleanup = "==8.1.0"
django-cors-headers = "==4.3.1"
django-dynamic-preferences = "==1.14.0"
django-environ = "==0.11.2"
django-filter = "==23.5"
django-oauth-toolkit = "2.2.0"
django-redis = "==5.2.0"
django-storages = "==1.13.2"
dj-rest-auth = "7.0.1"
django = "5.1.5"
django-allauth = "65.3.1"
django-cache-memoize = "0.2.1"
django-cacheops = "==7.1"
django-cleanup = "==9.0.0"
django-cors-headers = "==4.6.0"
django-dynamic-preferences = "==1.17.0"
django-environ = "==0.12.0"
django-filter = "==24.3"
django-oauth-toolkit = "3.0.1"
django-redis = "==5.4.0"
django-storages = "==1.14.4"
django-versatileimagefield = "==3.1"
djangorestframework = "==3.14.0"
drf-spectacular = "==0.26.5"
markdown = "==3.4.4"
djangorestframework = "==3.15.2"
drf-spectacular = "==0.28.0"
markdown = "==3.7"
persisting-theory = "==1.0"
psycopg2-binary = "==2.9.9"
redis = "==5.0.1"
psycopg2-binary = "==2.9.10"
redis = "==5.2.1"

# Django LDAP
django-auth-ldap = "==4.1.0"
django-auth-ldap = "==5.1.0"
python-ldap = "==3.4.4"

# Channels
channels = { extras = ["daphne"], version = "==4.0.0" }
channels-redis = "==4.1.0"
channels = { extras = ["daphne"], version = "==4.2.0" }
channels-redis = "==4.2.1"

# Celery
kombu = "5.3.4"
celery = "5.3.6"
kombu = "5.4.2"
celery = "5.4.0"

# Deployment
gunicorn = "==21.2.0"
uvicorn = { version = "==0.20.0", extras = ["standard"] }
gunicorn = "==23.0.0"
uvicorn = { version = "==0.34.0", extras = ["standard"] }

# Libs
aiohttp = "3.9.1"
arrow = "==1.2.3"
aiohttp = "3.11.11"
arrow = "==1.3.0"
backports-zoneinfo = { version = "==0.2.1", python = "<3.9" }
bleach = "==6.1.0"
boto3 = "==1.26.161"
click = "==8.1.7"
cryptography = "==41.0.7"
feedparser = "==6.0.10"
bleach = "==6.2.0"
boto3 = "==1.35.99"
click = "==8.1.8"
cryptography = "==44.0.0"
defusedxml = "0.7.1"
feedparser = "==6.0.11"
python-ffmpeg = "==2.0.12"
liblistenbrainz = "==0.5.5"
musicbrainzngs = "==0.7.1"
mutagen = "==1.46.0"
pillow = "==10.2.0"
pydub = "==0.25.1"
pyld = "==2.0.3"
pillow = "==11.1.0"
pyld = "==2.0.4"
python-magic = "==0.4.27"
requests = "==2.31.0"
requests = "==2.32.3"
requests-http-message-signatures = "==0.3.1"
sentry-sdk = "==1.19.1"
watchdog = "==4.0.0"
troi = "==2024.1.26.0"
lb-matching-tools = "==2024.1.25.0rc1"
unidecode = "==1.3.7"
pycountry = "23.12.11"
sentry-sdk = "==2.20.0"
watchdog = "==6.0.0"
troi = "==2025.1.10.0"
lb-matching-tools = "==2024.1.30.1"
unidecode = "==1.3.8"
pycountry = "24.6.1"

# Typesense
typesense = { version = "==0.15.1", optional = true }
typesense = { version = "==0.21.0", optional = true }

# Dependencies pinning
ipython = "==8.12.3"
ipython = "==8.31.0"
pluralizer = "==1.2.0"
service-identity = "==24.1.0"
service-identity = "==24.2.0"
unicode-slugify = "==0.1.5"

[tool.poetry.group.dev.dependencies]
aioresponses = "==0.7.6"
aioresponses = "==0.7.7"
asynctest = "==0.13.0"
black = "==24.1.1"
coverage = { version = "==7.4.1", extras = ["toml"] }
debugpy = "==1.6.7.post1"
django-coverage-plugin = "==3.0.0"
django-debug-toolbar = "==4.2.0"
factory-boy = "==3.2.1"
faker = "==23.2.1"
flake8 = "==3.9.2"
black = "==24.10.0"
coverage = { version = "==7.6.10", extras = ["toml"] }
debugpy = "==1.8.11"
django-coverage-plugin = "==3.1.0"
django-debug-toolbar = "==5.0.1"
factory-boy = "==3.3.1"
faker = "==33.3.1"
flake8 = "==7.1.1"
ipdb = "==0.13.13"
pytest = "==8.0.0"
pytest-asyncio = "==0.21.0"
prompt-toolkit = "==3.0.41"
pytest-cov = "==4.0.0"
pytest-django = "==4.5.2"
pytest-env = "==1.1.3"
pytest-mock = "==3.10.0"
pytest-randomly = "==3.12.0"
pytest = "==8.3.4"
pytest-asyncio = "==0.25.2"
prompt-toolkit = "==3.0.48"
pytest-cov = "==6.0.0"
pytest-django = "==4.9.0"
pytest-env = "==1.1.5"
pytest-mock = "==3.14.0"
pytest-randomly = "==3.16.0"
pytest-sugar = "==1.0.0"
requests-mock = "==1.10.0"
pylint = "==3.0.3"
pylint-django = "==2.5.5"
requests-mock = "==1.12.1"
pylint = "==3.3.3"
pylint-django = "==2.6.1"
django-extensions = "==3.2.3"

[tool.poetry.extras]
@@ -1,115 +1,115 @@
import pytest
from click.testing import CliRunner

from funkwhale_api.cli import library, main
from funkwhale_api.cli import library, main, users


@pytest.mark.parametrize(
    "cmd, args, handlers",
    [
        # (
        # ("users", "create"),
        # (
        # "--username",
        # "testuser",
        # "--password",
        # "testpassword",
        # "--email",
        # "test@hello.com",
        # "--upload-quota",
        # "35",
        # "--permission",
        # "library",
        # "--permission",
        # "moderation",
        # "--staff",
        # "--superuser",
        # ),
        # [
        # (
        # users,
        # "handler_create_user",
        # {
        # "username": "testuser",
        # "password": "testpassword",
        # "email": "test@hello.com",
        # "upload_quota": 35,
        # "permissions": ("library", "moderation"),
        # "is_staff": True,
        # "is_superuser": True,
        # },
        # )
        # ],
        # ),
        # (
        # ("users", "rm"),
        # ("testuser1", "testuser2", "--no-input"),
        # [
        # (
        # users,
        # "handler_delete_user",
        # {"usernames": ("testuser1", "testuser2"), "soft": True},
        # )
        # ],
        # ),
        # (
        # ("users", "rm"),
        # (
        # "testuser1",
        # "testuser2",
        # "--no-input",
        # "--hard",
        # ),
        # [
        # (
        # users,
        # "handler_delete_user",
        # {"usernames": ("testuser1", "testuser2"), "soft": False},
        # )
        # ],
        # ),
        # (
        # ("users", "set"),
        # (
        # "testuser1",
        # "testuser2",
        # "--no-input",
        # "--inactive",
        # "--upload-quota",
        # "35",
        # "--no-staff",
        # "--superuser",
        # "--permission-library",
        # "--no-permission-moderation",
        # "--no-permission-settings",
        # "--password",
        # "newpassword",
        # ),
        # [
        # (
        # users,
        # "handler_update_user",
        # {
        # "usernames": ("testuser1", "testuser2"),
        # "kwargs": {
        # "is_active": False,
        # "upload_quota": 35,
        # "is_staff": False,
        # "is_superuser": True,
        # "permission_library": True,
        # "permission_moderation": False,
        # "permission_settings": False,
        # "password": "newpassword",
        # },
        # },
        # )
        # ],
        # ),
        # (
        # ("albums", "add-tags-from-tracks"),
        # tuple(),
        # [(library, "handler_add_tags_from_tracks", {"albums": True})],
        # ),
        (
            ("users", "create"),
            (
                "--username",
                "testuser",
                "--password",
                "testpassword",
                "--email",
                "test@hello.com",
                "--upload-quota",
                "35",
                "--permission",
                "library",
                "--permission",
                "moderation",
                "--staff",
                "--superuser",
            ),
            [
                (
                    users,
                    "handler_create_user",
                    {
                        "username": "testuser",
                        "password": "testpassword",
                        "email": "test@hello.com",
                        "upload_quota": 35,
                        "permissions": ("library", "moderation"),
                        "is_staff": True,
                        "is_superuser": True,
                    },
                )
            ],
        ),
        (
            ("users", "rm"),
            ("testuser1", "testuser2", "--no-input"),
            [
                (
                    users,
                    "handler_delete_user",
                    {"usernames": ("testuser1", "testuser2"), "soft": True},
                )
            ],
        ),
        (
            ("users", "rm"),
            (
                "testuser1",
                "testuser2",
                "--no-input",
                "--hard",
            ),
            [
                (
                    users,
                    "handler_delete_user",
                    {"usernames": ("testuser1", "testuser2"), "soft": False},
                )
            ],
        ),
        (
            ("users", "set"),
            (
                "testuser1",
                "testuser2",
                "--no-input",
                "--inactive",
                "--upload-quota",
                "35",
                "--no-staff",
                "--superuser",
                "--permission-library",
                "--no-permission-moderation",
                "--no-permission-settings",
                "--password",
                "newpassword",
            ),
            [
                (
                    users,
                    "handler_update_user",
                    {
                        "usernames": ("testuser1", "testuser2"),
                        "kwargs": {
                            "is_active": False,
                            "upload_quota": 35,
                            "is_staff": False,
                            "is_superuser": True,
                            "permission_library": True,
                            "permission_moderation": False,
                            "permission_settings": False,
                            "password": "newpassword",
                        },
                    },
                )
            ],
        ),
        (
            ("albums", "add-tags-from-tracks"),
            tuple(),
            [(library, "handler_add_tags_from_tracks", {"albums": True})],
        ),
        (
            ("artists", "add-tags-from-tracks"),
            tuple(),
@@ -1,6 +1,5 @@
import os

import pytest
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError

@@ -117,15 +116,15 @@ commands = ["createsuperuser", "makemigrations"]


@pytest.mark.parametrize("command", commands)
def test_blocked_commands(command, mocker):
    mocker.patch.dict(os.environ, {"FORCE": "0"})
def test_blocked_commands(command):
    with pytest.raises(CommandError):
        setattr(settings, "FORCE", 0)
        call_command(command)


@pytest.mark.parametrize("command", commands)
def test_unblocked_commands(command, mocker):
    mocker.patch.dict(os.environ, {"FORCE": "1"})
    setattr(settings, "FORCE", 1)

    call_command(command)
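
These tests now toggle a FORCE Django setting instead of the FORCE environment variable before running blocked management commands. As a purely hypothetical illustration of the behaviour the tests exercise (this is not Funkwhale's actual guard), the check could look like:

from django.conf import settings
from django.core.management.base import CommandError


def ensure_command_allowed(command_name: str) -> None:
    """Raise CommandError unless the FORCE setting is truthy (illustrative guard, not the real code)."""
    if not getattr(settings, "FORCE", False):
        raise CommandError(
            f"{command_name} is disabled on this deployment; set FORCE=1 to run it anyway."
        )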
@@ -391,7 +391,7 @@ def migrator(transactional_db):
@pytest.fixture(autouse=True)
def rsa_small_key(settings):
    # smaller size for faster generation, since it's CPU hungry
    settings.RSA_KEY_SIZE = 512
    settings.RSA_KEY_SIZE = 1024


@pytest.fixture(autouse=True)
@@ -4,7 +4,6 @@ import logging
import liblistenbrainz
import pytest
from django.urls import reverse
from django.utils import timezone

from config import plugins
from funkwhale_api.contrib.listenbrainz import funkwhale_ready
@@ -52,7 +51,8 @@ def test_sync_listenings_from_listenbrainz(factories, mocker, caplog):
    factories["music.Track"](mbid="f89db7f8-4a1f-4228-a0a1-e7ba028b7476")
    track = factories["music.Track"](mbid="54c60860-f43d-484e-b691-7ab7ec8de559")
    factories["history.Listening"](
        creation_date=datetime.datetime.fromtimestamp(1871, timezone.utc), track=track
        creation_date=datetime.datetime.fromtimestamp(1871, datetime.timezone.utc),
        track=track,
    )

    conf = {
@@ -5,8 +5,10 @@ from funkwhale_api.federation import authentication, exceptions, jsonld, keys

def test_authenticate(factories, mocker, api_request):
    private, public = keys.get_key_pair()
    factories["federation.Domain"](name="test.federation", nodeinfo_fetch_date=None)
    actor_url = "https://test.federation/actor"
    domain = factories["federation.Domain"](
        name="test.federationnolocal", nodeinfo_fetch_date=None
    )
    actor_url = "https://test.federationnolocal/actor"
    mocker.patch(
        "funkwhale_api.federation.actors.get_actor_data",
        return_value={
@@ -42,11 +44,12 @@ def test_authenticate(factories, mocker, api_request):
    authenticator = authentication.SignatureAuthentication()
    user, _ = authenticator.authenticate(django_request)
    actor = django_request.actor

    actor.domain = domain
    actor.save()
    assert user.is_anonymous is True
    assert actor.public_key == public.decode("utf-8")
    assert actor.fid == actor_url
    update_domain_nodeinfo.assert_called_once_with(domain_name="test.federation")
    update_domain_nodeinfo.assert_called_once_with(domain_name="test.federationnolocal")


def test_authenticate_skips_blocked_domain(factories, api_request):
@@ -1268,7 +1268,7 @@ def test_activity_pub_upload_serializer_from_ap(factories, mocker, r_mock):
        "name": "Ignored",
        "published": published.isoformat(),
        "updated": updated.isoformat(),
        "duration": 43,
        "duration": "PT43S",
        "bitrate": 42,
        "size": 66,
        "url": {"href": "https://audio.file", "type": "Link", "mediaType": "audio/mp3"},
@@ -1337,7 +1337,7 @@ def test_activity_pub_upload_serializer_from_ap(factories, mocker, r_mock):
    assert track_create.call_count == 1
    assert upload.fid == data["id"]
    assert upload.track.fid == data["track"]["id"]
    assert upload.duration == data["duration"]
    assert upload.duration == 43
    assert upload.size == data["size"]
    assert upload.bitrate == data["bitrate"]
    assert upload.source == data["url"]["href"]
@@ -1357,7 +1357,7 @@ def test_activity_pub_upload_serializer_from_ap_update(factories, mocker, now, r
        "name": "Ignored",
        "published": now.isoformat(),
        "updated": now.isoformat(),
        "duration": 42,
        "duration": "PT42S",
        "bitrate": 42,
        "size": 66,
        "url": {
@@ -1376,7 +1376,7 @@ def test_activity_pub_upload_serializer_from_ap_update(factories, mocker, now, r
    upload.refresh_from_db()

    assert upload.fid == data["id"]
    assert upload.duration == data["duration"]
    assert upload.duration == 42
    assert upload.size == data["size"]
    assert upload.bitrate == data["bitrate"]
    assert upload.source == data["url"]["href"]
@@ -1408,7 +1408,7 @@ def test_activity_pub_audio_serializer_to_ap(factories):
        "name": upload.track.full_name,
        "published": upload.creation_date.isoformat(),
        "updated": upload.modification_date.isoformat(),
        "duration": upload.duration,
        "duration": "PT43S",
        "bitrate": upload.bitrate,
        "size": upload.size,
        "to": contexts.AS.Public,
@@ -1777,7 +1777,7 @@ def test_channel_upload_serializer(factories):
        "content": common_utils.render_html(content.text, content.content_type),
        "to": "https://www.w3.org/ns/activitystreams#Public",
        "position": upload.track.position,
        "duration": upload.duration,
        "duration": "PT54S",
        "album": upload.track.album.fid,
        "disc": upload.track.disc_number,
        "copyright": upload.track.copyright,
@@ -1826,7 +1826,7 @@ def test_channel_upload_serializer_from_ap_create(factories, now, mocker):
        "published": now.isoformat(),
        "mediaType": "text/html",
        "content": "<p>Hello</p>",
        "duration": 543,
        "duration": "PT543S",
        "position": 4,
        "disc": 2,
        "album": album.fid,
@@ -1875,7 +1875,7 @@ def test_channel_upload_serializer_from_ap_create(factories, now, mocker):
    assert upload.mimetype == payload["url"][1]["mediaType"]
    assert upload.size == payload["url"][1]["size"]
    assert upload.bitrate == payload["url"][1]["bitrate"]
    assert upload.duration == payload["duration"]
    assert upload.duration == 543
    assert upload.track.artist_credit.all()[0].artist == channel.artist
    assert upload.track.position == payload["position"]
    assert upload.track.disc_number == payload["disc"]
@@ -1909,7 +1909,7 @@ def test_channel_upload_serializer_from_ap_update(factories, now, mocker):
        "published": now.isoformat(),
        "mediaType": "text/html",
        "content": "<p>Hello</p>",
        "duration": 543,
        "duration": "PT543S",
        "position": 4,
        "disc": 2,
        "album": album.fid,
@@ -1959,7 +1959,7 @@ def test_channel_upload_serializer_from_ap_update(factories, now, mocker):
    assert upload.mimetype == payload["url"][1]["mediaType"]
    assert upload.size == payload["url"][1]["size"]
    assert upload.bitrate == payload["url"][1]["bitrate"]
    assert upload.duration == payload["duration"]
    assert upload.duration == 543
    assert upload.track.artist_credit.all()[0].artist == channel.artist
    assert upload.track.position == payload["position"]
    assert upload.track.disc_number == payload["disc"]
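
Several assertions above change from raw integers to strings such as "PT43S": the tests now expect upload durations to be exchanged as ISO 8601 durations in ActivityPub payloads, while the deserialized upload keeps plain seconds (hence assert upload.duration == 43). A minimal, dependency-free sketch of that conversion, with helper names of our own choosing:

import re

_DURATION_RE = re.compile(r"^PT(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?$")


def seconds_to_iso8601(seconds: int) -> str:
    """43 -> 'PT43S'."""
    return f"PT{seconds}S"


def iso8601_to_seconds(value: str) -> int:
    """'PT1H2M3S' -> 3723, 'PT43S' -> 43."""
    match = _DURATION_RE.match(value)
    if not match:
        raise ValueError(f"not an ISO 8601 duration: {value!r}")
    hours, minutes, seconds = (int(g) if g else 0 for g in match.groups())
    return hours * 3600 + minutes * 60 + seconds


assert seconds_to_iso8601(43) == "PT43S"
assert iso8601_to_seconds("PT43S") == 43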
@@ -19,7 +19,7 @@ def test_upload_import_status_updated_broadcast(factories, mocker):
    upload = factories["music.Upload"](
        import_status="finished", library__actor__user=user
    )
    signals.upload_import_status_updated.send(
    signals.upload_import_status_updated.send_robust(
        sender=None, upload=upload, old_status="pending", new_status="finished"
    )
    group_send.assert_called_once_with(

Binary file not shown.
@@ -187,6 +187,7 @@ def test_can_get_metadata_from_id3_aiff_file(field, value):
        "with_cover.ogg",
        "with_cover.opus",
        "test.m4a",
        "test_coverart.ogg",
    ],
)
def test_can_get_pictures(name):
@@ -1460,7 +1460,7 @@ def test_tag_albums_from_tracks(queryset_equal_queries, factories, mocker):
    get_tags_from_foreign_key.assert_called_once_with(
        ids=expected_queryset.filter(pk__in=[1, 2]),
        foreign_key_model=models.Track,
        foreign_key_attr="album",
        foreign_key_attr="albums",
    )

    add_tags_batch.assert_called_once_with(
@@ -1,6 +1,5 @@
import os
import pathlib
import tempfile

import pytest

@@ -114,25 +113,6 @@ def test_get_dirs_and_files(path, expected, tmpdir):
    assert utils.browse_dir(root_path, path) == expected


@pytest.mark.parametrize(
    "name, expected",
    [
        ("sample.flac", {"bitrate": 128000, "length": 0}),
        ("test.mp3", {"bitrate": 16000, "length": 268}),
        ("test.ogg", {"bitrate": 128000, "length": 1}),
        ("test.opus", {"bitrate": 128000, "length": 1}),
    ],
)
def test_transcode_file(name, expected):
    path = pathlib.Path(os.path.join(DATA_DIR, name))
    with tempfile.NamedTemporaryFile() as dest:
        utils.transcode_file(path, pathlib.Path(dest.name))
        with open(dest.name, "rb") as f:
            result = {k: round(v) for k, v in utils.get_audio_file_data(f).items()}

    assert result == expected


def test_custom_s3_domain(factories, settings):
    """See #2220"""
    settings.AWS_S3_CUSTOM_DOMAIN = "my.custom.domain.tld"
@@ -621,8 +621,6 @@ def test_listen_transcode_in_place(
        source="file://" + os.path.join(DATA_DIR, "test.ogg"),
    )

    assert upload.get_audio_segment()

    url = reverse("api:v1:listen-detail", kwargs={"uuid": upload.track.uuid})
    handle_serve = mocker.spy(views, "handle_serve")
    response = logged_in_api_client.get(url, {"to": "mp3"})
@@ -782,56 +780,6 @@ def test_user_can_create_upload(logged_in_api_client, factories, mocker, audio_f
    m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk)


def test_upload_creates_implicit_upload_group(
    logged_in_api_client, factories, mocker, audio_file
):
    library = factories["music.Library"](actor__user=logged_in_api_client.user)
    url = reverse("api:v1:uploads-list")
    upload_group_count = models.UploadGroup.objects.count()

    response = logged_in_api_client.post(
        url,
        {
            "audio_file": audio_file,
            "source": "upload://test",
            "library": library.uuid,
            "import_metadata": '{"title": "foo"}',
        },
    )

    assert response.status_code == 201
    assert upload_group_count + 1 == models.UploadGroup.objects.count()
    assert (
        models.UploadGroup.objects.filter(
            name=str(datetime.datetime.date(datetime.datetime.now()))
        ).count()
        == 1
    )


def test_upload_creates_named_upload_group(
    logged_in_api_client, factories, mocker, audio_file
):
    library = factories["music.Library"](actor__user=logged_in_api_client.user)
    url = reverse("api:v1:uploads-list")
    upload_group_count = models.UploadGroup.objects.count()

    response = logged_in_api_client.post(
        url,
        {
            "audio_file": audio_file,
            "source": "upload://test",
            "import_reference": "test",
            "library": library.uuid,
            "import_metadata": '{"title": "foo"}',
        },
    )

    assert response.status_code == 201
    assert upload_group_count + 1 == models.UploadGroup.objects.count()
    assert models.UploadGroup.objects.filter(name="test").count() == 1


def test_user_can_create_upload_in_channel(
    logged_in_api_client, factories, mocker, audio_file
):
@@ -1639,90 +1587,3 @@ def test_can_patch_upload_list(factories, logged_in_api_client):

    assert response.status_code == 200
    assert upload.library.privacy_level == "everyone"


def test_can_create_upload_group_without_name(logged_in_api_client):
    logged_in_api_client.user.create_actor()
    count = models.UploadGroup.objects.count()

    url = reverse("api:v2:upload-groups-list")
    response = logged_in_api_client.post(url)

    assert response.status_code == 201
    assert count + 1 == models.UploadGroup.objects.count()
    assert response.data.get("guid") != ""
    assert response.data.get("name") != ""
    assert "https://test.federation/api/v2/upload-groups/" in response.data.get(
        "uploadUrl"
    )


def test_can_create_upload_group_with_name(logged_in_api_client):
    logged_in_api_client.user.create_actor()
    count = models.UploadGroup.objects.count()

    url = reverse("api:v2:upload-groups-list")
    response = logged_in_api_client.post(url, {"name": "Test Name"})

    assert response.status_code == 201
    assert count + 1 == models.UploadGroup.objects.count()
    assert response.data.get("guid") != ""
    assert response.data.get("name") == "Test Name"
    assert "https://test.federation/api/v2/upload-groups/" in response.data.get(
        "uploadUrl"
    )


def test_user_can_create_upload_v2(logged_in_api_client, factories, mocker, audio_file):
    library = factories["music.Library"](actor__user=logged_in_api_client.user)
    logged_in_api_client.user.create_actor()

    upload_group = factories["music.UploadGroup"](owner=logged_in_api_client.user.actor)
    upload_url = upload_group.upload_url

    m = mocker.patch("funkwhale_api.common.utils.on_commit")

    response = logged_in_api_client.post(
        upload_url,
        {
            "audioFile": audio_file,
            "metadata": '{"title": "foo"}',
            "target": f'{{"library": "{ library.uuid }"}}',
        },
    )

    print(response.data)

    assert response.status_code == 200

    upload = library.uploads.latest("id")

    audio_file.seek(0)
    assert upload.audio_file.read() == audio_file.read()
    assert upload.source == "upload://test"
    assert upload.import_status == "pending"
    assert upload.import_metadata == {"title": "foo"}
    assert upload.track is None
    assert upload.upload_group == upload_group
    m.assert_called_once_with(tasks.process_upload.delay, upload_id=upload.pk)


def test_user_cannot_create_upload_for_foreign_group(
    logged_in_api_client, factories, mocker, audio_file
):
    library = factories["music.Library"](actor__user=logged_in_api_client.user)
    logged_in_api_client.user.create_actor()

    upload_group = factories["music.UploadGroup"]()
    upload_url = upload_group.upload_url

    response = logged_in_api_client.post(
        upload_url,
        {
            "audioFile": audio_file,
            "metadata": '{"title": "foo"}',
            "target": f'{{"library": "{ library.uuid }"}}',
        },
    )

    assert response.status_code == 403
@@ -91,7 +91,7 @@ def test_build_radio_queryset_with_redis_and_without_fw_db(factories, mocker):

def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):
    mocker.patch.object(
        troi.core.Patch,
        troi.patch.Patch,
        "generate_playlist",
        side_effect=ConnectTimeout,
    )
@@ -105,7 +105,7 @@ def test_build_radio_queryset_catch_troi_ConnectTimeout(mocker):

def test_build_radio_queryset_catch_troi_no_candidates(mocker):
    mocker.patch.object(
        troi.core.Patch,
        troi.patch.Patch,
        "generate_playlist",
    )
    qs = Track.objects.all()
@@ -646,12 +646,11 @@ def test_search3(f, db, logged_in_api_client, factories):

@pytest.mark.parametrize("f", ["json"])
def test_get_playlists(f, db, logged_in_api_client, factories):
    logged_in_api_client.user.create_actor()
    url = reverse("api:subsonic:subsonic-get_playlists")
    assert url.endswith("getPlaylists") is True

    playlist1 = factories["playlists.PlaylistTrack"](
        playlist__actor__user=logged_in_api_client.user
        playlist__actor=logged_in_api_client.user.create_actor()
    ).playlist
    playlist2 = factories["playlists.PlaylistTrack"](
        playlist__privacy_level="everyone"
@@ -664,7 +663,6 @@ def test_get_playlists(f, db, logged_in_api_client, factories):
    # no track
    playlist4 = factories["playlists.Playlist"](privacy_level="everyone")

    factories["users.User"](actor=playlist1.actor)
    factories["users.User"](actor=playlist2.actor)
    factories["users.User"](actor=playlist3.actor)
    factories["users.User"](actor=playlist4.actor)
@@ -692,7 +690,6 @@ def test_get_playlist(f, db, logged_in_api_client, factories):
    playlist = factories["playlists.PlaylistTrack"](
        playlist__actor__user=logged_in_api_client.user
    ).playlist
    factories["users.User"](actor=playlist.actor)

    response = logged_in_api_client.get(url, {"f": f, "id": playlist.pk})
@@ -3,8 +3,8 @@ from funkwhale_api.tags import models, tasks


def test_get_tags_from_foreign_key(factories):
    rock_tag = factories["tags.Tag"](name="Rock")
    rap_tag = factories["tags.Tag"](name="Rap")
    rock_tag = factories["tags.Tag"](name="rock")
    rap_tag = factories["tags.Tag"](name="rap")
    artist = factories["music.Artist"]()
    factories["music.Track"].create_batch(
        3, artist_credit__artist=artist, set_tags=["rock", "rap"]
@@ -38,7 +38,7 @@ def test_can_create_user_via_api_mail_verification_mandatory(
    }
    preferences["users__registration_enabled"] = True
    response = api_client.post(url, data)
    assert response.status_code == 204
    assert response.status_code == 201

    u = User.objects.get(email="test1@test.com")
    assert u.username == "test1"
@@ -102,7 +102,7 @@ def test_can_signup_with_invitation(preferences, factories, api_client):
    }
    preferences["users__registration_enabled"] = False
    response = api_client.post(url, data)
    assert response.status_code == 204
    assert response.status_code == 201
    u = User.objects.get(email="test1@test.com")
    assert u.username == "test1"
    assert u.invitation == invitation
@@ -322,7 +322,7 @@ def test_creating_user_creates_actor_as_well(
    mocker.patch("funkwhale_api.users.models.create_actor", return_value=actor)
    response = api_client.post(url, data)

    assert response.status_code == 204
    assert response.status_code == 201

    user = User.objects.get(username="test1")

@@ -343,7 +343,7 @@ def test_creating_user_sends_confirmation_email(
    preferences["instance__name"] = "Hello world"
    response = api_client.post(url, data)

    assert response.status_code == 204
    assert response.status_code == 201

    confirmation_message = mailoutbox[-1]
    assert "Hello world" in confirmation_message.body
@@ -425,7 +425,7 @@ def test_signup_with_approval_enabled(
    }
    on_commit = mocker.patch("funkwhale_api.common.utils.on_commit")
    response = api_client.post(url, data, format="json")
    assert response.status_code == 204
    assert response.status_code == 201
    u = User.objects.get(email="test1@test.com")
    assert u.username == "test1"
    assert u.is_active is False
@@ -18,7 +18,8 @@
    "test:generate-mock-server": "msw-auto-mock ../docs/schema.yml -o test/msw-server.ts --node",
    "lint": "eslint --cache --cache-strategy content --ext .ts,.js,.vue,.json,.html src test cypress public/embed.html",
    "lint:tsc": "vue-tsc --noEmit --incremental && tsc --noEmit --incremental -p cypress",
    "generate-local-schema": "yarn run openapi-typescript ../api/schema.yml -o src/generated/types.ts"
    "generate-local-schema": "yarn run openapi-typescript ../api/schema.yml -o src/generated/types.ts",
    "generate-remote-schema": "yarn run openapi-typescript https://docs.funkwhale.audio/develop/swagger/schema.yml -o src/generated/types.ts"
  },
  "dependencies": {
    "@sentry/tracing": "7.47.0",